From 21eee1734456f15feaadeb05211a9e185927da03 Mon Sep 17 00:00:00 2001
From: evilhero
Date: Sun, 10 Sep 2017 11:50:08 -0400
Subject: [PATCH] IMP: New scheduler tab (Manage / Activity), plus assorted fixes and improvements

IMP: New scheduler tab (Manage / Activity) where you can see each job's current run status, next run time, and previous run times, as well as force or pause a job.
FIX: Disabling torrents will now properly hide torrent information.
IMP: Specified the daemon port for Deluge as an on-screen tip for more detail.
IMP: Added 100, 200, and ALL as viewable watchlist views.
FIX: When viewing the pullist with annual integration enabled, a present annual would incorrectly link to an invalid annual series instead of the actual series itself.
IMP: Added more detailed error messages for metatagging errors and better handling of stranded files during cleanup.
IMP: Improved some handling for weekly pull-list one-offs and refactored the nzb/one-off post-processing into a separate function for future callables.
Moved all the main URL locations for the public torrent sites into the init module so they can be cascaded down for use in other modules as a global.
IMP: Added a 'deep_search_32p' variable in config.ini for specific usage with 32P: when there is more than one result, it will dig deeper into each result to try to figure out if there are series matches, as opposed to the default, which will only use the ref32p table if available or just take the first hit in a multiple-series search result and ignore the remainder.
FIX: Fixed some unknown characters appearing in the pullist due to unicode-related conversion problems.
FIX: Fixed some special cases of file-parsing errors due to the Volume label being named differently than expected.
FIX: Added a 3s pause between experimental searches to try not to hit their frequency limitation.
IMP: Weekly pullist one-offs will now show a status of Snatched/Downloaded as required.
FIX: Fixed some Deluge parameter problems when using the auto-snatch torrent script/option.
IMP: Changed the download location in the auto-snatch option to an environment variable instead of being passed directly, to avoid unicode-related problems.
FIX: Fixed some magnet-related issues for torrents when using a watchdir + TPSE.
FIX: Added a more verbose error message for rtorrent connection issues.
FIX: Could not connect to the rtorrent client if no username/password were provided.
IMP: Set the db updater to run every 5 minutes against the watchlist, automatically refreshing the oldest-updated series that is more than 5 hours old on each pass (a forced db update from the Activity/Job Schedulers page will run the db updater against the entire watchlist in sequence).
IMP: Attempt to handle long paths in Windows (i.e. > 256 characters) by prepending the unicode Windows API character to the import directory path (Windows only).
IMP: When manually metatagging a series, the series will be updated after all of the metatagging has completed, as opposed to after each issue.
IMP: Will now display available inkdrops on the Config/Search Providers tab when using 32P (in the future this will utilize/indicate the inkdrop threshold when downloading).

--- Mylar.py | 2 +- data/interfaces/default/comicdetails.html | 10 +- data/interfaces/default/config.html | 29 +- data/interfaces/default/css/style.css | 40 + data/interfaces/default/index.html | 2 +- data/interfaces/default/manage.html | 101 +- data/interfaces/default/weeklypull.html | 11 +- lib/apscheduler/__init__.py | 11 +- lib/apscheduler/events.py | 116 +- lib/apscheduler/executors/__init__.py | 0 lib/apscheduler/executors/asyncio.py | 49 + lib/apscheduler/executors/base.py | 137 ++ lib/apscheduler/executors/base_py3.py | 41 + lib/apscheduler/executors/debug.py | 20 + lib/apscheduler/executors/gevent.py | 30 + lib/apscheduler/executors/pool.py | 54 + lib/apscheduler/executors/tornado.py | 54 + lib/apscheduler/executors/twisted.py | 25 + lib/apscheduler/job.py | 363 ++-- lib/apscheduler/jobstores/base.py | 154 +- lib/apscheduler/jobstores/memory.py | 108 ++ lib/apscheduler/jobstores/mongodb.py | 141 ++ lib/apscheduler/jobstores/mongodb_store.py | 84 - lib/apscheduler/jobstores/ram_store.py | 25 - lib/apscheduler/jobstores/redis.py | 146 ++ lib/apscheduler/jobstores/rethinkdb.py | 153 ++ lib/apscheduler/jobstores/shelve_store.py | 65 - lib/apscheduler/jobstores/sqlalchemy.py | 148 ++ lib/apscheduler/jobstores/sqlalchemy_store.py | 87 - lib/apscheduler/jobstores/zookeeper.py | 179 ++ lib/apscheduler/scheduler.py | 559 ------ lib/apscheduler/schedulers/__init__.py | 12 + lib/apscheduler/schedulers/asyncio.py | 67 + lib/apscheduler/schedulers/background.py | 41 + lib/apscheduler/schedulers/base.py | 1006 +++++++++++ lib/apscheduler/schedulers/blocking.py | 33 + lib/apscheduler/schedulers/gevent.py | 35 + lib/apscheduler/schedulers/qt.py | 42 + lib/apscheduler/schedulers/tornado.py | 63 + lib/apscheduler/schedulers/twisted.py | 62 + lib/apscheduler/threadpool.py | 133 -- lib/apscheduler/triggers/__init__.py | 3 - lib/apscheduler/triggers/base.py | 19 + lib/apscheduler/triggers/cron/__init__.py | 157 +- lib/apscheduler/triggers/cron/expressions.py | 73 +- lib/apscheduler/triggers/cron/fields.py | 39 +- lib/apscheduler/triggers/date.py | 51 + lib/apscheduler/triggers/interval.py | 95 +- lib/apscheduler/triggers/simple.py | 17 - lib/apscheduler/util.py | 375 ++-- lib/concurrent/LICENSE | 48 + lib/concurrent/PKG-INFO | 16 + lib/concurrent/__init__.py | 3 + lib/concurrent/futures/__init__.py | 23 + lib/concurrent/futures/_base.py | 631 +++++++ lib/concurrent/futures/process.py | 363 ++++ lib/concurrent/futures/thread.py | 149 ++ lib/funcsigs/__init__.py | 829 +++++++++ lib/funcsigs/version.py | 1 + lib/pytz/LICENSE.txt | 19 + lib/pytz/README.txt | 575 +++++++ lib/pytz/__init__.py | 1513 +++++++++++++++++ lib/pytz/exceptions.py | 48 + lib/pytz/lazy.py | 168 ++ lib/pytz/reference.py | 127 ++ lib/pytz/tzfile.py | 137 ++ lib/pytz/tzinfo.py | 564 ++++++ lib/pytz/zoneinfo/Africa/Abidjan | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Accra | Bin 0 -> 840 bytes lib/pytz/zoneinfo/Africa/Addis_Ababa | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Algiers | Bin 0 -> 760 bytes lib/pytz/zoneinfo/Africa/Asmara | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Asmera | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Bamako | Bin 0 
-> 170 bytes lib/pytz/zoneinfo/Africa/Bangui | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Banjul | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Bissau | Bin 0 -> 208 bytes lib/pytz/zoneinfo/Africa/Blantyre | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Brazzaville | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Bujumbura | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Cairo | Bin 0 -> 2779 bytes lib/pytz/zoneinfo/Africa/Casablanca | Bin 0 -> 1657 bytes lib/pytz/zoneinfo/Africa/Ceuta | Bin 0 -> 2075 bytes lib/pytz/zoneinfo/Africa/Conakry | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Dakar | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Dar_es_Salaam | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Djibouti | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Douala | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/El_Aaiun | Bin 0 -> 1487 bytes lib/pytz/zoneinfo/Africa/Freetown | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Gaborone | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Harare | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Johannesburg | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Juba | Bin 0 -> 683 bytes lib/pytz/zoneinfo/Africa/Kampala | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Khartoum | Bin 0 -> 683 bytes lib/pytz/zoneinfo/Africa/Kigali | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Kinshasa | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Lagos | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Libreville | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Lome | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Luanda | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Lubumbashi | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Lusaka | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Malabo | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Maputo | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Maseru | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Mbabane | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Mogadishu | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Monrovia | Bin 0 -> 241 bytes lib/pytz/zoneinfo/Africa/Nairobi | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Africa/Ndjamena | Bin 0 -> 225 bytes lib/pytz/zoneinfo/Africa/Niamey | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Nouakchott | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Ouagadougou | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Porto-Novo | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Africa/Sao_Tome | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Timbuktu | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Africa/Tripoli | Bin 0 -> 655 bytes lib/pytz/zoneinfo/Africa/Tunis | Bin 0 -> 710 bytes lib/pytz/zoneinfo/Africa/Windhoek | Bin 0 -> 1582 bytes lib/pytz/zoneinfo/America/Adak | Bin 0 -> 2379 bytes lib/pytz/zoneinfo/America/Anchorage | Bin 0 -> 2384 bytes lib/pytz/zoneinfo/America/Anguilla | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Antigua | Bin 0 -> 208 bytes lib/pytz/zoneinfo/America/Araguaina | Bin 0 -> 896 bytes .../zoneinfo/America/Argentina/Buenos_Aires | Bin 0 -> 1087 bytes lib/pytz/zoneinfo/America/Argentina/Catamarca | Bin 0 -> 1129 bytes .../zoneinfo/America/Argentina/ComodRivadavia | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Argentina/Cordoba | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Argentina/Jujuy | Bin 0 -> 1145 bytes lib/pytz/zoneinfo/America/Argentina/La_Rioja | Bin 0 -> 1143 bytes lib/pytz/zoneinfo/America/Argentina/Mendoza | Bin 0 -> 1173 bytes .../zoneinfo/America/Argentina/Rio_Gallegos | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Argentina/Salta | Bin 0 -> 1101 bytes lib/pytz/zoneinfo/America/Argentina/San_Juan | Bin 0 -> 1143 bytes 
lib/pytz/zoneinfo/America/Argentina/San_Luis | Bin 0 -> 1171 bytes lib/pytz/zoneinfo/America/Argentina/Tucuman | Bin 0 -> 1157 bytes lib/pytz/zoneinfo/America/Argentina/Ushuaia | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Aruba | Bin 0 -> 208 bytes lib/pytz/zoneinfo/America/Asuncion | Bin 0 -> 2062 bytes lib/pytz/zoneinfo/America/Atikokan | Bin 0 -> 345 bytes lib/pytz/zoneinfo/America/Atka | Bin 0 -> 2379 bytes lib/pytz/zoneinfo/America/Bahia | Bin 0 -> 1036 bytes lib/pytz/zoneinfo/America/Bahia_Banderas | Bin 0 -> 1588 bytes lib/pytz/zoneinfo/America/Barbados | Bin 0 -> 344 bytes lib/pytz/zoneinfo/America/Belem | Bin 0 -> 588 bytes lib/pytz/zoneinfo/America/Belize | Bin 0 -> 976 bytes lib/pytz/zoneinfo/America/Blanc-Sablon | Bin 0 -> 307 bytes lib/pytz/zoneinfo/America/Boa_Vista | Bin 0 -> 644 bytes lib/pytz/zoneinfo/America/Bogota | Bin 0 -> 257 bytes lib/pytz/zoneinfo/America/Boise | Bin 0 -> 2403 bytes lib/pytz/zoneinfo/America/Buenos_Aires | Bin 0 -> 1087 bytes lib/pytz/zoneinfo/America/Cambridge_Bay | Bin 0 -> 2098 bytes lib/pytz/zoneinfo/America/Campo_Grande | Bin 0 -> 2015 bytes lib/pytz/zoneinfo/America/Cancun | Bin 0 -> 1480 bytes lib/pytz/zoneinfo/America/Caracas | Bin 0 -> 266 bytes lib/pytz/zoneinfo/America/Catamarca | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Cayenne | Bin 0 -> 200 bytes lib/pytz/zoneinfo/America/Cayman | Bin 0 -> 203 bytes lib/pytz/zoneinfo/America/Chicago | Bin 0 -> 3585 bytes lib/pytz/zoneinfo/America/Chihuahua | Bin 0 -> 1522 bytes lib/pytz/zoneinfo/America/Coral_Harbour | Bin 0 -> 345 bytes lib/pytz/zoneinfo/America/Cordoba | Bin 0 -> 1129 bytes lib/pytz/zoneinfo/America/Costa_Rica | Bin 0 -> 341 bytes lib/pytz/zoneinfo/America/Creston | Bin 0 -> 233 bytes lib/pytz/zoneinfo/America/Cuiaba | Bin 0 -> 1987 bytes lib/pytz/zoneinfo/America/Curacao | Bin 0 -> 208 bytes lib/pytz/zoneinfo/America/Danmarkshavn | Bin 0 -> 714 bytes lib/pytz/zoneinfo/America/Dawson | Bin 0 -> 2093 bytes lib/pytz/zoneinfo/America/Dawson_Creek | Bin 0 -> 1059 bytes lib/pytz/zoneinfo/America/Denver | Bin 0 -> 2453 bytes lib/pytz/zoneinfo/America/Detroit | Bin 0 -> 2216 bytes lib/pytz/zoneinfo/America/Dominica | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Edmonton | Bin 0 -> 2402 bytes lib/pytz/zoneinfo/America/Eirunepe | Bin 0 -> 684 bytes lib/pytz/zoneinfo/America/El_Salvador | Bin 0 -> 250 bytes lib/pytz/zoneinfo/America/Ensenada | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/America/Fort_Wayne | Bin 0 -> 1675 bytes lib/pytz/zoneinfo/America/Fortaleza | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/Glace_Bay | Bin 0 -> 2206 bytes lib/pytz/zoneinfo/America/Godthab | Bin 0 -> 1877 bytes lib/pytz/zoneinfo/America/Goose_Bay | Bin 0 -> 3219 bytes lib/pytz/zoneinfo/America/Grand_Turk | Bin 0 -> 1287 bytes lib/pytz/zoneinfo/America/Grenada | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Guadeloupe | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Guatemala | Bin 0 -> 306 bytes lib/pytz/zoneinfo/America/Guayaquil | Bin 0 -> 203 bytes lib/pytz/zoneinfo/America/Guyana | Bin 0 -> 270 bytes lib/pytz/zoneinfo/America/Halifax | Bin 0 -> 3438 bytes lib/pytz/zoneinfo/America/Havana | Bin 0 -> 2437 bytes lib/pytz/zoneinfo/America/Hermosillo | Bin 0 -> 454 bytes .../zoneinfo/America/Indiana/Indianapolis | Bin 0 -> 1675 bytes lib/pytz/zoneinfo/America/Indiana/Knox | Bin 0 -> 2437 bytes lib/pytz/zoneinfo/America/Indiana/Marengo | Bin 0 -> 1731 bytes lib/pytz/zoneinfo/America/Indiana/Petersburg | Bin 0 -> 1913 bytes lib/pytz/zoneinfo/America/Indiana/Tell_City | Bin 0 -> 1735 bytes 
lib/pytz/zoneinfo/America/Indiana/Vevay | Bin 0 -> 1423 bytes lib/pytz/zoneinfo/America/Indiana/Vincennes | Bin 0 -> 1703 bytes lib/pytz/zoneinfo/America/Indiana/Winamac | Bin 0 -> 1787 bytes lib/pytz/zoneinfo/America/Indianapolis | Bin 0 -> 1675 bytes lib/pytz/zoneinfo/America/Inuvik | Bin 0 -> 1928 bytes lib/pytz/zoneinfo/America/Iqaluit | Bin 0 -> 2046 bytes lib/pytz/zoneinfo/America/Jamaica | Bin 0 -> 507 bytes lib/pytz/zoneinfo/America/Jujuy | Bin 0 -> 1145 bytes lib/pytz/zoneinfo/America/Juneau | Bin 0 -> 2362 bytes lib/pytz/zoneinfo/America/Kentucky/Louisville | Bin 0 -> 2781 bytes lib/pytz/zoneinfo/America/Kentucky/Monticello | Bin 0 -> 2361 bytes lib/pytz/zoneinfo/America/Knox_IN | Bin 0 -> 2437 bytes lib/pytz/zoneinfo/America/Kralendijk | Bin 0 -> 208 bytes lib/pytz/zoneinfo/America/La_Paz | Bin 0 -> 243 bytes lib/pytz/zoneinfo/America/Lima | Bin 0 -> 417 bytes lib/pytz/zoneinfo/America/Los_Angeles | Bin 0 -> 2845 bytes lib/pytz/zoneinfo/America/Louisville | Bin 0 -> 2781 bytes lib/pytz/zoneinfo/America/Lower_Princes | Bin 0 -> 208 bytes lib/pytz/zoneinfo/America/Maceio | Bin 0 -> 756 bytes lib/pytz/zoneinfo/America/Managua | Bin 0 -> 463 bytes lib/pytz/zoneinfo/America/Manaus | Bin 0 -> 616 bytes lib/pytz/zoneinfo/America/Marigot | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Martinique | Bin 0 -> 257 bytes lib/pytz/zoneinfo/America/Matamoros | Bin 0 -> 1416 bytes lib/pytz/zoneinfo/America/Mazatlan | Bin 0 -> 1564 bytes lib/pytz/zoneinfo/America/Mendoza | Bin 0 -> 1173 bytes lib/pytz/zoneinfo/America/Menominee | Bin 0 -> 2283 bytes lib/pytz/zoneinfo/America/Merida | Bin 0 -> 1456 bytes lib/pytz/zoneinfo/America/Metlakatla | Bin 0 -> 716 bytes lib/pytz/zoneinfo/America/Mexico_City | Bin 0 -> 1618 bytes lib/pytz/zoneinfo/America/Miquelon | Bin 0 -> 1684 bytes lib/pytz/zoneinfo/America/Moncton | Bin 0 -> 3163 bytes lib/pytz/zoneinfo/America/Monterrey | Bin 0 -> 1416 bytes lib/pytz/zoneinfo/America/Montevideo | Bin 0 -> 2160 bytes lib/pytz/zoneinfo/America/Montreal | Bin 0 -> 3503 bytes lib/pytz/zoneinfo/America/Montserrat | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Nassau | Bin 0 -> 2284 bytes lib/pytz/zoneinfo/America/New_York | Bin 0 -> 3545 bytes lib/pytz/zoneinfo/America/Nipigon | Bin 0 -> 2131 bytes lib/pytz/zoneinfo/America/Nome | Bin 0 -> 2376 bytes lib/pytz/zoneinfo/America/Noronha | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/North_Dakota/Beulah | Bin 0 -> 2389 bytes lib/pytz/zoneinfo/America/North_Dakota/Center | Bin 0 -> 2389 bytes .../zoneinfo/America/North_Dakota/New_Salem | Bin 0 -> 2389 bytes lib/pytz/zoneinfo/America/Ojinaga | Bin 0 -> 1522 bytes lib/pytz/zoneinfo/America/Panama | Bin 0 -> 203 bytes lib/pytz/zoneinfo/America/Pangnirtung | Bin 0 -> 2108 bytes lib/pytz/zoneinfo/America/Paramaribo | Bin 0 -> 308 bytes lib/pytz/zoneinfo/America/Phoenix | Bin 0 -> 353 bytes lib/pytz/zoneinfo/America/Port-au-Prince | Bin 0 -> 1483 bytes lib/pytz/zoneinfo/America/Port_of_Spain | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Porto_Acre | Bin 0 -> 656 bytes lib/pytz/zoneinfo/America/Porto_Velho | Bin 0 -> 588 bytes lib/pytz/zoneinfo/America/Puerto_Rico | Bin 0 -> 255 bytes lib/pytz/zoneinfo/America/Rainy_River | Bin 0 -> 2131 bytes lib/pytz/zoneinfo/America/Rankin_Inlet | Bin 0 -> 1930 bytes lib/pytz/zoneinfo/America/Recife | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/Regina | Bin 0 -> 994 bytes lib/pytz/zoneinfo/America/Resolute | Bin 0 -> 1930 bytes lib/pytz/zoneinfo/America/Rio_Branco | Bin 0 -> 656 bytes lib/pytz/zoneinfo/America/Rosario | Bin 0 -> 1129 bytes 
lib/pytz/zoneinfo/America/Santa_Isabel | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/America/Santarem | Bin 0 -> 626 bytes lib/pytz/zoneinfo/America/Santiago | Bin 0 -> 2531 bytes lib/pytz/zoneinfo/America/Santo_Domingo | Bin 0 -> 489 bytes lib/pytz/zoneinfo/America/Sao_Paulo | Bin 0 -> 2015 bytes lib/pytz/zoneinfo/America/Scoresbysund | Bin 0 -> 1925 bytes lib/pytz/zoneinfo/America/Shiprock | Bin 0 -> 2453 bytes lib/pytz/zoneinfo/America/Sitka | Bin 0 -> 2350 bytes lib/pytz/zoneinfo/America/St_Barthelemy | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/St_Johns | Bin 0 -> 3664 bytes lib/pytz/zoneinfo/America/St_Kitts | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/St_Lucia | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/St_Thomas | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/St_Vincent | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Swift_Current | Bin 0 -> 574 bytes lib/pytz/zoneinfo/America/Tegucigalpa | Bin 0 -> 278 bytes lib/pytz/zoneinfo/America/Thule | Bin 0 -> 1528 bytes lib/pytz/zoneinfo/America/Thunder_Bay | Bin 0 -> 2211 bytes lib/pytz/zoneinfo/America/Tijuana | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/America/Toronto | Bin 0 -> 3503 bytes lib/pytz/zoneinfo/America/Tortola | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Vancouver | Bin 0 -> 2901 bytes lib/pytz/zoneinfo/America/Virgin | Bin 0 -> 170 bytes lib/pytz/zoneinfo/America/Whitehorse | Bin 0 -> 2093 bytes lib/pytz/zoneinfo/America/Winnipeg | Bin 0 -> 2891 bytes lib/pytz/zoneinfo/America/Yakutat | Bin 0 -> 2314 bytes lib/pytz/zoneinfo/America/Yellowknife | Bin 0 -> 1980 bytes lib/pytz/zoneinfo/Antarctica/Casey | Bin 0 -> 272 bytes lib/pytz/zoneinfo/Antarctica/Davis | Bin 0 -> 290 bytes lib/pytz/zoneinfo/Antarctica/DumontDUrville | Bin 0 -> 227 bytes lib/pytz/zoneinfo/Antarctica/Macquarie | Bin 0 -> 1530 bytes lib/pytz/zoneinfo/Antarctica/Mawson | Bin 0 -> 204 bytes lib/pytz/zoneinfo/Antarctica/McMurdo | Bin 0 -> 2460 bytes lib/pytz/zoneinfo/Antarctica/Palmer | Bin 0 -> 2054 bytes lib/pytz/zoneinfo/Antarctica/Rothera | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Antarctica/South_Pole | Bin 0 -> 2460 bytes lib/pytz/zoneinfo/Antarctica/Syowa | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Antarctica/Troll | Bin 0 -> 1161 bytes lib/pytz/zoneinfo/Antarctica/Vostok | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Arctic/Longyearbyen | Bin 0 -> 2251 bytes lib/pytz/zoneinfo/Asia/Aden | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Almaty | Bin 0 -> 936 bytes lib/pytz/zoneinfo/Asia/Amman | Bin 0 -> 1877 bytes lib/pytz/zoneinfo/Asia/Anadyr | Bin 0 -> 1197 bytes lib/pytz/zoneinfo/Asia/Aqtau | Bin 0 -> 1142 bytes lib/pytz/zoneinfo/Asia/Aqtobe | Bin 0 -> 1052 bytes lib/pytz/zoneinfo/Asia/Ashgabat | Bin 0 -> 671 bytes lib/pytz/zoneinfo/Asia/Ashkhabad | Bin 0 -> 671 bytes lib/pytz/zoneinfo/Asia/Baghdad | Bin 0 -> 988 bytes lib/pytz/zoneinfo/Asia/Bahrain | Bin 0 -> 209 bytes lib/pytz/zoneinfo/Asia/Baku | Bin 0 -> 1956 bytes lib/pytz/zoneinfo/Asia/Bangkok | Bin 0 -> 204 bytes lib/pytz/zoneinfo/Asia/Beirut | Bin 0 -> 2175 bytes lib/pytz/zoneinfo/Asia/Bishkek | Bin 0 -> 1061 bytes lib/pytz/zoneinfo/Asia/Brunei | Bin 0 -> 201 bytes lib/pytz/zoneinfo/Asia/Calcutta | Bin 0 -> 291 bytes lib/pytz/zoneinfo/Asia/Chita | Bin 0 -> 1236 bytes lib/pytz/zoneinfo/Asia/Choibalsan | Bin 0 -> 904 bytes lib/pytz/zoneinfo/Asia/Chongqing | Bin 0 -> 414 bytes lib/pytz/zoneinfo/Asia/Chungking | Bin 0 -> 414 bytes lib/pytz/zoneinfo/Asia/Colombo | Bin 0 -> 389 bytes lib/pytz/zoneinfo/Asia/Dacca | Bin 0 -> 390 bytes lib/pytz/zoneinfo/Asia/Damascus | Bin 0 -> 2320 bytes lib/pytz/zoneinfo/Asia/Dhaka | Bin 0 -> 
390 bytes lib/pytz/zoneinfo/Asia/Dili | Bin 0 -> 309 bytes lib/pytz/zoneinfo/Asia/Dubai | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Dushanbe | Bin 0 -> 611 bytes lib/pytz/zoneinfo/Asia/Gaza | Bin 0 -> 2313 bytes lib/pytz/zoneinfo/Asia/Harbin | Bin 0 -> 414 bytes lib/pytz/zoneinfo/Asia/Hebron | Bin 0 -> 2341 bytes lib/pytz/zoneinfo/Asia/Ho_Chi_Minh | Bin 0 -> 373 bytes lib/pytz/zoneinfo/Asia/Hong_Kong | Bin 0 -> 1189 bytes lib/pytz/zoneinfo/Asia/Hovd | Bin 0 -> 848 bytes lib/pytz/zoneinfo/Asia/Irkutsk | Bin 0 -> 1259 bytes lib/pytz/zoneinfo/Asia/Istanbul | Bin 0 -> 2747 bytes lib/pytz/zoneinfo/Asia/Jakarta | Bin 0 -> 370 bytes lib/pytz/zoneinfo/Asia/Jayapura | Bin 0 -> 241 bytes lib/pytz/zoneinfo/Asia/Jerusalem | Bin 0 -> 2265 bytes lib/pytz/zoneinfo/Asia/Kabul | Bin 0 -> 199 bytes lib/pytz/zoneinfo/Asia/Kamchatka | Bin 0 -> 1181 bytes lib/pytz/zoneinfo/Asia/Karachi | Bin 0 -> 403 bytes lib/pytz/zoneinfo/Asia/Kashgar | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Kathmandu | Bin 0 -> 212 bytes lib/pytz/zoneinfo/Asia/Katmandu | Bin 0 -> 212 bytes lib/pytz/zoneinfo/Asia/Khandyga | Bin 0 -> 1324 bytes lib/pytz/zoneinfo/Asia/Kolkata | Bin 0 -> 291 bytes lib/pytz/zoneinfo/Asia/Krasnoyarsk | Bin 0 -> 1226 bytes lib/pytz/zoneinfo/Asia/Kuala_Lumpur | Bin 0 -> 398 bytes lib/pytz/zoneinfo/Asia/Kuching | Bin 0 -> 519 bytes lib/pytz/zoneinfo/Asia/Kuwait | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Macao | Bin 0 -> 795 bytes lib/pytz/zoneinfo/Asia/Macau | Bin 0 -> 795 bytes lib/pytz/zoneinfo/Asia/Magadan | Bin 0 -> 1227 bytes lib/pytz/zoneinfo/Asia/Makassar | Bin 0 -> 280 bytes lib/pytz/zoneinfo/Asia/Manila | Bin 0 -> 361 bytes lib/pytz/zoneinfo/Asia/Muscat | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Nicosia | Bin 0 -> 2016 bytes lib/pytz/zoneinfo/Asia/Novokuznetsk | Bin 0 -> 1248 bytes lib/pytz/zoneinfo/Asia/Novosibirsk | Bin 0 -> 1208 bytes lib/pytz/zoneinfo/Asia/Omsk | Bin 0 -> 1226 bytes lib/pytz/zoneinfo/Asia/Oral | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/Asia/Phnom_Penh | Bin 0 -> 204 bytes lib/pytz/zoneinfo/Asia/Pontianak | Bin 0 -> 375 bytes lib/pytz/zoneinfo/Asia/Pyongyang | Bin 0 -> 279 bytes lib/pytz/zoneinfo/Asia/Qatar | Bin 0 -> 209 bytes lib/pytz/zoneinfo/Asia/Qyzylorda | Bin 0 -> 1082 bytes lib/pytz/zoneinfo/Asia/Rangoon | Bin 0 -> 285 bytes lib/pytz/zoneinfo/Asia/Riyadh | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Saigon | Bin 0 -> 373 bytes lib/pytz/zoneinfo/Asia/Sakhalin | Bin 0 -> 1227 bytes lib/pytz/zoneinfo/Asia/Samarkand | Bin 0 -> 691 bytes lib/pytz/zoneinfo/Asia/Seoul | Bin 0 -> 571 bytes lib/pytz/zoneinfo/Asia/Shanghai | Bin 0 -> 414 bytes lib/pytz/zoneinfo/Asia/Singapore | Bin 0 -> 428 bytes lib/pytz/zoneinfo/Asia/Srednekolymsk | Bin 0 -> 1237 bytes lib/pytz/zoneinfo/Asia/Taipei | Bin 0 -> 800 bytes lib/pytz/zoneinfo/Asia/Tashkent | Bin 0 -> 681 bytes lib/pytz/zoneinfo/Asia/Tbilisi | Bin 0 -> 1142 bytes lib/pytz/zoneinfo/Asia/Tehran | Bin 0 -> 1661 bytes lib/pytz/zoneinfo/Asia/Tel_Aviv | Bin 0 -> 2265 bytes lib/pytz/zoneinfo/Asia/Thimbu | Bin 0 -> 209 bytes lib/pytz/zoneinfo/Asia/Thimphu | Bin 0 -> 209 bytes lib/pytz/zoneinfo/Asia/Tokyo | Bin 0 -> 355 bytes lib/pytz/zoneinfo/Asia/Ujung_Pandang | Bin 0 -> 280 bytes lib/pytz/zoneinfo/Asia/Ulaanbaatar | Bin 0 -> 848 bytes lib/pytz/zoneinfo/Asia/Ulan_Bator | Bin 0 -> 848 bytes lib/pytz/zoneinfo/Asia/Urumqi | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Asia/Ust-Nera | Bin 0 -> 1293 bytes lib/pytz/zoneinfo/Asia/Vientiane | Bin 0 -> 204 bytes lib/pytz/zoneinfo/Asia/Vladivostok | Bin 0 -> 1227 bytes lib/pytz/zoneinfo/Asia/Yakutsk | Bin 0 -> 1226 
bytes lib/pytz/zoneinfo/Asia/Yekaterinburg | Bin 0 -> 1334 bytes lib/pytz/zoneinfo/Asia/Yerevan | Bin 0 -> 1277 bytes lib/pytz/zoneinfo/Atlantic/Azores | Bin 0 -> 3488 bytes lib/pytz/zoneinfo/Atlantic/Bermuda | Bin 0 -> 2004 bytes lib/pytz/zoneinfo/Atlantic/Canary | Bin 0 -> 1913 bytes lib/pytz/zoneinfo/Atlantic/Cape_Verde | Bin 0 -> 254 bytes lib/pytz/zoneinfo/Atlantic/Faeroe | Bin 0 -> 1829 bytes lib/pytz/zoneinfo/Atlantic/Faroe | Bin 0 -> 1829 bytes lib/pytz/zoneinfo/Atlantic/Jan_Mayen | Bin 0 -> 2251 bytes lib/pytz/zoneinfo/Atlantic/Madeira | Bin 0 -> 3478 bytes lib/pytz/zoneinfo/Atlantic/Reykjavik | Bin 0 -> 1167 bytes lib/pytz/zoneinfo/Atlantic/South_Georgia | Bin 0 -> 148 bytes lib/pytz/zoneinfo/Atlantic/St_Helena | Bin 0 -> 170 bytes lib/pytz/zoneinfo/Atlantic/Stanley | Bin 0 -> 1246 bytes lib/pytz/zoneinfo/Australia/ACT | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/Adelaide | Bin 0 -> 2238 bytes lib/pytz/zoneinfo/Australia/Brisbane | Bin 0 -> 452 bytes lib/pytz/zoneinfo/Australia/Broken_Hill | Bin 0 -> 2274 bytes lib/pytz/zoneinfo/Australia/Canberra | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/Currie | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/Darwin | Bin 0 -> 323 bytes lib/pytz/zoneinfo/Australia/Eucla | Bin 0 -> 487 bytes lib/pytz/zoneinfo/Australia/Hobart | Bin 0 -> 2335 bytes lib/pytz/zoneinfo/Australia/LHI | Bin 0 -> 1859 bytes lib/pytz/zoneinfo/Australia/Lindeman | Bin 0 -> 522 bytes lib/pytz/zoneinfo/Australia/Lord_Howe | Bin 0 -> 1859 bytes lib/pytz/zoneinfo/Australia/Melbourne | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/NSW | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/North | Bin 0 -> 323 bytes lib/pytz/zoneinfo/Australia/Perth | Bin 0 -> 479 bytes lib/pytz/zoneinfo/Australia/Queensland | Bin 0 -> 452 bytes lib/pytz/zoneinfo/Australia/South | Bin 0 -> 2238 bytes lib/pytz/zoneinfo/Australia/Sydney | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/Tasmania | Bin 0 -> 2335 bytes lib/pytz/zoneinfo/Australia/Victoria | Bin 0 -> 2223 bytes lib/pytz/zoneinfo/Australia/West | Bin 0 -> 479 bytes lib/pytz/zoneinfo/Australia/Yancowinna | Bin 0 -> 2274 bytes lib/pytz/zoneinfo/Brazil/Acre | Bin 0 -> 656 bytes lib/pytz/zoneinfo/Brazil/DeNoronha | Bin 0 -> 728 bytes lib/pytz/zoneinfo/Brazil/East | Bin 0 -> 2015 bytes lib/pytz/zoneinfo/Brazil/West | Bin 0 -> 616 bytes lib/pytz/zoneinfo/CET | Bin 0 -> 2102 bytes lib/pytz/zoneinfo/CST6CDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Canada/Atlantic | Bin 0 -> 3438 bytes lib/pytz/zoneinfo/Canada/Central | Bin 0 -> 2891 bytes lib/pytz/zoneinfo/Canada/East-Saskatchewan | Bin 0 -> 994 bytes lib/pytz/zoneinfo/Canada/Eastern | Bin 0 -> 3503 bytes lib/pytz/zoneinfo/Canada/Mountain | Bin 0 -> 2402 bytes lib/pytz/zoneinfo/Canada/Newfoundland | Bin 0 -> 3664 bytes lib/pytz/zoneinfo/Canada/Pacific | Bin 0 -> 2901 bytes lib/pytz/zoneinfo/Canada/Saskatchewan | Bin 0 -> 994 bytes lib/pytz/zoneinfo/Canada/Yukon | Bin 0 -> 2093 bytes lib/pytz/zoneinfo/Chile/Continental | Bin 0 -> 2531 bytes lib/pytz/zoneinfo/Chile/EasterIsland | Bin 0 -> 2295 bytes lib/pytz/zoneinfo/Cuba | Bin 0 -> 2437 bytes lib/pytz/zoneinfo/EET | Bin 0 -> 1876 bytes lib/pytz/zoneinfo/EST | Bin 0 -> 127 bytes lib/pytz/zoneinfo/EST5EDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Egypt | Bin 0 -> 2779 bytes lib/pytz/zoneinfo/Eire | Bin 0 -> 3559 bytes lib/pytz/zoneinfo/Etc/GMT | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/GMT+0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/GMT+1 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+10 | Bin 0 -> 139 bytes lib/pytz/zoneinfo/Etc/GMT+11 
| Bin 0 -> 139 bytes lib/pytz/zoneinfo/Etc/GMT+12 | Bin 0 -> 139 bytes lib/pytz/zoneinfo/Etc/GMT+2 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+3 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+4 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+5 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+6 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+7 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+8 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT+9 | Bin 0 -> 135 bytes lib/pytz/zoneinfo/Etc/GMT-0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/GMT-1 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-10 | Bin 0 -> 140 bytes lib/pytz/zoneinfo/Etc/GMT-11 | Bin 0 -> 140 bytes lib/pytz/zoneinfo/Etc/GMT-12 | Bin 0 -> 140 bytes lib/pytz/zoneinfo/Etc/GMT-13 | Bin 0 -> 140 bytes lib/pytz/zoneinfo/Etc/GMT-14 | Bin 0 -> 140 bytes lib/pytz/zoneinfo/Etc/GMT-2 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-3 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-4 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-5 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-6 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-7 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-8 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT-9 | Bin 0 -> 136 bytes lib/pytz/zoneinfo/Etc/GMT0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/Greenwich | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/UCT | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/UTC | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/Universal | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Etc/Zulu | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Europe/Amsterdam | Bin 0 -> 2943 bytes lib/pytz/zoneinfo/Europe/Andorra | Bin 0 -> 1751 bytes lib/pytz/zoneinfo/Europe/Athens | Bin 0 -> 2271 bytes lib/pytz/zoneinfo/Europe/Belfast | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/Europe/Belgrade | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/Berlin | Bin 0 -> 2335 bytes lib/pytz/zoneinfo/Europe/Bratislava | Bin 0 -> 2272 bytes lib/pytz/zoneinfo/Europe/Brussels | Bin 0 -> 2970 bytes lib/pytz/zoneinfo/Europe/Bucharest | Bin 0 -> 2221 bytes lib/pytz/zoneinfo/Europe/Budapest | Bin 0 -> 2405 bytes lib/pytz/zoneinfo/Europe/Busingen | Bin 0 -> 1918 bytes lib/pytz/zoneinfo/Europe/Chisinau | Bin 0 -> 2433 bytes lib/pytz/zoneinfo/Europe/Copenhagen | Bin 0 -> 2160 bytes lib/pytz/zoneinfo/Europe/Dublin | Bin 0 -> 3559 bytes lib/pytz/zoneinfo/Europe/Gibraltar | Bin 0 -> 3061 bytes lib/pytz/zoneinfo/Europe/Guernsey | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/Europe/Helsinki | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Europe/Isle_of_Man | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/Europe/Istanbul | Bin 0 -> 2747 bytes lib/pytz/zoneinfo/Europe/Jersey | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/Europe/Kaliningrad | Bin 0 -> 1550 bytes lib/pytz/zoneinfo/Europe/Kiev | Bin 0 -> 2097 bytes lib/pytz/zoneinfo/Europe/Lisbon | Bin 0 -> 3453 bytes lib/pytz/zoneinfo/Europe/Ljubljana | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/London | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/Europe/Luxembourg | Bin 0 -> 2974 bytes lib/pytz/zoneinfo/Europe/Madrid | Bin 0 -> 2619 bytes lib/pytz/zoneinfo/Europe/Malta | Bin 0 -> 2629 bytes lib/pytz/zoneinfo/Europe/Mariehamn | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Europe/Minsk | Bin 0 -> 1368 bytes lib/pytz/zoneinfo/Europe/Monaco | Bin 0 -> 2953 bytes lib/pytz/zoneinfo/Europe/Moscow | Bin 0 -> 1528 bytes lib/pytz/zoneinfo/Europe/Nicosia | Bin 0 -> 2016 bytes lib/pytz/zoneinfo/Europe/Oslo | Bin 0 -> 2251 bytes lib/pytz/zoneinfo/Europe/Paris | Bin 0 -> 2971 bytes lib/pytz/zoneinfo/Europe/Podgorica | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/Prague | Bin 0 -> 2272 bytes 
lib/pytz/zoneinfo/Europe/Riga | Bin 0 -> 2235 bytes lib/pytz/zoneinfo/Europe/Rome | Bin 0 -> 2678 bytes lib/pytz/zoneinfo/Europe/Samara | Bin 0 -> 1394 bytes lib/pytz/zoneinfo/Europe/San_Marino | Bin 0 -> 2678 bytes lib/pytz/zoneinfo/Europe/Sarajevo | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/Simferopol | Bin 0 -> 1504 bytes lib/pytz/zoneinfo/Europe/Skopje | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/Sofia | Bin 0 -> 2130 bytes lib/pytz/zoneinfo/Europe/Stockholm | Bin 0 -> 1918 bytes lib/pytz/zoneinfo/Europe/Tallinn | Bin 0 -> 2201 bytes lib/pytz/zoneinfo/Europe/Tirane | Bin 0 -> 2098 bytes lib/pytz/zoneinfo/Europe/Tiraspol | Bin 0 -> 2433 bytes lib/pytz/zoneinfo/Europe/Uzhgorod | Bin 0 -> 2103 bytes lib/pytz/zoneinfo/Europe/Vaduz | Bin 0 -> 1918 bytes lib/pytz/zoneinfo/Europe/Vatican | Bin 0 -> 2678 bytes lib/pytz/zoneinfo/Europe/Vienna | Bin 0 -> 2237 bytes lib/pytz/zoneinfo/Europe/Vilnius | Bin 0 -> 2199 bytes lib/pytz/zoneinfo/Europe/Volgograd | Bin 0 -> 1317 bytes lib/pytz/zoneinfo/Europe/Warsaw | Bin 0 -> 2705 bytes lib/pytz/zoneinfo/Europe/Zagreb | Bin 0 -> 1957 bytes lib/pytz/zoneinfo/Europe/Zaporozhye | Bin 0 -> 2111 bytes lib/pytz/zoneinfo/Europe/Zurich | Bin 0 -> 1918 bytes lib/pytz/zoneinfo/Factory | Bin 0 -> 264 bytes lib/pytz/zoneinfo/GB | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/GB-Eire | Bin 0 -> 3687 bytes lib/pytz/zoneinfo/GMT | Bin 0 -> 127 bytes lib/pytz/zoneinfo/GMT+0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/GMT-0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/GMT0 | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Greenwich | Bin 0 -> 127 bytes lib/pytz/zoneinfo/HST | Bin 0 -> 128 bytes lib/pytz/zoneinfo/Hongkong | Bin 0 -> 1189 bytes lib/pytz/zoneinfo/Iceland | Bin 0 -> 1167 bytes lib/pytz/zoneinfo/Indian/Antananarivo | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Indian/Chagos | Bin 0 -> 201 bytes lib/pytz/zoneinfo/Indian/Christmas | Bin 0 -> 149 bytes lib/pytz/zoneinfo/Indian/Cocos | Bin 0 -> 152 bytes lib/pytz/zoneinfo/Indian/Comoro | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Indian/Kerguelen | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Indian/Mahe | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Indian/Maldives | Bin 0 -> 204 bytes lib/pytz/zoneinfo/Indian/Mauritius | Bin 0 -> 253 bytes lib/pytz/zoneinfo/Indian/Mayotte | Bin 0 -> 283 bytes lib/pytz/zoneinfo/Indian/Reunion | Bin 0 -> 171 bytes lib/pytz/zoneinfo/Iran | Bin 0 -> 1661 bytes lib/pytz/zoneinfo/Israel | Bin 0 -> 2265 bytes lib/pytz/zoneinfo/Jamaica | Bin 0 -> 507 bytes lib/pytz/zoneinfo/Japan | Bin 0 -> 355 bytes lib/pytz/zoneinfo/Kwajalein | Bin 0 -> 237 bytes lib/pytz/zoneinfo/Libya | Bin 0 -> 655 bytes lib/pytz/zoneinfo/MET | Bin 0 -> 2102 bytes lib/pytz/zoneinfo/MST | Bin 0 -> 127 bytes lib/pytz/zoneinfo/MST7MDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Mexico/BajaNorte | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/Mexico/BajaSur | Bin 0 -> 1564 bytes lib/pytz/zoneinfo/Mexico/General | Bin 0 -> 1618 bytes lib/pytz/zoneinfo/NZ | Bin 0 -> 2460 bytes lib/pytz/zoneinfo/NZ-CHAT | Bin 0 -> 2057 bytes lib/pytz/zoneinfo/Navajo | Bin 0 -> 2453 bytes lib/pytz/zoneinfo/PRC | Bin 0 -> 414 bytes lib/pytz/zoneinfo/PST8PDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Pacific/Apia | Bin 0 -> 1102 bytes lib/pytz/zoneinfo/Pacific/Auckland | Bin 0 -> 2460 bytes lib/pytz/zoneinfo/Pacific/Bougainville | Bin 0 -> 280 bytes lib/pytz/zoneinfo/Pacific/Chatham | Bin 0 -> 2057 bytes lib/pytz/zoneinfo/Pacific/Chuuk | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Easter | Bin 0 -> 2295 bytes lib/pytz/zoneinfo/Pacific/Efate | Bin 0 -> 478 bytes lib/pytz/zoneinfo/Pacific/Enderbury | Bin 0 -> 
230 bytes lib/pytz/zoneinfo/Pacific/Fakaofo | Bin 0 -> 197 bytes lib/pytz/zoneinfo/Pacific/Fiji | Bin 0 -> 1074 bytes lib/pytz/zoneinfo/Pacific/Funafuti | Bin 0 -> 150 bytes lib/pytz/zoneinfo/Pacific/Galapagos | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Pacific/Gambier | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Pacific/Guadalcanal | Bin 0 -> 172 bytes lib/pytz/zoneinfo/Pacific/Guam | Bin 0 -> 225 bytes lib/pytz/zoneinfo/Pacific/Honolulu | Bin 0 -> 276 bytes lib/pytz/zoneinfo/Pacific/Johnston | Bin 0 -> 276 bytes lib/pytz/zoneinfo/Pacific/Kiritimati | Bin 0 -> 230 bytes lib/pytz/zoneinfo/Pacific/Kosrae | Bin 0 -> 230 bytes lib/pytz/zoneinfo/Pacific/Kwajalein | Bin 0 -> 237 bytes lib/pytz/zoneinfo/Pacific/Majuro | Bin 0 -> 197 bytes lib/pytz/zoneinfo/Pacific/Marquesas | Bin 0 -> 176 bytes lib/pytz/zoneinfo/Pacific/Midway | Bin 0 -> 294 bytes lib/pytz/zoneinfo/Pacific/Nauru | Bin 0 -> 254 bytes lib/pytz/zoneinfo/Pacific/Niue | Bin 0 -> 226 bytes lib/pytz/zoneinfo/Pacific/Norfolk | Bin 0 -> 208 bytes lib/pytz/zoneinfo/Pacific/Noumea | Bin 0 -> 314 bytes lib/pytz/zoneinfo/Pacific/Pago_Pago | Bin 0 -> 272 bytes lib/pytz/zoneinfo/Pacific/Palau | Bin 0 -> 149 bytes lib/pytz/zoneinfo/Pacific/Pitcairn | Bin 0 -> 203 bytes lib/pytz/zoneinfo/Pacific/Pohnpei | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Ponape | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Port_Moresby | Bin 0 -> 172 bytes lib/pytz/zoneinfo/Pacific/Rarotonga | Bin 0 -> 574 bytes lib/pytz/zoneinfo/Pacific/Saipan | Bin 0 -> 255 bytes lib/pytz/zoneinfo/Pacific/Samoa | Bin 0 -> 272 bytes lib/pytz/zoneinfo/Pacific/Tahiti | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Tarawa | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Tongatapu | Bin 0 -> 339 bytes lib/pytz/zoneinfo/Pacific/Truk | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Wake | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Pacific/Wallis | Bin 0 -> 150 bytes lib/pytz/zoneinfo/Pacific/Yap | Bin 0 -> 153 bytes lib/pytz/zoneinfo/Poland | Bin 0 -> 2705 bytes lib/pytz/zoneinfo/Portugal | Bin 0 -> 3453 bytes lib/pytz/zoneinfo/ROC | Bin 0 -> 800 bytes lib/pytz/zoneinfo/ROK | Bin 0 -> 571 bytes lib/pytz/zoneinfo/Singapore | Bin 0 -> 428 bytes lib/pytz/zoneinfo/Turkey | Bin 0 -> 2747 bytes lib/pytz/zoneinfo/UCT | Bin 0 -> 127 bytes lib/pytz/zoneinfo/US/Alaska | Bin 0 -> 2384 bytes lib/pytz/zoneinfo/US/Aleutian | Bin 0 -> 2379 bytes lib/pytz/zoneinfo/US/Arizona | Bin 0 -> 353 bytes lib/pytz/zoneinfo/US/Central | Bin 0 -> 3585 bytes lib/pytz/zoneinfo/US/East-Indiana | Bin 0 -> 1675 bytes lib/pytz/zoneinfo/US/Eastern | Bin 0 -> 3545 bytes lib/pytz/zoneinfo/US/Hawaii | Bin 0 -> 276 bytes lib/pytz/zoneinfo/US/Indiana-Starke | Bin 0 -> 2437 bytes lib/pytz/zoneinfo/US/Michigan | Bin 0 -> 2216 bytes lib/pytz/zoneinfo/US/Mountain | Bin 0 -> 2453 bytes lib/pytz/zoneinfo/US/Pacific | Bin 0 -> 2845 bytes lib/pytz/zoneinfo/US/Pacific-New | Bin 0 -> 2845 bytes lib/pytz/zoneinfo/US/Samoa | Bin 0 -> 272 bytes lib/pytz/zoneinfo/UTC | Bin 0 -> 127 bytes lib/pytz/zoneinfo/Universal | Bin 0 -> 127 bytes lib/pytz/zoneinfo/W-SU | Bin 0 -> 1528 bytes lib/pytz/zoneinfo/WET | Bin 0 -> 1873 bytes lib/pytz/zoneinfo/Zulu | Bin 0 -> 127 bytes lib/pytz/zoneinfo/iso3166.tab | 275 +++ lib/pytz/zoneinfo/localtime | Bin 0 -> 127 bytes lib/pytz/zoneinfo/posixrules | Bin 0 -> 3545 bytes lib/pytz/zoneinfo/zone.tab | 440 +++++ lib/pytz/zoneinfo/zone1970.tab | 371 ++++ lib/tzlocal/LICENSE.txt | 121 ++ lib/tzlocal/README.rst | 80 + lib/tzlocal/__init__.py | 7 + lib/tzlocal/darwin.py | 27 + lib/tzlocal/tests.py | 64 + lib/tzlocal/unix.py | 115 ++ 
lib/tzlocal/win32.py | 93 + lib/tzlocal/windows_tz.py | 542 ++++++ mylar/PostProcessor.py | 1135 +++++++++---- mylar/__init__.py | 342 ++-- mylar/auth32p.py | 46 +- mylar/cmtagmylar.py | 10 +- mylar/dbupdater.py | 10 +- mylar/filechecker.py | 11 +- mylar/findcomicfeed.py | 5 + mylar/helpers.py | 267 ++- mylar/rsscheck.py | 11 +- mylar/rsscheckit.py | 69 +- mylar/search.py | 2 +- mylar/searchit.py | 7 +- mylar/torrent/clients/deluge.py | 2 +- mylar/torrent/clients/rtorrent.py | 14 +- mylar/updater.py | 41 +- mylar/versioncheckit.py | 10 +- mylar/webserve.py | 382 ++++- mylar/weeklypull.py | 285 ++-- mylar/weeklypullit.py | 14 +- .../torrent-auto-snatch/getlftp.sh | 6 +- 683 files changed, 13046 insertions(+), 2239 deletions(-) mode change 100644 => 100755 data/interfaces/default/comicdetails.html mode change 100644 => 100755 data/interfaces/default/config.html create mode 100644 lib/apscheduler/executors/__init__.py create mode 100644 lib/apscheduler/executors/asyncio.py create mode 100644 lib/apscheduler/executors/base.py create mode 100644 lib/apscheduler/executors/base_py3.py create mode 100644 lib/apscheduler/executors/debug.py create mode 100644 lib/apscheduler/executors/gevent.py create mode 100644 lib/apscheduler/executors/pool.py create mode 100644 lib/apscheduler/executors/tornado.py create mode 100644 lib/apscheduler/executors/twisted.py create mode 100644 lib/apscheduler/jobstores/memory.py create mode 100644 lib/apscheduler/jobstores/mongodb.py delete mode 100644 lib/apscheduler/jobstores/mongodb_store.py delete mode 100644 lib/apscheduler/jobstores/ram_store.py create mode 100644 lib/apscheduler/jobstores/redis.py create mode 100644 lib/apscheduler/jobstores/rethinkdb.py delete mode 100644 lib/apscheduler/jobstores/shelve_store.py create mode 100644 lib/apscheduler/jobstores/sqlalchemy.py delete mode 100644 lib/apscheduler/jobstores/sqlalchemy_store.py create mode 100644 lib/apscheduler/jobstores/zookeeper.py delete mode 100644 lib/apscheduler/scheduler.py create mode 100644 lib/apscheduler/schedulers/__init__.py create mode 100644 lib/apscheduler/schedulers/asyncio.py create mode 100644 lib/apscheduler/schedulers/background.py create mode 100644 lib/apscheduler/schedulers/base.py create mode 100644 lib/apscheduler/schedulers/blocking.py create mode 100644 lib/apscheduler/schedulers/gevent.py create mode 100644 lib/apscheduler/schedulers/qt.py create mode 100644 lib/apscheduler/schedulers/tornado.py create mode 100644 lib/apscheduler/schedulers/twisted.py delete mode 100644 lib/apscheduler/threadpool.py create mode 100644 lib/apscheduler/triggers/base.py create mode 100644 lib/apscheduler/triggers/date.py delete mode 100644 lib/apscheduler/triggers/simple.py create mode 100644 lib/concurrent/LICENSE create mode 100644 lib/concurrent/PKG-INFO create mode 100644 lib/concurrent/__init__.py create mode 100644 lib/concurrent/futures/__init__.py create mode 100644 lib/concurrent/futures/_base.py create mode 100644 lib/concurrent/futures/process.py create mode 100644 lib/concurrent/futures/thread.py create mode 100644 lib/funcsigs/__init__.py create mode 100644 lib/funcsigs/version.py create mode 100644 lib/pytz/LICENSE.txt create mode 100644 lib/pytz/README.txt create mode 100644 lib/pytz/__init__.py create mode 100644 lib/pytz/exceptions.py create mode 100644 lib/pytz/lazy.py create mode 100644 lib/pytz/reference.py create mode 100644 lib/pytz/tzfile.py create mode 100644 lib/pytz/tzinfo.py create mode 100644 lib/pytz/zoneinfo/Africa/Abidjan create mode 100644 
lib/pytz/zoneinfo/Africa/Accra create mode 100644 lib/pytz/zoneinfo/Africa/Addis_Ababa create mode 100644 lib/pytz/zoneinfo/Africa/Algiers create mode 100644 lib/pytz/zoneinfo/Africa/Asmara create mode 100644 lib/pytz/zoneinfo/Africa/Asmera create mode 100644 lib/pytz/zoneinfo/Africa/Bamako create mode 100644 lib/pytz/zoneinfo/Africa/Bangui create mode 100644 lib/pytz/zoneinfo/Africa/Banjul create mode 100644 lib/pytz/zoneinfo/Africa/Bissau create mode 100644 lib/pytz/zoneinfo/Africa/Blantyre create mode 100644 lib/pytz/zoneinfo/Africa/Brazzaville create mode 100644 lib/pytz/zoneinfo/Africa/Bujumbura create mode 100644 lib/pytz/zoneinfo/Africa/Cairo create mode 100644 lib/pytz/zoneinfo/Africa/Casablanca create mode 100644 lib/pytz/zoneinfo/Africa/Ceuta create mode 100644 lib/pytz/zoneinfo/Africa/Conakry create mode 100644 lib/pytz/zoneinfo/Africa/Dakar create mode 100644 lib/pytz/zoneinfo/Africa/Dar_es_Salaam create mode 100644 lib/pytz/zoneinfo/Africa/Djibouti create mode 100644 lib/pytz/zoneinfo/Africa/Douala create mode 100644 lib/pytz/zoneinfo/Africa/El_Aaiun create mode 100644 lib/pytz/zoneinfo/Africa/Freetown create mode 100644 lib/pytz/zoneinfo/Africa/Gaborone create mode 100644 lib/pytz/zoneinfo/Africa/Harare create mode 100644 lib/pytz/zoneinfo/Africa/Johannesburg create mode 100644 lib/pytz/zoneinfo/Africa/Juba create mode 100644 lib/pytz/zoneinfo/Africa/Kampala create mode 100644 lib/pytz/zoneinfo/Africa/Khartoum create mode 100644 lib/pytz/zoneinfo/Africa/Kigali create mode 100644 lib/pytz/zoneinfo/Africa/Kinshasa create mode 100644 lib/pytz/zoneinfo/Africa/Lagos create mode 100644 lib/pytz/zoneinfo/Africa/Libreville create mode 100644 lib/pytz/zoneinfo/Africa/Lome create mode 100644 lib/pytz/zoneinfo/Africa/Luanda create mode 100644 lib/pytz/zoneinfo/Africa/Lubumbashi create mode 100644 lib/pytz/zoneinfo/Africa/Lusaka create mode 100644 lib/pytz/zoneinfo/Africa/Malabo create mode 100644 lib/pytz/zoneinfo/Africa/Maputo create mode 100644 lib/pytz/zoneinfo/Africa/Maseru create mode 100644 lib/pytz/zoneinfo/Africa/Mbabane create mode 100644 lib/pytz/zoneinfo/Africa/Mogadishu create mode 100644 lib/pytz/zoneinfo/Africa/Monrovia create mode 100644 lib/pytz/zoneinfo/Africa/Nairobi create mode 100644 lib/pytz/zoneinfo/Africa/Ndjamena create mode 100644 lib/pytz/zoneinfo/Africa/Niamey create mode 100644 lib/pytz/zoneinfo/Africa/Nouakchott create mode 100644 lib/pytz/zoneinfo/Africa/Ouagadougou create mode 100644 lib/pytz/zoneinfo/Africa/Porto-Novo create mode 100644 lib/pytz/zoneinfo/Africa/Sao_Tome create mode 100644 lib/pytz/zoneinfo/Africa/Timbuktu create mode 100644 lib/pytz/zoneinfo/Africa/Tripoli create mode 100644 lib/pytz/zoneinfo/Africa/Tunis create mode 100644 lib/pytz/zoneinfo/Africa/Windhoek create mode 100644 lib/pytz/zoneinfo/America/Adak create mode 100644 lib/pytz/zoneinfo/America/Anchorage create mode 100644 lib/pytz/zoneinfo/America/Anguilla create mode 100644 lib/pytz/zoneinfo/America/Antigua create mode 100644 lib/pytz/zoneinfo/America/Araguaina create mode 100644 lib/pytz/zoneinfo/America/Argentina/Buenos_Aires create mode 100644 lib/pytz/zoneinfo/America/Argentina/Catamarca create mode 100644 lib/pytz/zoneinfo/America/Argentina/ComodRivadavia create mode 100644 lib/pytz/zoneinfo/America/Argentina/Cordoba create mode 100644 lib/pytz/zoneinfo/America/Argentina/Jujuy create mode 100644 lib/pytz/zoneinfo/America/Argentina/La_Rioja create mode 100644 lib/pytz/zoneinfo/America/Argentina/Mendoza create mode 100644 lib/pytz/zoneinfo/America/Argentina/Rio_Gallegos create 
mode 100644 lib/pytz/zoneinfo/America/Argentina/Salta create mode 100644 lib/pytz/zoneinfo/America/Argentina/San_Juan create mode 100644 lib/pytz/zoneinfo/America/Argentina/San_Luis create mode 100644 lib/pytz/zoneinfo/America/Argentina/Tucuman create mode 100644 lib/pytz/zoneinfo/America/Argentina/Ushuaia create mode 100644 lib/pytz/zoneinfo/America/Aruba create mode 100644 lib/pytz/zoneinfo/America/Asuncion create mode 100644 lib/pytz/zoneinfo/America/Atikokan create mode 100644 lib/pytz/zoneinfo/America/Atka create mode 100644 lib/pytz/zoneinfo/America/Bahia create mode 100644 lib/pytz/zoneinfo/America/Bahia_Banderas create mode 100644 lib/pytz/zoneinfo/America/Barbados create mode 100644 lib/pytz/zoneinfo/America/Belem create mode 100644 lib/pytz/zoneinfo/America/Belize create mode 100644 lib/pytz/zoneinfo/America/Blanc-Sablon create mode 100644 lib/pytz/zoneinfo/America/Boa_Vista create mode 100644 lib/pytz/zoneinfo/America/Bogota create mode 100644 lib/pytz/zoneinfo/America/Boise create mode 100644 lib/pytz/zoneinfo/America/Buenos_Aires create mode 100644 lib/pytz/zoneinfo/America/Cambridge_Bay create mode 100644 lib/pytz/zoneinfo/America/Campo_Grande create mode 100644 lib/pytz/zoneinfo/America/Cancun create mode 100644 lib/pytz/zoneinfo/America/Caracas create mode 100644 lib/pytz/zoneinfo/America/Catamarca create mode 100644 lib/pytz/zoneinfo/America/Cayenne create mode 100644 lib/pytz/zoneinfo/America/Cayman create mode 100644 lib/pytz/zoneinfo/America/Chicago create mode 100644 lib/pytz/zoneinfo/America/Chihuahua create mode 100644 lib/pytz/zoneinfo/America/Coral_Harbour create mode 100644 lib/pytz/zoneinfo/America/Cordoba create mode 100644 lib/pytz/zoneinfo/America/Costa_Rica create mode 100644 lib/pytz/zoneinfo/America/Creston create mode 100644 lib/pytz/zoneinfo/America/Cuiaba create mode 100644 lib/pytz/zoneinfo/America/Curacao create mode 100644 lib/pytz/zoneinfo/America/Danmarkshavn create mode 100644 lib/pytz/zoneinfo/America/Dawson create mode 100644 lib/pytz/zoneinfo/America/Dawson_Creek create mode 100644 lib/pytz/zoneinfo/America/Denver create mode 100644 lib/pytz/zoneinfo/America/Detroit create mode 100644 lib/pytz/zoneinfo/America/Dominica create mode 100644 lib/pytz/zoneinfo/America/Edmonton create mode 100644 lib/pytz/zoneinfo/America/Eirunepe create mode 100644 lib/pytz/zoneinfo/America/El_Salvador create mode 100644 lib/pytz/zoneinfo/America/Ensenada create mode 100644 lib/pytz/zoneinfo/America/Fort_Wayne create mode 100644 lib/pytz/zoneinfo/America/Fortaleza create mode 100644 lib/pytz/zoneinfo/America/Glace_Bay create mode 100644 lib/pytz/zoneinfo/America/Godthab create mode 100644 lib/pytz/zoneinfo/America/Goose_Bay create mode 100644 lib/pytz/zoneinfo/America/Grand_Turk create mode 100644 lib/pytz/zoneinfo/America/Grenada create mode 100644 lib/pytz/zoneinfo/America/Guadeloupe create mode 100644 lib/pytz/zoneinfo/America/Guatemala create mode 100644 lib/pytz/zoneinfo/America/Guayaquil create mode 100644 lib/pytz/zoneinfo/America/Guyana create mode 100644 lib/pytz/zoneinfo/America/Halifax create mode 100644 lib/pytz/zoneinfo/America/Havana create mode 100644 lib/pytz/zoneinfo/America/Hermosillo create mode 100644 lib/pytz/zoneinfo/America/Indiana/Indianapolis create mode 100644 lib/pytz/zoneinfo/America/Indiana/Knox create mode 100644 lib/pytz/zoneinfo/America/Indiana/Marengo create mode 100644 lib/pytz/zoneinfo/America/Indiana/Petersburg create mode 100644 lib/pytz/zoneinfo/America/Indiana/Tell_City create mode 100644 lib/pytz/zoneinfo/America/Indiana/Vevay 
create mode 100644 lib/pytz/zoneinfo/America/Indiana/Vincennes create mode 100644 lib/pytz/zoneinfo/America/Indiana/Winamac create mode 100644 lib/pytz/zoneinfo/America/Indianapolis create mode 100644 lib/pytz/zoneinfo/America/Inuvik create mode 100644 lib/pytz/zoneinfo/America/Iqaluit create mode 100644 lib/pytz/zoneinfo/America/Jamaica create mode 100644 lib/pytz/zoneinfo/America/Jujuy create mode 100644 lib/pytz/zoneinfo/America/Juneau create mode 100644 lib/pytz/zoneinfo/America/Kentucky/Louisville create mode 100644 lib/pytz/zoneinfo/America/Kentucky/Monticello create mode 100644 lib/pytz/zoneinfo/America/Knox_IN create mode 100644 lib/pytz/zoneinfo/America/Kralendijk create mode 100644 lib/pytz/zoneinfo/America/La_Paz create mode 100644 lib/pytz/zoneinfo/America/Lima create mode 100644 lib/pytz/zoneinfo/America/Los_Angeles create mode 100644 lib/pytz/zoneinfo/America/Louisville create mode 100644 lib/pytz/zoneinfo/America/Lower_Princes create mode 100644 lib/pytz/zoneinfo/America/Maceio create mode 100644 lib/pytz/zoneinfo/America/Managua create mode 100644 lib/pytz/zoneinfo/America/Manaus create mode 100644 lib/pytz/zoneinfo/America/Marigot create mode 100644 lib/pytz/zoneinfo/America/Martinique create mode 100644 lib/pytz/zoneinfo/America/Matamoros create mode 100644 lib/pytz/zoneinfo/America/Mazatlan create mode 100644 lib/pytz/zoneinfo/America/Mendoza create mode 100644 lib/pytz/zoneinfo/America/Menominee create mode 100644 lib/pytz/zoneinfo/America/Merida create mode 100644 lib/pytz/zoneinfo/America/Metlakatla create mode 100644 lib/pytz/zoneinfo/America/Mexico_City create mode 100644 lib/pytz/zoneinfo/America/Miquelon create mode 100644 lib/pytz/zoneinfo/America/Moncton create mode 100644 lib/pytz/zoneinfo/America/Monterrey create mode 100644 lib/pytz/zoneinfo/America/Montevideo create mode 100644 lib/pytz/zoneinfo/America/Montreal create mode 100644 lib/pytz/zoneinfo/America/Montserrat create mode 100644 lib/pytz/zoneinfo/America/Nassau create mode 100644 lib/pytz/zoneinfo/America/New_York create mode 100644 lib/pytz/zoneinfo/America/Nipigon create mode 100644 lib/pytz/zoneinfo/America/Nome create mode 100644 lib/pytz/zoneinfo/America/Noronha create mode 100644 lib/pytz/zoneinfo/America/North_Dakota/Beulah create mode 100644 lib/pytz/zoneinfo/America/North_Dakota/Center create mode 100644 lib/pytz/zoneinfo/America/North_Dakota/New_Salem create mode 100644 lib/pytz/zoneinfo/America/Ojinaga create mode 100644 lib/pytz/zoneinfo/America/Panama create mode 100644 lib/pytz/zoneinfo/America/Pangnirtung create mode 100644 lib/pytz/zoneinfo/America/Paramaribo create mode 100644 lib/pytz/zoneinfo/America/Phoenix create mode 100644 lib/pytz/zoneinfo/America/Port-au-Prince create mode 100644 lib/pytz/zoneinfo/America/Port_of_Spain create mode 100644 lib/pytz/zoneinfo/America/Porto_Acre create mode 100644 lib/pytz/zoneinfo/America/Porto_Velho create mode 100644 lib/pytz/zoneinfo/America/Puerto_Rico create mode 100644 lib/pytz/zoneinfo/America/Rainy_River create mode 100644 lib/pytz/zoneinfo/America/Rankin_Inlet create mode 100644 lib/pytz/zoneinfo/America/Recife create mode 100644 lib/pytz/zoneinfo/America/Regina create mode 100644 lib/pytz/zoneinfo/America/Resolute create mode 100644 lib/pytz/zoneinfo/America/Rio_Branco create mode 100644 lib/pytz/zoneinfo/America/Rosario create mode 100644 lib/pytz/zoneinfo/America/Santa_Isabel create mode 100644 lib/pytz/zoneinfo/America/Santarem create mode 100644 lib/pytz/zoneinfo/America/Santiago create mode 100644 
lib/pytz/zoneinfo/America/Santo_Domingo create mode 100644 lib/pytz/zoneinfo/America/Sao_Paulo create mode 100644 lib/pytz/zoneinfo/America/Scoresbysund create mode 100644 lib/pytz/zoneinfo/America/Shiprock create mode 100644 lib/pytz/zoneinfo/America/Sitka create mode 100644 lib/pytz/zoneinfo/America/St_Barthelemy create mode 100644 lib/pytz/zoneinfo/America/St_Johns create mode 100644 lib/pytz/zoneinfo/America/St_Kitts create mode 100644 lib/pytz/zoneinfo/America/St_Lucia create mode 100644 lib/pytz/zoneinfo/America/St_Thomas create mode 100644 lib/pytz/zoneinfo/America/St_Vincent create mode 100644 lib/pytz/zoneinfo/America/Swift_Current create mode 100644 lib/pytz/zoneinfo/America/Tegucigalpa create mode 100644 lib/pytz/zoneinfo/America/Thule create mode 100644 lib/pytz/zoneinfo/America/Thunder_Bay create mode 100644 lib/pytz/zoneinfo/America/Tijuana create mode 100644 lib/pytz/zoneinfo/America/Toronto create mode 100644 lib/pytz/zoneinfo/America/Tortola create mode 100644 lib/pytz/zoneinfo/America/Vancouver create mode 100644 lib/pytz/zoneinfo/America/Virgin create mode 100644 lib/pytz/zoneinfo/America/Whitehorse create mode 100644 lib/pytz/zoneinfo/America/Winnipeg create mode 100644 lib/pytz/zoneinfo/America/Yakutat create mode 100644 lib/pytz/zoneinfo/America/Yellowknife create mode 100644 lib/pytz/zoneinfo/Antarctica/Casey create mode 100644 lib/pytz/zoneinfo/Antarctica/Davis create mode 100644 lib/pytz/zoneinfo/Antarctica/DumontDUrville create mode 100644 lib/pytz/zoneinfo/Antarctica/Macquarie create mode 100644 lib/pytz/zoneinfo/Antarctica/Mawson create mode 100644 lib/pytz/zoneinfo/Antarctica/McMurdo create mode 100644 lib/pytz/zoneinfo/Antarctica/Palmer create mode 100644 lib/pytz/zoneinfo/Antarctica/Rothera create mode 100644 lib/pytz/zoneinfo/Antarctica/South_Pole create mode 100644 lib/pytz/zoneinfo/Antarctica/Syowa create mode 100644 lib/pytz/zoneinfo/Antarctica/Troll create mode 100644 lib/pytz/zoneinfo/Antarctica/Vostok create mode 100644 lib/pytz/zoneinfo/Arctic/Longyearbyen create mode 100644 lib/pytz/zoneinfo/Asia/Aden create mode 100644 lib/pytz/zoneinfo/Asia/Almaty create mode 100644 lib/pytz/zoneinfo/Asia/Amman create mode 100644 lib/pytz/zoneinfo/Asia/Anadyr create mode 100644 lib/pytz/zoneinfo/Asia/Aqtau create mode 100644 lib/pytz/zoneinfo/Asia/Aqtobe create mode 100644 lib/pytz/zoneinfo/Asia/Ashgabat create mode 100644 lib/pytz/zoneinfo/Asia/Ashkhabad create mode 100644 lib/pytz/zoneinfo/Asia/Baghdad create mode 100644 lib/pytz/zoneinfo/Asia/Bahrain create mode 100644 lib/pytz/zoneinfo/Asia/Baku create mode 100644 lib/pytz/zoneinfo/Asia/Bangkok create mode 100644 lib/pytz/zoneinfo/Asia/Beirut create mode 100644 lib/pytz/zoneinfo/Asia/Bishkek create mode 100644 lib/pytz/zoneinfo/Asia/Brunei create mode 100644 lib/pytz/zoneinfo/Asia/Calcutta create mode 100644 lib/pytz/zoneinfo/Asia/Chita create mode 100644 lib/pytz/zoneinfo/Asia/Choibalsan create mode 100644 lib/pytz/zoneinfo/Asia/Chongqing create mode 100644 lib/pytz/zoneinfo/Asia/Chungking create mode 100644 lib/pytz/zoneinfo/Asia/Colombo create mode 100644 lib/pytz/zoneinfo/Asia/Dacca create mode 100644 lib/pytz/zoneinfo/Asia/Damascus create mode 100644 lib/pytz/zoneinfo/Asia/Dhaka create mode 100644 lib/pytz/zoneinfo/Asia/Dili create mode 100644 lib/pytz/zoneinfo/Asia/Dubai create mode 100644 lib/pytz/zoneinfo/Asia/Dushanbe create mode 100644 lib/pytz/zoneinfo/Asia/Gaza create mode 100644 lib/pytz/zoneinfo/Asia/Harbin create mode 100644 lib/pytz/zoneinfo/Asia/Hebron create mode 100644 
lib/pytz/zoneinfo/Asia/Ho_Chi_Minh create mode 100644 lib/pytz/zoneinfo/Asia/Hong_Kong create mode 100644 lib/pytz/zoneinfo/Asia/Hovd create mode 100644 lib/pytz/zoneinfo/Asia/Irkutsk create mode 100644 lib/pytz/zoneinfo/Asia/Istanbul create mode 100644 lib/pytz/zoneinfo/Asia/Jakarta create mode 100644 lib/pytz/zoneinfo/Asia/Jayapura create mode 100644 lib/pytz/zoneinfo/Asia/Jerusalem create mode 100644 lib/pytz/zoneinfo/Asia/Kabul create mode 100644 lib/pytz/zoneinfo/Asia/Kamchatka create mode 100644 lib/pytz/zoneinfo/Asia/Karachi create mode 100644 lib/pytz/zoneinfo/Asia/Kashgar create mode 100644 lib/pytz/zoneinfo/Asia/Kathmandu create mode 100644 lib/pytz/zoneinfo/Asia/Katmandu create mode 100644 lib/pytz/zoneinfo/Asia/Khandyga create mode 100644 lib/pytz/zoneinfo/Asia/Kolkata create mode 100644 lib/pytz/zoneinfo/Asia/Krasnoyarsk create mode 100644 lib/pytz/zoneinfo/Asia/Kuala_Lumpur create mode 100644 lib/pytz/zoneinfo/Asia/Kuching create mode 100644 lib/pytz/zoneinfo/Asia/Kuwait create mode 100644 lib/pytz/zoneinfo/Asia/Macao create mode 100644 lib/pytz/zoneinfo/Asia/Macau create mode 100644 lib/pytz/zoneinfo/Asia/Magadan create mode 100644 lib/pytz/zoneinfo/Asia/Makassar create mode 100644 lib/pytz/zoneinfo/Asia/Manila create mode 100644 lib/pytz/zoneinfo/Asia/Muscat create mode 100644 lib/pytz/zoneinfo/Asia/Nicosia create mode 100644 lib/pytz/zoneinfo/Asia/Novokuznetsk create mode 100644 lib/pytz/zoneinfo/Asia/Novosibirsk create mode 100644 lib/pytz/zoneinfo/Asia/Omsk create mode 100644 lib/pytz/zoneinfo/Asia/Oral create mode 100644 lib/pytz/zoneinfo/Asia/Phnom_Penh create mode 100644 lib/pytz/zoneinfo/Asia/Pontianak create mode 100644 lib/pytz/zoneinfo/Asia/Pyongyang create mode 100644 lib/pytz/zoneinfo/Asia/Qatar create mode 100644 lib/pytz/zoneinfo/Asia/Qyzylorda create mode 100644 lib/pytz/zoneinfo/Asia/Rangoon create mode 100644 lib/pytz/zoneinfo/Asia/Riyadh create mode 100644 lib/pytz/zoneinfo/Asia/Saigon create mode 100644 lib/pytz/zoneinfo/Asia/Sakhalin create mode 100644 lib/pytz/zoneinfo/Asia/Samarkand create mode 100644 lib/pytz/zoneinfo/Asia/Seoul create mode 100644 lib/pytz/zoneinfo/Asia/Shanghai create mode 100644 lib/pytz/zoneinfo/Asia/Singapore create mode 100644 lib/pytz/zoneinfo/Asia/Srednekolymsk create mode 100644 lib/pytz/zoneinfo/Asia/Taipei create mode 100644 lib/pytz/zoneinfo/Asia/Tashkent create mode 100644 lib/pytz/zoneinfo/Asia/Tbilisi create mode 100644 lib/pytz/zoneinfo/Asia/Tehran create mode 100644 lib/pytz/zoneinfo/Asia/Tel_Aviv create mode 100644 lib/pytz/zoneinfo/Asia/Thimbu create mode 100644 lib/pytz/zoneinfo/Asia/Thimphu create mode 100644 lib/pytz/zoneinfo/Asia/Tokyo create mode 100644 lib/pytz/zoneinfo/Asia/Ujung_Pandang create mode 100644 lib/pytz/zoneinfo/Asia/Ulaanbaatar create mode 100644 lib/pytz/zoneinfo/Asia/Ulan_Bator create mode 100644 lib/pytz/zoneinfo/Asia/Urumqi create mode 100644 lib/pytz/zoneinfo/Asia/Ust-Nera create mode 100644 lib/pytz/zoneinfo/Asia/Vientiane create mode 100644 lib/pytz/zoneinfo/Asia/Vladivostok create mode 100644 lib/pytz/zoneinfo/Asia/Yakutsk create mode 100644 lib/pytz/zoneinfo/Asia/Yekaterinburg create mode 100644 lib/pytz/zoneinfo/Asia/Yerevan create mode 100644 lib/pytz/zoneinfo/Atlantic/Azores create mode 100644 lib/pytz/zoneinfo/Atlantic/Bermuda create mode 100644 lib/pytz/zoneinfo/Atlantic/Canary create mode 100644 lib/pytz/zoneinfo/Atlantic/Cape_Verde create mode 100644 lib/pytz/zoneinfo/Atlantic/Faeroe create mode 100644 lib/pytz/zoneinfo/Atlantic/Faroe create mode 100644 
lib/pytz/zoneinfo/Atlantic/Jan_Mayen create mode 100644 lib/pytz/zoneinfo/Atlantic/Madeira create mode 100644 lib/pytz/zoneinfo/Atlantic/Reykjavik create mode 100644 lib/pytz/zoneinfo/Atlantic/South_Georgia create mode 100644 lib/pytz/zoneinfo/Atlantic/St_Helena create mode 100644 lib/pytz/zoneinfo/Atlantic/Stanley create mode 100644 lib/pytz/zoneinfo/Australia/ACT create mode 100644 lib/pytz/zoneinfo/Australia/Adelaide create mode 100644 lib/pytz/zoneinfo/Australia/Brisbane create mode 100644 lib/pytz/zoneinfo/Australia/Broken_Hill create mode 100644 lib/pytz/zoneinfo/Australia/Canberra create mode 100644 lib/pytz/zoneinfo/Australia/Currie create mode 100644 lib/pytz/zoneinfo/Australia/Darwin create mode 100644 lib/pytz/zoneinfo/Australia/Eucla create mode 100644 lib/pytz/zoneinfo/Australia/Hobart create mode 100644 lib/pytz/zoneinfo/Australia/LHI create mode 100644 lib/pytz/zoneinfo/Australia/Lindeman create mode 100644 lib/pytz/zoneinfo/Australia/Lord_Howe create mode 100644 lib/pytz/zoneinfo/Australia/Melbourne create mode 100644 lib/pytz/zoneinfo/Australia/NSW create mode 100644 lib/pytz/zoneinfo/Australia/North create mode 100644 lib/pytz/zoneinfo/Australia/Perth create mode 100644 lib/pytz/zoneinfo/Australia/Queensland create mode 100644 lib/pytz/zoneinfo/Australia/South create mode 100644 lib/pytz/zoneinfo/Australia/Sydney create mode 100644 lib/pytz/zoneinfo/Australia/Tasmania create mode 100644 lib/pytz/zoneinfo/Australia/Victoria create mode 100644 lib/pytz/zoneinfo/Australia/West create mode 100644 lib/pytz/zoneinfo/Australia/Yancowinna create mode 100644 lib/pytz/zoneinfo/Brazil/Acre create mode 100644 lib/pytz/zoneinfo/Brazil/DeNoronha create mode 100644 lib/pytz/zoneinfo/Brazil/East create mode 100644 lib/pytz/zoneinfo/Brazil/West create mode 100644 lib/pytz/zoneinfo/CET create mode 100644 lib/pytz/zoneinfo/CST6CDT create mode 100644 lib/pytz/zoneinfo/Canada/Atlantic create mode 100644 lib/pytz/zoneinfo/Canada/Central create mode 100644 lib/pytz/zoneinfo/Canada/East-Saskatchewan create mode 100644 lib/pytz/zoneinfo/Canada/Eastern create mode 100644 lib/pytz/zoneinfo/Canada/Mountain create mode 100644 lib/pytz/zoneinfo/Canada/Newfoundland create mode 100644 lib/pytz/zoneinfo/Canada/Pacific create mode 100644 lib/pytz/zoneinfo/Canada/Saskatchewan create mode 100644 lib/pytz/zoneinfo/Canada/Yukon create mode 100644 lib/pytz/zoneinfo/Chile/Continental create mode 100644 lib/pytz/zoneinfo/Chile/EasterIsland create mode 100644 lib/pytz/zoneinfo/Cuba create mode 100644 lib/pytz/zoneinfo/EET create mode 100644 lib/pytz/zoneinfo/EST create mode 100644 lib/pytz/zoneinfo/EST5EDT create mode 100644 lib/pytz/zoneinfo/Egypt create mode 100644 lib/pytz/zoneinfo/Eire create mode 100644 lib/pytz/zoneinfo/Etc/GMT create mode 100644 lib/pytz/zoneinfo/Etc/GMT+0 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+1 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+10 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+11 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+12 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+2 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+3 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+4 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+5 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+6 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+7 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+8 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+9 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-0 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-1 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-10 create mode 100644 
lib/pytz/zoneinfo/Etc/GMT-11 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-12 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-13 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-14 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-2 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-3 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-4 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-5 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-6 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-7 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-8 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-9 create mode 100644 lib/pytz/zoneinfo/Etc/GMT0 create mode 100644 lib/pytz/zoneinfo/Etc/Greenwich create mode 100644 lib/pytz/zoneinfo/Etc/UCT create mode 100644 lib/pytz/zoneinfo/Etc/UTC create mode 100644 lib/pytz/zoneinfo/Etc/Universal create mode 100644 lib/pytz/zoneinfo/Etc/Zulu create mode 100644 lib/pytz/zoneinfo/Europe/Amsterdam create mode 100644 lib/pytz/zoneinfo/Europe/Andorra create mode 100644 lib/pytz/zoneinfo/Europe/Athens create mode 100644 lib/pytz/zoneinfo/Europe/Belfast create mode 100644 lib/pytz/zoneinfo/Europe/Belgrade create mode 100644 lib/pytz/zoneinfo/Europe/Berlin create mode 100644 lib/pytz/zoneinfo/Europe/Bratislava create mode 100644 lib/pytz/zoneinfo/Europe/Brussels create mode 100644 lib/pytz/zoneinfo/Europe/Bucharest create mode 100644 lib/pytz/zoneinfo/Europe/Budapest create mode 100644 lib/pytz/zoneinfo/Europe/Busingen create mode 100644 lib/pytz/zoneinfo/Europe/Chisinau create mode 100644 lib/pytz/zoneinfo/Europe/Copenhagen create mode 100644 lib/pytz/zoneinfo/Europe/Dublin create mode 100644 lib/pytz/zoneinfo/Europe/Gibraltar create mode 100644 lib/pytz/zoneinfo/Europe/Guernsey create mode 100644 lib/pytz/zoneinfo/Europe/Helsinki create mode 100644 lib/pytz/zoneinfo/Europe/Isle_of_Man create mode 100644 lib/pytz/zoneinfo/Europe/Istanbul create mode 100644 lib/pytz/zoneinfo/Europe/Jersey create mode 100644 lib/pytz/zoneinfo/Europe/Kaliningrad create mode 100644 lib/pytz/zoneinfo/Europe/Kiev create mode 100644 lib/pytz/zoneinfo/Europe/Lisbon create mode 100644 lib/pytz/zoneinfo/Europe/Ljubljana create mode 100644 lib/pytz/zoneinfo/Europe/London create mode 100644 lib/pytz/zoneinfo/Europe/Luxembourg create mode 100644 lib/pytz/zoneinfo/Europe/Madrid create mode 100644 lib/pytz/zoneinfo/Europe/Malta create mode 100644 lib/pytz/zoneinfo/Europe/Mariehamn create mode 100644 lib/pytz/zoneinfo/Europe/Minsk create mode 100644 lib/pytz/zoneinfo/Europe/Monaco create mode 100644 lib/pytz/zoneinfo/Europe/Moscow create mode 100644 lib/pytz/zoneinfo/Europe/Nicosia create mode 100644 lib/pytz/zoneinfo/Europe/Oslo create mode 100644 lib/pytz/zoneinfo/Europe/Paris create mode 100644 lib/pytz/zoneinfo/Europe/Podgorica create mode 100644 lib/pytz/zoneinfo/Europe/Prague create mode 100644 lib/pytz/zoneinfo/Europe/Riga create mode 100644 lib/pytz/zoneinfo/Europe/Rome create mode 100644 lib/pytz/zoneinfo/Europe/Samara create mode 100644 lib/pytz/zoneinfo/Europe/San_Marino create mode 100644 lib/pytz/zoneinfo/Europe/Sarajevo create mode 100644 lib/pytz/zoneinfo/Europe/Simferopol create mode 100644 lib/pytz/zoneinfo/Europe/Skopje create mode 100644 lib/pytz/zoneinfo/Europe/Sofia create mode 100644 lib/pytz/zoneinfo/Europe/Stockholm create mode 100644 lib/pytz/zoneinfo/Europe/Tallinn create mode 100644 lib/pytz/zoneinfo/Europe/Tirane create mode 100644 lib/pytz/zoneinfo/Europe/Tiraspol create mode 100644 lib/pytz/zoneinfo/Europe/Uzhgorod create mode 100644 lib/pytz/zoneinfo/Europe/Vaduz create mode 100644 lib/pytz/zoneinfo/Europe/Vatican 
create mode 100644 lib/pytz/zoneinfo/Europe/Vienna create mode 100644 lib/pytz/zoneinfo/Europe/Vilnius create mode 100644 lib/pytz/zoneinfo/Europe/Volgograd create mode 100644 lib/pytz/zoneinfo/Europe/Warsaw create mode 100644 lib/pytz/zoneinfo/Europe/Zagreb create mode 100644 lib/pytz/zoneinfo/Europe/Zaporozhye create mode 100644 lib/pytz/zoneinfo/Europe/Zurich create mode 100644 lib/pytz/zoneinfo/Factory create mode 100644 lib/pytz/zoneinfo/GB create mode 100644 lib/pytz/zoneinfo/GB-Eire create mode 100644 lib/pytz/zoneinfo/GMT create mode 100644 lib/pytz/zoneinfo/GMT+0 create mode 100644 lib/pytz/zoneinfo/GMT-0 create mode 100644 lib/pytz/zoneinfo/GMT0 create mode 100644 lib/pytz/zoneinfo/Greenwich create mode 100644 lib/pytz/zoneinfo/HST create mode 100644 lib/pytz/zoneinfo/Hongkong create mode 100644 lib/pytz/zoneinfo/Iceland create mode 100644 lib/pytz/zoneinfo/Indian/Antananarivo create mode 100644 lib/pytz/zoneinfo/Indian/Chagos create mode 100644 lib/pytz/zoneinfo/Indian/Christmas create mode 100644 lib/pytz/zoneinfo/Indian/Cocos create mode 100644 lib/pytz/zoneinfo/Indian/Comoro create mode 100644 lib/pytz/zoneinfo/Indian/Kerguelen create mode 100644 lib/pytz/zoneinfo/Indian/Mahe create mode 100644 lib/pytz/zoneinfo/Indian/Maldives create mode 100644 lib/pytz/zoneinfo/Indian/Mauritius create mode 100644 lib/pytz/zoneinfo/Indian/Mayotte create mode 100644 lib/pytz/zoneinfo/Indian/Reunion create mode 100644 lib/pytz/zoneinfo/Iran create mode 100644 lib/pytz/zoneinfo/Israel create mode 100644 lib/pytz/zoneinfo/Jamaica create mode 100644 lib/pytz/zoneinfo/Japan create mode 100644 lib/pytz/zoneinfo/Kwajalein create mode 100644 lib/pytz/zoneinfo/Libya create mode 100644 lib/pytz/zoneinfo/MET create mode 100644 lib/pytz/zoneinfo/MST create mode 100644 lib/pytz/zoneinfo/MST7MDT create mode 100644 lib/pytz/zoneinfo/Mexico/BajaNorte create mode 100644 lib/pytz/zoneinfo/Mexico/BajaSur create mode 100644 lib/pytz/zoneinfo/Mexico/General create mode 100644 lib/pytz/zoneinfo/NZ create mode 100644 lib/pytz/zoneinfo/NZ-CHAT create mode 100644 lib/pytz/zoneinfo/Navajo create mode 100644 lib/pytz/zoneinfo/PRC create mode 100644 lib/pytz/zoneinfo/PST8PDT create mode 100644 lib/pytz/zoneinfo/Pacific/Apia create mode 100644 lib/pytz/zoneinfo/Pacific/Auckland create mode 100644 lib/pytz/zoneinfo/Pacific/Bougainville create mode 100644 lib/pytz/zoneinfo/Pacific/Chatham create mode 100644 lib/pytz/zoneinfo/Pacific/Chuuk create mode 100644 lib/pytz/zoneinfo/Pacific/Easter create mode 100644 lib/pytz/zoneinfo/Pacific/Efate create mode 100644 lib/pytz/zoneinfo/Pacific/Enderbury create mode 100644 lib/pytz/zoneinfo/Pacific/Fakaofo create mode 100644 lib/pytz/zoneinfo/Pacific/Fiji create mode 100644 lib/pytz/zoneinfo/Pacific/Funafuti create mode 100644 lib/pytz/zoneinfo/Pacific/Galapagos create mode 100644 lib/pytz/zoneinfo/Pacific/Gambier create mode 100644 lib/pytz/zoneinfo/Pacific/Guadalcanal create mode 100644 lib/pytz/zoneinfo/Pacific/Guam create mode 100644 lib/pytz/zoneinfo/Pacific/Honolulu create mode 100644 lib/pytz/zoneinfo/Pacific/Johnston create mode 100644 lib/pytz/zoneinfo/Pacific/Kiritimati create mode 100644 lib/pytz/zoneinfo/Pacific/Kosrae create mode 100644 lib/pytz/zoneinfo/Pacific/Kwajalein create mode 100644 lib/pytz/zoneinfo/Pacific/Majuro create mode 100644 lib/pytz/zoneinfo/Pacific/Marquesas create mode 100644 lib/pytz/zoneinfo/Pacific/Midway create mode 100644 lib/pytz/zoneinfo/Pacific/Nauru create mode 100644 lib/pytz/zoneinfo/Pacific/Niue create mode 100644 
lib/pytz/zoneinfo/Pacific/Norfolk create mode 100644 lib/pytz/zoneinfo/Pacific/Noumea create mode 100644 lib/pytz/zoneinfo/Pacific/Pago_Pago create mode 100644 lib/pytz/zoneinfo/Pacific/Palau create mode 100644 lib/pytz/zoneinfo/Pacific/Pitcairn create mode 100644 lib/pytz/zoneinfo/Pacific/Pohnpei create mode 100644 lib/pytz/zoneinfo/Pacific/Ponape create mode 100644 lib/pytz/zoneinfo/Pacific/Port_Moresby create mode 100644 lib/pytz/zoneinfo/Pacific/Rarotonga create mode 100644 lib/pytz/zoneinfo/Pacific/Saipan create mode 100644 lib/pytz/zoneinfo/Pacific/Samoa create mode 100644 lib/pytz/zoneinfo/Pacific/Tahiti create mode 100644 lib/pytz/zoneinfo/Pacific/Tarawa create mode 100644 lib/pytz/zoneinfo/Pacific/Tongatapu create mode 100644 lib/pytz/zoneinfo/Pacific/Truk create mode 100644 lib/pytz/zoneinfo/Pacific/Wake create mode 100644 lib/pytz/zoneinfo/Pacific/Wallis create mode 100644 lib/pytz/zoneinfo/Pacific/Yap create mode 100644 lib/pytz/zoneinfo/Poland create mode 100644 lib/pytz/zoneinfo/Portugal create mode 100644 lib/pytz/zoneinfo/ROC create mode 100644 lib/pytz/zoneinfo/ROK create mode 100644 lib/pytz/zoneinfo/Singapore create mode 100644 lib/pytz/zoneinfo/Turkey create mode 100644 lib/pytz/zoneinfo/UCT create mode 100644 lib/pytz/zoneinfo/US/Alaska create mode 100644 lib/pytz/zoneinfo/US/Aleutian create mode 100644 lib/pytz/zoneinfo/US/Arizona create mode 100644 lib/pytz/zoneinfo/US/Central create mode 100644 lib/pytz/zoneinfo/US/East-Indiana create mode 100644 lib/pytz/zoneinfo/US/Eastern create mode 100644 lib/pytz/zoneinfo/US/Hawaii create mode 100644 lib/pytz/zoneinfo/US/Indiana-Starke create mode 100644 lib/pytz/zoneinfo/US/Michigan create mode 100644 lib/pytz/zoneinfo/US/Mountain create mode 100644 lib/pytz/zoneinfo/US/Pacific create mode 100644 lib/pytz/zoneinfo/US/Pacific-New create mode 100644 lib/pytz/zoneinfo/US/Samoa create mode 100644 lib/pytz/zoneinfo/UTC create mode 100644 lib/pytz/zoneinfo/Universal create mode 100644 lib/pytz/zoneinfo/W-SU create mode 100644 lib/pytz/zoneinfo/WET create mode 100644 lib/pytz/zoneinfo/Zulu create mode 100644 lib/pytz/zoneinfo/iso3166.tab create mode 100644 lib/pytz/zoneinfo/localtime create mode 100644 lib/pytz/zoneinfo/posixrules create mode 100644 lib/pytz/zoneinfo/zone.tab create mode 100644 lib/pytz/zoneinfo/zone1970.tab create mode 100644 lib/tzlocal/LICENSE.txt create mode 100644 lib/tzlocal/README.rst create mode 100644 lib/tzlocal/__init__.py create mode 100644 lib/tzlocal/darwin.py create mode 100644 lib/tzlocal/tests.py create mode 100644 lib/tzlocal/unix.py create mode 100644 lib/tzlocal/win32.py create mode 100644 lib/tzlocal/windows_tz.py diff --git a/Mylar.py b/Mylar.py index ab6e7f15..73e9bc8e 100644 --- a/Mylar.py +++ b/Mylar.py @@ -63,7 +63,7 @@ def main(): mylar.SYS_ENCODING = 'UTF-8' # Set up and gather command line arguments - parser = argparse.ArgumentParser(description='Comic Book add-on for SABnzbd+') + parser = argparse.ArgumentParser(description='Automated Comic Book Downloader') parser.add_argument('-v', '--verbose', action='store_true', help='Increase console logging verbosity') parser.add_argument('-q', '--quiet', action='store_true', help='Turn off console logging') diff --git a/data/interfaces/default/comicdetails.html b/data/interfaces/default/comicdetails.html old mode 100644 new mode 100755 index 4114edbf..ebf9581d --- a/data/interfaces/default/comicdetails.html +++ b/data/interfaces/default/comicdetails.html @@ -24,7 +24,7 @@ %if mylar.RENAME_FILES: Rename Files %endif - Recheck Files + Recheck 
Files %if mylar.ENABLE_META: Manual MetaTagging %endif @@ -299,7 +299,7 @@
Mark selected issues as - @@ -414,7 +414,7 @@ %else: - + %endif Download Manually check for issues
@@ -90,16 +93,14 @@ %if pullfilter is True: ${weekly['PUBLISHER']} - %if weekly['HAVEIT'] == 'No': - %if weekly['COMICID'] != '' and weekly['COMICID'] is not None: + %if any([weekly['HAVEIT'] == 'No', weekly['HAVEIT'] == 'OneOff']): + %if any([weekly['COMICID'] != '', weekly['COMICID'] is not None]): ${weekly['COMIC']} %else: ${weekly['COMIC']} %endif - %elif weekly['HAVEIT'] == 'OneOff': - ${weekly['COMIC']} %else: - ${weekly['COMIC']} + ${weekly['COMIC']} %endif ${weekly['ISSUE']} diff --git a/lib/apscheduler/__init__.py b/lib/apscheduler/__init__.py index 11e93a1d..89965478 100644 --- a/lib/apscheduler/__init__.py +++ b/lib/apscheduler/__init__.py @@ -1,3 +1,8 @@ -version_info = (2, 0, 0) -version = '.'.join(str(n) for n in version_info[:3]) -release = version + ''.join(str(n) for n in version_info[3:]) +# These will be removed in APScheduler 4.0. +#release = __import__('pkg_resources').get_distribution('APScheduler').version.split('-')[0] +#version_info = tuple(int(x) if x.isdigit() else x for x in release.split('.')) +#version = __version__ = '.'.join(str(x) for x in version_info[:3]) + +version_info = (3, 3, 1) +release = '3.3.1' +version = __version__ = '3.3.1' diff --git a/lib/apscheduler/events.py b/lib/apscheduler/events.py index 80bde8e6..890763eb 100644 --- a/lib/apscheduler/events.py +++ b/lib/apscheduler/events.py @@ -1,63 +1,93 @@ -__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN', - 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', - 'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED', - 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', - 'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent') +__all__ = ('EVENT_SCHEDULER_STARTED', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_SCHEDULER_PAUSED', + 'EVENT_SCHEDULER_RESUMED', 'EVENT_EXECUTOR_ADDED', 'EVENT_EXECUTOR_REMOVED', + 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED', 'EVENT_ALL_JOBS_REMOVED', + 'EVENT_JOB_ADDED', 'EVENT_JOB_REMOVED', 'EVENT_JOB_MODIFIED', 'EVENT_JOB_EXECUTED', + 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED', 'EVENT_JOB_SUBMITTED', 'EVENT_JOB_MAX_INSTANCES', + 'SchedulerEvent', 'JobEvent', 'JobExecutionEvent') -EVENT_SCHEDULER_START = 1 # The scheduler was started -EVENT_SCHEDULER_SHUTDOWN = 2 # The scheduler was shut down -EVENT_JOBSTORE_ADDED = 4 # A job store was added to the scheduler -EVENT_JOBSTORE_REMOVED = 8 # A job store was removed from the scheduler -EVENT_JOBSTORE_JOB_ADDED = 16 # A job was added to a job store -EVENT_JOBSTORE_JOB_REMOVED = 32 # A job was removed from a job store -EVENT_JOB_EXECUTED = 64 # A job was executed successfully -EVENT_JOB_ERROR = 128 # A job raised an exception during execution -EVENT_JOB_MISSED = 256 # A job's execution was missed -EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN | - EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | - EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED | - EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED) +EVENT_SCHEDULER_STARTED = EVENT_SCHEDULER_START = 2 ** 0 +EVENT_SCHEDULER_SHUTDOWN = 2 ** 1 +EVENT_SCHEDULER_PAUSED = 2 ** 2 +EVENT_SCHEDULER_RESUMED = 2 ** 3 +EVENT_EXECUTOR_ADDED = 2 ** 4 +EVENT_EXECUTOR_REMOVED = 2 ** 5 +EVENT_JOBSTORE_ADDED = 2 ** 6 +EVENT_JOBSTORE_REMOVED = 2 ** 7 +EVENT_ALL_JOBS_REMOVED = 2 ** 8 +EVENT_JOB_ADDED = 2 ** 9 +EVENT_JOB_REMOVED = 2 ** 10 +EVENT_JOB_MODIFIED = 2 ** 11 +EVENT_JOB_EXECUTED = 2 ** 12 +EVENT_JOB_ERROR = 2 ** 13 +EVENT_JOB_MISSED = 2 ** 14 +EVENT_JOB_SUBMITTED = 2 ** 15 +EVENT_JOB_MAX_INSTANCES = 2 ** 16 +EVENT_ALL = (EVENT_SCHEDULER_STARTED | 
EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULER_PAUSED | + EVENT_SCHEDULER_RESUMED | EVENT_EXECUTOR_ADDED | EVENT_EXECUTOR_REMOVED | + EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED | EVENT_ALL_JOBS_REMOVED | + EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | + EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES) class SchedulerEvent(object): """ An event that concerns the scheduler itself. - :var code: the type code of this event + :ivar code: the type code of this event + :ivar alias: alias of the job store or executor that was added or removed (if applicable) """ - def __init__(self, code): + + def __init__(self, code, alias=None): + super(SchedulerEvent, self).__init__() self.code = code - - -class JobStoreEvent(SchedulerEvent): - """ - An event that concerns job stores. - - :var alias: the alias of the job store involved - :var job: the new job if a job was added - """ - def __init__(self, code, alias, job=None): - SchedulerEvent.__init__(self, code) self.alias = alias - if job: - self.job = job + + def __repr__(self): + return '<%s (code=%d)>' % (self.__class__.__name__, self.code) class JobEvent(SchedulerEvent): """ - An event that concerns the execution of individual jobs. + An event that concerns a job. - :var job: the job instance in question - :var scheduled_run_time: the time when the job was scheduled to be run - :var retval: the return value of the successfully executed job - :var exception: the exception raised by the job - :var traceback: the traceback object associated with the exception + :ivar code: the type code of this event + :ivar job_id: identifier of the job in question + :ivar jobstore: alias of the job store containing the job in question """ - def __init__(self, code, job, scheduled_run_time, retval=None, - exception=None, traceback=None): - SchedulerEvent.__init__(self, code) - self.job = job + + def __init__(self, code, job_id, jobstore): + super(JobEvent, self).__init__(code) + self.code = code + self.job_id = job_id + self.jobstore = jobstore + + +class JobSubmissionEvent(JobEvent): + """ + An event that concerns the submission of a job to its executor. + + :ivar scheduled_run_times: a list of datetimes when the job was intended to run + """ + + def __init__(self, code, job_id, jobstore, scheduled_run_times): + super(JobSubmissionEvent, self).__init__(code, job_id, jobstore) + self.scheduled_run_times = scheduled_run_times + + +class JobExecutionEvent(JobEvent): + """ + An event that concerns the running of a job within its executor. 
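To illustrate the expanded event model introduced above, here is a minimal listener sketch against the vendored APScheduler 3.x API; the `tick` job, its id, and the five-second interval are invented for the example, and `BackgroundScheduler` comes from the bundled `apscheduler.schedulers.background` module rather than from this hunk:

    from datetime import datetime

    from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED
    from apscheduler.schedulers.background import BackgroundScheduler


    def tick():
        # placeholder job body
        print('tick at %s' % datetime.now())


    def on_job_event(event):
        # event is a JobExecutionEvent; inspect which constant fired
        if event.code == EVENT_JOB_ERROR:
            print('job %s raised %r' % (event.job_id, event.exception))
        else:
            print('job %s returned %r' % (event.job_id, event.retval))


    scheduler = BackgroundScheduler()
    scheduler.add_job(tick, 'interval', seconds=5, id='tick')
    # subscribe only to execution and error events (a bitmask of the constants above)
    scheduler.add_listener(on_job_event, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    scheduler.start()

Because every constant is a distinct power of two, listeners can be registered for any combination of events with a plain bitwise OR.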
+ + :ivar scheduled_run_time: the time when the job was scheduled to be run + :ivar retval: the return value of the successfully executed job + :ivar exception: the exception raised by the job + :ivar traceback: a formatted traceback for the exception + """ + + def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None, + traceback=None): + super(JobExecutionEvent, self).__init__(code, job_id, jobstore) self.scheduled_run_time = scheduled_run_time self.retval = retval self.exception = exception diff --git a/lib/apscheduler/executors/__init__.py b/lib/apscheduler/executors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/lib/apscheduler/executors/asyncio.py b/lib/apscheduler/executors/asyncio.py new file mode 100644 index 00000000..44794295 --- /dev/null +++ b/lib/apscheduler/executors/asyncio.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + +try: + from asyncio import iscoroutinefunction + from apscheduler.executors.base_py3 import run_coroutine_job +except ImportError: + from trollius import iscoroutinefunction + run_coroutine_job = None + + +class AsyncIOExecutor(BaseExecutor): + """ + Runs jobs in the default executor of the event loop. + + If the job function is a native coroutine function, it is scheduled to be run directly in the + event loop as soon as possible. All other functions are run in the event loop's default + executor which is usually a thread pool. + + Plugin alias: ``asyncio`` + """ + + def start(self, scheduler, alias): + super(AsyncIOExecutor, self).start(scheduler, alias) + self._eventloop = scheduler._eventloop + + def _do_submit_job(self, job, run_times): + def callback(f): + try: + events = f.result() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + if iscoroutinefunction(job.func): + if run_coroutine_job is not None: + coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name) + f = self._eventloop.create_task(coro) + else: + raise Exception('Executing coroutine based jobs is not supported with Trollius') + else: + f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times, + self._logger.name) + + f.add_done_callback(callback) diff --git a/lib/apscheduler/executors/base.py b/lib/apscheduler/executors/base.py new file mode 100644 index 00000000..b36a86fc --- /dev/null +++ b/lib/apscheduler/executors/base.py @@ -0,0 +1,137 @@ +from abc import ABCMeta, abstractmethod +from collections import defaultdict +from datetime import datetime, timedelta +from traceback import format_tb +import logging +import sys + +from pytz import utc +import six + +from apscheduler.events import ( + JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED) + + +class MaxInstancesReachedError(Exception): + def __init__(self, job): + super(MaxInstancesReachedError, self).__init__( + 'Job "%s" has already reached its maximum number of instances (%d)' % + (job.id, job.max_instances)) + + +class BaseExecutor(six.with_metaclass(ABCMeta, object)): + """Abstract base class that defines the interface that every executor must implement.""" + + _scheduler = None + _lock = None + _logger = logging.getLogger('apscheduler.executors') + + def __init__(self): + super(BaseExecutor, self).__init__() + self._instances = defaultdict(lambda: 0) + + def start(self, scheduler, alias): + """ + Called by the scheduler when the scheduler is 
being started or when the executor is being + added to an already running scheduler. + + :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting + this executor + :param str|unicode alias: alias of this executor as it was assigned to the scheduler + + """ + self._scheduler = scheduler + self._lock = scheduler._create_lock() + self._logger = logging.getLogger('apscheduler.executors.%s' % alias) + + def shutdown(self, wait=True): + """ + Shuts down this executor. + + :param bool wait: ``True`` to wait until all submitted jobs + have been executed + """ + + def submit_job(self, job, run_times): + """ + Submits job for execution. + + :param Job job: job to execute + :param list[datetime] run_times: list of datetimes specifying + when the job should have been run + :raises MaxInstancesReachedError: if the maximum number of + allowed instances for this job has been reached + + """ + assert self._lock is not None, 'This executor has not been started yet' + with self._lock: + if self._instances[job.id] >= job.max_instances: + raise MaxInstancesReachedError(job) + + self._do_submit_job(job, run_times) + self._instances[job.id] += 1 + + @abstractmethod + def _do_submit_job(self, job, run_times): + """Performs the actual task of scheduling `run_job` to be called.""" + + def _run_job_success(self, job_id, events): + """ + Called by the executor with the list of generated events when :func:`run_job` has been + successfully called. + + """ + with self._lock: + self._instances[job_id] -= 1 + if self._instances[job_id] == 0: + del self._instances[job_id] + + for event in events: + self._scheduler._dispatch_event(event) + + def _run_job_error(self, job_id, exc, traceback=None): + """Called by the executor with the exception if there is an error calling `run_job`.""" + with self._lock: + self._instances[job_id] -= 1 + if self._instances[job_id] == 0: + del self._instances[job_id] + + exc_info = (exc.__class__, exc, traceback) + self._logger.error('Error running job %s', job_id, exc_info=exc_info) + + +def run_job(job, jobstore_alias, run_times, logger_name): + """ + Called by executors to run the job. Returns a list of scheduler events to be dispatched by the + scheduler. 
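The misfire check performed in run_job above is driven entirely by per-job settings; the following is a hedged sketch of how a caller would set them when scheduling (the function name `nightly_maintenance` and the numbers are illustrative only, not taken from Mylar's code):

    from apscheduler.schedulers.background import BackgroundScheduler


    def nightly_maintenance():
        # placeholder for a long-running task
        pass


    scheduler = BackgroundScheduler()
    # If the scheduler wakes up more than 300 s after the scheduled time,
    # the run is reported as EVENT_JOB_MISSED instead of being executed;
    # coalesce=True collapses a backlog of due run times into a single run.
    scheduler.add_job(nightly_maintenance, 'cron', hour=3,
                      misfire_grace_time=300, coalesce=True,
                      max_instances=1, id='nightly_maintenance')
    scheduler.start()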
+ + """ + events = [] + logger = logging.getLogger(logger_name) + for run_time in run_times: + # See if the job missed its run time window, and handle + # possible misfires accordingly + if job.misfire_grace_time is not None: + difference = datetime.now(utc) - run_time + grace_time = timedelta(seconds=job.misfire_grace_time) + if difference > grace_time: + events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, + run_time)) + logger.warning('Run time of job "%s" was missed by %s', job, difference) + continue + + logger.info('Running job "%s" (scheduled at %s)', job, run_time) + try: + retval = job.func(*job.args, **job.kwargs) + except: + exc, tb = sys.exc_info()[1:] + formatted_tb = ''.join(format_tb(tb)) + events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, + exception=exc, traceback=formatted_tb)) + logger.exception('Job "%s" raised an exception', job) + else: + events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, + retval=retval)) + logger.info('Job "%s" executed successfully', job) + + return events diff --git a/lib/apscheduler/executors/base_py3.py b/lib/apscheduler/executors/base_py3.py new file mode 100644 index 00000000..47124258 --- /dev/null +++ b/lib/apscheduler/executors/base_py3.py @@ -0,0 +1,41 @@ +import logging +import sys +from datetime import datetime, timedelta +from traceback import format_tb + +from pytz import utc + +from apscheduler.events import ( + JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED) + + +async def run_coroutine_job(job, jobstore_alias, run_times, logger_name): + """Coroutine version of run_job().""" + events = [] + logger = logging.getLogger(logger_name) + for run_time in run_times: + # See if the job missed its run time window, and handle possible misfires accordingly + if job.misfire_grace_time is not None: + difference = datetime.now(utc) - run_time + grace_time = timedelta(seconds=job.misfire_grace_time) + if difference > grace_time: + events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias, + run_time)) + logger.warning('Run time of job "%s" was missed by %s', job, difference) + continue + + logger.info('Running job "%s" (scheduled at %s)', job, run_time) + try: + retval = await job.func(*job.args, **job.kwargs) + except: + exc, tb = sys.exc_info()[1:] + formatted_tb = ''.join(format_tb(tb)) + events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time, + exception=exc, traceback=formatted_tb)) + logger.exception('Job "%s" raised an exception', job) + else: + events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time, + retval=retval)) + logger.info('Job "%s" executed successfully', job) + + return events diff --git a/lib/apscheduler/executors/debug.py b/lib/apscheduler/executors/debug.py new file mode 100644 index 00000000..f6454d52 --- /dev/null +++ b/lib/apscheduler/executors/debug.py @@ -0,0 +1,20 @@ +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + + +class DebugExecutor(BaseExecutor): + """ + A special executor that executes the target callable directly instead of deferring it to a + thread or process. 
+ + Plugin alias: ``debug`` + """ + + def _do_submit_job(self, job, run_times): + try: + events = run_job(job, job._jobstore_alias, run_times, self._logger.name) + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) diff --git a/lib/apscheduler/executors/gevent.py b/lib/apscheduler/executors/gevent.py new file mode 100644 index 00000000..a12b806a --- /dev/null +++ b/lib/apscheduler/executors/gevent.py @@ -0,0 +1,30 @@ +from __future__ import absolute_import +import sys + +from apscheduler.executors.base import BaseExecutor, run_job + + +try: + import gevent +except ImportError: # pragma: nocover + raise ImportError('GeventExecutor requires gevent installed') + + +class GeventExecutor(BaseExecutor): + """ + Runs jobs as greenlets. + + Plugin alias: ``gevent`` + """ + + def _do_submit_job(self, job, run_times): + def callback(greenlet): + try: + events = greenlet.get() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + gevent.spawn(run_job, job, job._jobstore_alias, run_times, self._logger.name).\ + link(callback) diff --git a/lib/apscheduler/executors/pool.py b/lib/apscheduler/executors/pool.py new file mode 100644 index 00000000..2f4ef455 --- /dev/null +++ b/lib/apscheduler/executors/pool.py @@ -0,0 +1,54 @@ +from abc import abstractmethod +import concurrent.futures + +from apscheduler.executors.base import BaseExecutor, run_job + + +class BasePoolExecutor(BaseExecutor): + @abstractmethod + def __init__(self, pool): + super(BasePoolExecutor, self).__init__() + self._pool = pool + + def _do_submit_job(self, job, run_times): + def callback(f): + exc, tb = (f.exception_info() if hasattr(f, 'exception_info') else + (f.exception(), getattr(f.exception(), '__traceback__', None))) + if exc: + self._run_job_error(job.id, exc, tb) + else: + self._run_job_success(job.id, f.result()) + + f = self._pool.submit(run_job, job, job._jobstore_alias, run_times, self._logger.name) + f.add_done_callback(callback) + + def shutdown(self, wait=True): + self._pool.shutdown(wait) + + +class ThreadPoolExecutor(BasePoolExecutor): + """ + An executor that runs jobs in a concurrent.futures thread pool. + + Plugin alias: ``threadpool`` + + :param max_workers: the maximum number of spawned threads. + """ + + def __init__(self, max_workers=10): + pool = concurrent.futures.ThreadPoolExecutor(int(max_workers)) + super(ThreadPoolExecutor, self).__init__(pool) + + +class ProcessPoolExecutor(BasePoolExecutor): + """ + An executor that runs jobs in a concurrent.futures process pool. + + Plugin alias: ``processpool`` + + :param max_workers: the maximum number of spawned processes. 
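The pool executors above are normally selected through the scheduler's ``executors`` map rather than instantiated directly by jobs; a minimal configuration sketch follows (the worker counts and the `io_bound`/`cpu_bound` names are made up for illustration):

    from apscheduler.executors.pool import ProcessPoolExecutor, ThreadPoolExecutor
    from apscheduler.schedulers.background import BackgroundScheduler

    executors = {
        # 'default' handles any job that does not name an executor explicitly
        'default': ThreadPoolExecutor(max_workers=20),
        # CPU-heavy jobs can be routed to a separate process pool
        'processpool': ProcessPoolExecutor(max_workers=2),
    }

    scheduler = BackgroundScheduler(executors=executors)


    def io_bound():
        pass


    def cpu_bound():
        pass


    scheduler.add_job(io_bound, 'interval', minutes=1)   # runs on 'default'
    scheduler.add_job(cpu_bound, 'interval', minutes=5, executor='processpool')
    scheduler.start()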
+ """ + + def __init__(self, max_workers=10): + pool = concurrent.futures.ProcessPoolExecutor(int(max_workers)) + super(ProcessPoolExecutor, self).__init__(pool) diff --git a/lib/apscheduler/executors/tornado.py b/lib/apscheduler/executors/tornado.py new file mode 100644 index 00000000..6519c3e8 --- /dev/null +++ b/lib/apscheduler/executors/tornado.py @@ -0,0 +1,54 @@ +from __future__ import absolute_import + +import sys +from concurrent.futures import ThreadPoolExecutor + +from tornado.gen import convert_yielded + +from apscheduler.executors.base import BaseExecutor, run_job + +try: + from inspect import iscoroutinefunction + from apscheduler.executors.base_py3 import run_coroutine_job +except ImportError: + def iscoroutinefunction(func): + return False + + +class TornadoExecutor(BaseExecutor): + """ + Runs jobs either in a thread pool or directly on the I/O loop. + + If the job function is a native coroutine function, it is scheduled to be run directly in the + I/O loop as soon as possible. All other functions are run in a thread pool. + + Plugin alias: ``tornado`` + + :param int max_workers: maximum number of worker threads in the thread pool + """ + + def __init__(self, max_workers=10): + super(TornadoExecutor, self).__init__() + self.executor = ThreadPoolExecutor(max_workers) + + def start(self, scheduler, alias): + super(TornadoExecutor, self).start(scheduler, alias) + self._ioloop = scheduler._ioloop + + def _do_submit_job(self, job, run_times): + def callback(f): + try: + events = f.result() + except: + self._run_job_error(job.id, *sys.exc_info()[1:]) + else: + self._run_job_success(job.id, events) + + if iscoroutinefunction(job.func): + f = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name) + else: + f = self.executor.submit(run_job, job, job._jobstore_alias, run_times, + self._logger.name) + + f = convert_yielded(f) + f.add_done_callback(callback) diff --git a/lib/apscheduler/executors/twisted.py b/lib/apscheduler/executors/twisted.py new file mode 100644 index 00000000..c7bcf647 --- /dev/null +++ b/lib/apscheduler/executors/twisted.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import + +from apscheduler.executors.base import BaseExecutor, run_job + + +class TwistedExecutor(BaseExecutor): + """ + Runs jobs in the reactor's thread pool. + + Plugin alias: ``twisted`` + """ + + def start(self, scheduler, alias): + super(TwistedExecutor, self).start(scheduler, alias) + self._reactor = scheduler._reactor + + def _do_submit_job(self, job, run_times): + def callback(success, result): + if success: + self._run_job_success(job.id, result) + else: + self._run_job_error(job.id, result.value, result.tb) + + self._reactor.getThreadPool().callInThreadWithCallback( + callback, run_job, job, job._jobstore_alias, run_times, self._logger.name) diff --git a/lib/apscheduler/job.py b/lib/apscheduler/job.py index 868e7234..b9c305db 100644 --- a/lib/apscheduler/job.py +++ b/lib/apscheduler/job.py @@ -1,134 +1,289 @@ -""" -Jobs represent scheduled tasks. 
-""" +from collections import Iterable, Mapping +from uuid import uuid4 -from threading import Lock -from datetime import timedelta +import six -from apscheduler.util import to_unicode, ref_to_obj, get_callable_name,\ - obj_to_ref - - -class MaxInstancesReachedError(Exception): - pass +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import ( + ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args, + convert_to_datetime) class Job(object): """ - Encapsulates the actual Job along with its metadata. Job instances - are created by the scheduler when adding jobs, and it should not be - directly instantiated. + Contains the options given when scheduling callables and its current schedule and other state. + This class should never be instantiated by the user. - :param trigger: trigger that determines the execution times - :param func: callable to call when the trigger is triggered - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job (optional) - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :param coalesce: run once instead of many times if the scheduler determines - that the job should be run more than once in succession - :param max_runs: maximum number of times this job is allowed to be - triggered - :param max_instances: maximum number of concurrently running - instances allowed for this job + :var str id: the unique identifier of this job + :var str name: the description of this job + :var func: the callable to execute + :var tuple|list args: positional arguments to the callable + :var dict kwargs: keyword arguments to the callable + :var bool coalesce: whether to only run the job once when several run times are due + :var trigger: the trigger object that controls the schedule of this job + :var str executor: the name of the executor that will run this job + :var int misfire_grace_time: the time (in seconds) how much this job's execution is allowed to + be late + :var int max_instances: the maximum number of concurrently executing instances allowed for this + job + :var datetime.datetime next_run_time: the next scheduled run time of this job + + .. note:: + The ``misfire_grace_time`` has some non-obvious effects on job execution. See the + :ref:`missed-job-executions` section in the documentation for an in-depth explanation. 
""" - id = None - next_run_time = None - def __init__(self, trigger, func, args, kwargs, misfire_grace_time, - coalesce, name=None, max_runs=None, max_instances=1): - if not trigger: - raise ValueError('The trigger must not be None') - if not hasattr(func, '__call__'): - raise TypeError('func must be callable') - if not hasattr(args, '__getitem__'): - raise TypeError('args must be a list-like object') - if not hasattr(kwargs, '__getitem__'): - raise TypeError('kwargs must be a dict-like object') - if misfire_grace_time <= 0: - raise ValueError('misfire_grace_time must be a positive value') - if max_runs is not None and max_runs <= 0: - raise ValueError('max_runs must be a positive value') - if max_instances <= 0: - raise ValueError('max_instances must be a positive value') + __slots__ = ('_scheduler', '_jobstore_alias', 'id', 'trigger', 'executor', 'func', 'func_ref', + 'args', 'kwargs', 'name', 'misfire_grace_time', 'coalesce', 'max_instances', + 'next_run_time') - self._lock = Lock() + def __init__(self, scheduler, id=None, **kwargs): + super(Job, self).__init__() + self._scheduler = scheduler + self._jobstore_alias = None + self._modify(id=id or uuid4().hex, **kwargs) - self.trigger = trigger - self.func = func - self.args = args - self.kwargs = kwargs - self.name = to_unicode(name or get_callable_name(func)) - self.misfire_grace_time = misfire_grace_time - self.coalesce = coalesce - self.max_runs = max_runs - self.max_instances = max_instances - self.runs = 0 - self.instances = 0 - - def compute_next_run_time(self, now): - if self.runs == self.max_runs: - self.next_run_time = None - else: - self.next_run_time = self.trigger.get_next_fire_time(now) - - return self.next_run_time - - def get_run_times(self, now): + def modify(self, **changes): """ - Computes the scheduled run times between ``next_run_time`` and ``now``. + Makes the given changes to this job and saves it in the associated job store. + + Accepted keyword arguments are the same as the variables on this class. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.modify_job` + + :return Job: this job instance + + """ + self._scheduler.modify_job(self.id, self._jobstore_alias, **changes) + return self + + def reschedule(self, trigger, **trigger_args): + """ + Shortcut for switching the trigger on this job. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.reschedule_job` + + :return Job: this job instance + + """ + self._scheduler.reschedule_job(self.id, self._jobstore_alias, trigger, **trigger_args) + return self + + def pause(self): + """ + Temporarily suspend the execution of this job. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.pause_job` + + :return Job: this job instance + + """ + self._scheduler.pause_job(self.id, self._jobstore_alias) + return self + + def resume(self): + """ + Resume the schedule of this job if previously paused. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.resume_job` + + :return Job: this job instance + + """ + self._scheduler.resume_job(self.id, self._jobstore_alias) + return self + + def remove(self): + """ + Unschedules this job and removes it from its associated job store. + + .. seealso:: :meth:`~apscheduler.schedulers.base.BaseScheduler.remove_job` + + """ + self._scheduler.remove_job(self.id, self._jobstore_alias) + + @property + def pending(self): + """ + Returns ``True`` if the referenced job is still waiting to be added to its designated job + store. 
+ + """ + return self._jobstore_alias is None + + # + # Private API + # + + def _get_run_times(self, now): + """ + Computes the scheduled run times between ``next_run_time`` and ``now`` (inclusive). + + :type now: datetime.datetime + :rtype: list[datetime.datetime] + """ run_times = [] - run_time = self.next_run_time - increment = timedelta(microseconds=1) - while ((not self.max_runs or self.runs < self.max_runs) and - run_time and run_time <= now): - run_times.append(run_time) - run_time = self.trigger.get_next_fire_time(run_time + increment) + next_run_time = self.next_run_time + while next_run_time and next_run_time <= now: + run_times.append(next_run_time) + next_run_time = self.trigger.get_next_fire_time(next_run_time, now) return run_times - def add_instance(self): - self._lock.acquire() - try: - if self.instances == self.max_instances: - raise MaxInstancesReachedError - self.instances += 1 - finally: - self._lock.release() + def _modify(self, **changes): + """ + Validates the changes to the Job and makes the modifications if and only if all of them + validate. - def remove_instance(self): - self._lock.acquire() - try: - assert self.instances > 0, 'Already at 0 instances' - self.instances -= 1 - finally: - self._lock.release() + """ + approved = {} + + if 'id' in changes: + value = changes.pop('id') + if not isinstance(value, six.string_types): + raise TypeError("id must be a nonempty string") + if hasattr(self, 'id'): + raise ValueError('The job ID may not be changed') + approved['id'] = value + + if 'func' in changes or 'args' in changes or 'kwargs' in changes: + func = changes.pop('func') if 'func' in changes else self.func + args = changes.pop('args') if 'args' in changes else self.args + kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs + + if isinstance(func, six.string_types): + func_ref = func + func = ref_to_obj(func) + elif callable(func): + try: + func_ref = obj_to_ref(func) + except ValueError: + # If this happens, this Job won't be serializable + func_ref = None + else: + raise TypeError('func must be a callable or a textual reference to one') + + if not hasattr(self, 'name') and changes.get('name', None) is None: + changes['name'] = get_callable_name(func) + + if isinstance(args, six.string_types) or not isinstance(args, Iterable): + raise TypeError('args must be a non-string iterable') + if isinstance(kwargs, six.string_types) or not isinstance(kwargs, Mapping): + raise TypeError('kwargs must be a dict-like object') + + check_callable_args(func, args, kwargs) + + approved['func'] = func + approved['func_ref'] = func_ref + approved['args'] = args + approved['kwargs'] = kwargs + + if 'name' in changes: + value = changes.pop('name') + if not value or not isinstance(value, six.string_types): + raise TypeError("name must be a nonempty string") + approved['name'] = value + + if 'misfire_grace_time' in changes: + value = changes.pop('misfire_grace_time') + if value is not None and (not isinstance(value, six.integer_types) or value <= 0): + raise TypeError('misfire_grace_time must be either None or a positive integer') + approved['misfire_grace_time'] = value + + if 'coalesce' in changes: + value = bool(changes.pop('coalesce')) + approved['coalesce'] = value + + if 'max_instances' in changes: + value = changes.pop('max_instances') + if not isinstance(value, six.integer_types) or value <= 0: + raise TypeError('max_instances must be a positive integer') + approved['max_instances'] = value + + if 'trigger' in changes: + trigger = changes.pop('trigger') + if 
not isinstance(trigger, BaseTrigger): + raise TypeError('Expected a trigger instance, got %s instead' % + trigger.__class__.__name__) + + approved['trigger'] = trigger + + if 'executor' in changes: + value = changes.pop('executor') + if not isinstance(value, six.string_types): + raise TypeError('executor must be a string') + approved['executor'] = value + + if 'next_run_time' in changes: + value = changes.pop('next_run_time') + approved['next_run_time'] = convert_to_datetime(value, self._scheduler.timezone, + 'next_run_time') + + if changes: + raise AttributeError('The following are not modifiable attributes of Job: %s' % + ', '.join(changes)) + + for key, value in six.iteritems(approved): + setattr(self, key, value) def __getstate__(self): - # Prevents the unwanted pickling of transient or unpicklable variables - state = self.__dict__.copy() - state.pop('instances', None) - state.pop('func', None) - state.pop('_lock', None) - state['func_ref'] = obj_to_ref(self.func) - return state + # Don't allow this Job to be serialized if the function reference could not be determined + if not self.func_ref: + raise ValueError( + 'This Job cannot be serialized since the reference to its callable (%r) could not ' + 'be determined. Consider giving a textual reference (module:function name) ' + 'instead.' % (self.func,)) + + return { + 'version': 1, + 'id': self.id, + 'func': self.func_ref, + 'trigger': self.trigger, + 'executor': self.executor, + 'args': self.args, + 'kwargs': self.kwargs, + 'name': self.name, + 'misfire_grace_time': self.misfire_grace_time, + 'coalesce': self.coalesce, + 'max_instances': self.max_instances, + 'next_run_time': self.next_run_time + } def __setstate__(self, state): - state['instances'] = 0 - state['func'] = ref_to_obj(state.pop('func_ref')) - state['_lock'] = Lock() - self.__dict__ = state + if state.get('version', 1) > 1: + raise ValueError('Job has version %s, but only version 1 can be handled' % + state['version']) + + self.id = state['id'] + self.func_ref = state['func'] + self.func = ref_to_obj(self.func_ref) + self.trigger = state['trigger'] + self.executor = state['executor'] + self.args = state['args'] + self.kwargs = state['kwargs'] + self.name = state['name'] + self.misfire_grace_time = state['misfire_grace_time'] + self.coalesce = state['coalesce'] + self.max_instances = state['max_instances'] + self.next_run_time = state['next_run_time'] def __eq__(self, other): if isinstance(other, Job): - return self.id is not None and other.id == self.id or self is other + return self.id == other.id return NotImplemented def __repr__(self): - return '' % (self.name, repr(self.trigger)) + return '' % (repr_escape(self.id), repr_escape(self.name)) def __str__(self): - return '%s (trigger: %s, next run at: %s)' % (self.name, - str(self.trigger), str(self.next_run_time)) + return repr_escape(self.__unicode__()) + + def __unicode__(self): + if hasattr(self, 'next_run_time'): + status = ('next run at: ' + datetime_repr(self.next_run_time) if + self.next_run_time else 'paused') + else: + status = 'pending' + + return u'%s (trigger: %s, %s)' % (self.name, self.trigger, status) diff --git a/lib/apscheduler/jobstores/base.py b/lib/apscheduler/jobstores/base.py index f0a16ddb..9cff66c4 100644 --- a/lib/apscheduler/jobstores/base.py +++ b/lib/apscheduler/jobstores/base.py @@ -1,25 +1,143 @@ -""" -Abstract base class that provides the interface needed by all job stores. -Job store methods are also documented here. 
-""" +from abc import ABCMeta, abstractmethod +import logging + +import six -class JobStore(object): - def add_job(self, job): - """Adds the given job from this store.""" - raise NotImplementedError +class JobLookupError(KeyError): + """Raised when the job store cannot find a job for update or removal.""" - def update_job(self, job): - """Persists the running state of the given job.""" - raise NotImplementedError + def __init__(self, job_id): + super(JobLookupError, self).__init__(u'No job by the id of %s was found' % job_id) - def remove_job(self, job): - """Removes the given jobs from this store.""" - raise NotImplementedError - def load_jobs(self): - """Loads jobs from this store into memory.""" - raise NotImplementedError +class ConflictingIdError(KeyError): + """Raised when the uniqueness of job IDs is being violated.""" - def close(self): + def __init__(self, job_id): + super(ConflictingIdError, self).__init__( + u'Job identifier (%s) conflicts with an existing job' % job_id) + + +class TransientJobError(ValueError): + """ + Raised when an attempt to add transient (with no func_ref) job to a persistent job store is + detected. + """ + + def __init__(self, job_id): + super(TransientJobError, self).__init__( + u'Job (%s) cannot be added to this job store because a reference to the callable ' + u'could not be determined.' % job_id) + + +class BaseJobStore(six.with_metaclass(ABCMeta)): + """Abstract base class that defines the interface that every job store must implement.""" + + _scheduler = None + _alias = None + _logger = logging.getLogger('apscheduler.jobstores') + + def start(self, scheduler, alias): + """ + Called by the scheduler when the scheduler is being started or when the job store is being + added to an already running scheduler. + + :param apscheduler.schedulers.base.BaseScheduler scheduler: the scheduler that is starting + this job store + :param str|unicode alias: alias of this job store as it was assigned to the scheduler + """ + + self._scheduler = scheduler + self._alias = alias + self._logger = logging.getLogger('apscheduler.jobstores.%s' % alias) + + def shutdown(self): """Frees any resources still bound to this job store.""" + + def _fix_paused_jobs_sorting(self, jobs): + for i, job in enumerate(jobs): + if job.next_run_time is not None: + if i > 0: + paused_jobs = jobs[:i] + del jobs[:i] + jobs.extend(paused_jobs) + break + + @abstractmethod + def lookup_job(self, job_id): + """ + Returns a specific job, or ``None`` if it isn't found.. + + The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of + the returned job to point to the scheduler and itself, respectively. + + :param str|unicode job_id: identifier of the job + :rtype: Job + """ + + @abstractmethod + def get_due_jobs(self, now): + """ + Returns the list of jobs that have ``next_run_time`` earlier or equal to ``now``. + The returned jobs must be sorted by next run time (ascending). + + :param datetime.datetime now: the current (timezone aware) datetime + :rtype: list[Job] + """ + + @abstractmethod + def get_next_run_time(self): + """ + Returns the earliest run time of all the jobs stored in this job store, or ``None`` if + there are no active jobs. + + :rtype: datetime.datetime + """ + + @abstractmethod + def get_all_jobs(self): + """ + Returns a list of all jobs in this job store. + The returned jobs should be sorted by next run time (ascending). + Paused jobs (next_run_time == None) should be sorted last. 
+ + The job store is responsible for setting the ``scheduler`` and ``jobstore`` attributes of + the returned jobs to point to the scheduler and itself, respectively. + + :rtype: list[Job] + """ + + @abstractmethod + def add_job(self, job): + """ + Adds the given job to this store. + + :param Job job: the job to add + :raises ConflictingIdError: if there is another job in this store with the same ID + """ + + @abstractmethod + def update_job(self, job): + """ + Replaces the job in the store with the given newer version. + + :param Job job: the job to update + :raises JobLookupError: if the job does not exist + """ + + @abstractmethod + def remove_job(self, job_id): + """ + Removes the given job from this store. + + :param str|unicode job_id: identifier of the job + :raises JobLookupError: if the job does not exist + """ + + @abstractmethod + def remove_all_jobs(self): + """Removes all jobs from this store.""" + + def __repr__(self): + return '<%s>' % self.__class__.__name__ diff --git a/lib/apscheduler/jobstores/memory.py b/lib/apscheduler/jobstores/memory.py new file mode 100644 index 00000000..abfe7c6c --- /dev/null +++ b/lib/apscheduler/jobstores/memory.py @@ -0,0 +1,108 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import datetime_to_utc_timestamp + + +class MemoryJobStore(BaseJobStore): + """ + Stores jobs in an array in RAM. Provides no persistence support. + + Plugin alias: ``memory`` + """ + + def __init__(self): + super(MemoryJobStore, self).__init__() + # list of (job, timestamp), sorted by next_run_time and job id (ascending) + self._jobs = [] + self._jobs_index = {} # id -> (job, timestamp) lookup table + + def lookup_job(self, job_id): + return self._jobs_index.get(job_id, (None, None))[0] + + def get_due_jobs(self, now): + now_timestamp = datetime_to_utc_timestamp(now) + pending = [] + for job, timestamp in self._jobs: + if timestamp is None or timestamp > now_timestamp: + break + pending.append(job) + + return pending + + def get_next_run_time(self): + return self._jobs[0][0].next_run_time if self._jobs else None + + def get_all_jobs(self): + return [j[0] for j in self._jobs] + + def add_job(self, job): + if job.id in self._jobs_index: + raise ConflictingIdError(job.id) + + timestamp = datetime_to_utc_timestamp(job.next_run_time) + index = self._get_job_index(timestamp, job.id) + self._jobs.insert(index, (job, timestamp)) + self._jobs_index[job.id] = (job, timestamp) + + def update_job(self, job): + old_job, old_timestamp = self._jobs_index.get(job.id, (None, None)) + if old_job is None: + raise JobLookupError(job.id) + + # If the next run time has not changed, simply replace the job in its present index. + # Otherwise, reinsert the job to the list to preserve the ordering. 
+ old_index = self._get_job_index(old_timestamp, old_job.id) + new_timestamp = datetime_to_utc_timestamp(job.next_run_time) + if old_timestamp == new_timestamp: + self._jobs[old_index] = (job, new_timestamp) + else: + del self._jobs[old_index] + new_index = self._get_job_index(new_timestamp, job.id) + self._jobs.insert(new_index, (job, new_timestamp)) + + self._jobs_index[old_job.id] = (job, new_timestamp) + + def remove_job(self, job_id): + job, timestamp = self._jobs_index.get(job_id, (None, None)) + if job is None: + raise JobLookupError(job_id) + + index = self._get_job_index(timestamp, job_id) + del self._jobs[index] + del self._jobs_index[job.id] + + def remove_all_jobs(self): + self._jobs = [] + self._jobs_index = {} + + def shutdown(self): + self.remove_all_jobs() + + def _get_job_index(self, timestamp, job_id): + """ + Returns the index of the given job, or if it's not found, the index where the job should be + inserted based on the given timestamp. + + :type timestamp: int + :type job_id: str + + """ + lo, hi = 0, len(self._jobs) + timestamp = float('inf') if timestamp is None else timestamp + while lo < hi: + mid = (lo + hi) // 2 + mid_job, mid_timestamp = self._jobs[mid] + mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp + if mid_timestamp > timestamp: + hi = mid + elif mid_timestamp < timestamp: + lo = mid + 1 + elif mid_job.id > job_id: + hi = mid + elif mid_job.id < job_id: + lo = mid + 1 + else: + return mid + + return lo diff --git a/lib/apscheduler/jobstores/mongodb.py b/lib/apscheduler/jobstores/mongodb.py new file mode 100644 index 00000000..fc88325f --- /dev/null +++ b/lib/apscheduler/jobstores/mongodb.py @@ -0,0 +1,141 @@ +from __future__ import absolute_import +import warnings + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from bson.binary import Binary + from pymongo.errors import DuplicateKeyError + from pymongo import MongoClient, ASCENDING +except ImportError: # pragma: nocover + raise ImportError('MongoDBJobStore requires PyMongo installed') + + +class MongoDBJobStore(BaseJobStore): + """ + Stores jobs in a MongoDB database. Any leftover keyword arguments are directly passed to + pymongo's `MongoClient + `_. 
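A hedged usage sketch for the MongoDB-backed store described below, assuming a MongoDB instance is reachable with default connection settings (the database and collection names shown are simply the constructor defaults, and `persisted_task` is an invented job):

    from apscheduler.jobstores.mongodb import MongoDBJobStore
    from apscheduler.schedulers.background import BackgroundScheduler

    scheduler = BackgroundScheduler()
    # extra keyword arguments (host, port, ...) are passed straight to MongoClient
    scheduler.add_jobstore(MongoDBJobStore(database='apscheduler', collection='jobs'),
                           alias='mongo')


    def persisted_task():
        pass


    # jobs stored here survive process restarts because they are pickled into MongoDB
    scheduler.add_job(persisted_task, 'interval', hours=1,
                      id='persisted_task', jobstore='mongo', replace_existing=True)
    scheduler.start()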
+ + Plugin alias: ``mongodb`` + + :param str database: database to store jobs in + :param str collection: collection to store jobs in + :param client: a :class:`~pymongo.mongo_client.MongoClient` instance to use instead of + providing connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, database='apscheduler', collection='jobs', client=None, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(MongoDBJobStore, self).__init__() + self.pickle_protocol = pickle_protocol + + if not database: + raise ValueError('The "database" parameter must not be empty') + if not collection: + raise ValueError('The "collection" parameter must not be empty') + + if client: + self.client = maybe_ref(client) + else: + connect_args.setdefault('w', 1) + self.client = MongoClient(**connect_args) + + self.collection = self.client[database][collection] + + def start(self, scheduler, alias): + super(MongoDBJobStore, self).start(scheduler, alias) + self.collection.ensure_index('next_run_time', sparse=True) + + @property + def connection(self): + warnings.warn('The "connection" member is deprecated -- use "client" instead', + DeprecationWarning) + return self.client + + def lookup_job(self, job_id): + document = self.collection.find_one(job_id, ['job_state']) + return self._reconstitute_job(document['job_state']) if document else None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + return self._get_jobs({'next_run_time': {'$lte': timestamp}}) + + def get_next_run_time(self): + document = self.collection.find_one({'next_run_time': {'$ne': None}}, + projection=['next_run_time'], + sort=[('next_run_time', ASCENDING)]) + return utc_timestamp_to_datetime(document['next_run_time']) if document else None + + def get_all_jobs(self): + jobs = self._get_jobs({}) + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + try: + self.collection.insert({ + '_id': job.id, + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + }) + except DuplicateKeyError: + raise ConflictingIdError(job.id) + + def update_job(self, job): + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + result = self.collection.update({'_id': job.id}, {'$set': changes}) + if result and result['n'] == 0: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + result = self.collection.remove(job_id) + if result and result['n'] == 0: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + self.collection.remove() + + def shutdown(self): + self.client.close() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self, conditions): + jobs = [] + failed_job_ids = [] + for document in self.collection.find(conditions, ['_id', 'job_state'], + sort=[('next_run_time', ASCENDING)]): + try: + jobs.append(self._reconstitute_job(document['job_state'])) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', + document['_id']) + failed_job_ids.append(document['_id']) + + # Remove all the jobs we failed to restore + if failed_job_ids: + self.collection.remove({'_id': {'$in': 
failed_job_ids}}) + + return jobs + + def __repr__(self): + return '<%s (client=%s)>' % (self.__class__.__name__, self.client) diff --git a/lib/apscheduler/jobstores/mongodb_store.py b/lib/apscheduler/jobstores/mongodb_store.py deleted file mode 100644 index 3f522c25..00000000 --- a/lib/apscheduler/jobstores/mongodb_store.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Stores jobs in a MongoDB database. -""" -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job - -try: - import cPickle as pickle -except ImportError: # pragma: nocover - import pickle - -try: - from bson.binary import Binary - from pymongo.connection import Connection -except ImportError: # pragma: nocover - raise ImportError('MongoDBJobStore requires PyMongo installed') - -logger = logging.getLogger(__name__) - - -class MongoDBJobStore(JobStore): - def __init__(self, database='apscheduler', collection='jobs', - connection=None, pickle_protocol=pickle.HIGHEST_PROTOCOL, - **connect_args): - self.jobs = [] - self.pickle_protocol = pickle_protocol - - if not database: - raise ValueError('The "database" parameter must not be empty') - if not collection: - raise ValueError('The "collection" parameter must not be empty') - - if connection: - self.connection = connection - else: - self.connection = Connection(**connect_args) - - self.collection = self.connection[database][collection] - - def add_job(self, job): - job_dict = job.__getstate__() - job_dict['trigger'] = Binary(pickle.dumps(job.trigger, - self.pickle_protocol)) - job_dict['args'] = Binary(pickle.dumps(job.args, - self.pickle_protocol)) - job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs, - self.pickle_protocol)) - job.id = self.collection.insert(job_dict) - self.jobs.append(job) - - def remove_job(self, job): - self.collection.remove(job.id) - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for job_dict in self.collection.find(): - try: - job = Job.__new__(Job) - job_dict['id'] = job_dict.pop('_id') - job_dict['trigger'] = pickle.loads(job_dict['trigger']) - job_dict['args'] = pickle.loads(job_dict['args']) - job_dict['kwargs'] = pickle.loads(job_dict['kwargs']) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - self.jobs = jobs - - def update_job(self, job): - spec = {'_id': job.id} - document = {'$set': {'next_run_time': job.next_run_time}, - '$inc': {'runs': 1}} - self.collection.update(spec, document) - - def close(self): - self.connection.disconnect() - - def __repr__(self): - connection = self.collection.database.connection - return '<%s (connection=%s)>' % (self.__class__.__name__, connection) diff --git a/lib/apscheduler/jobstores/ram_store.py b/lib/apscheduler/jobstores/ram_store.py deleted file mode 100644 index 85091fe8..00000000 --- a/lib/apscheduler/jobstores/ram_store.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Stores jobs in an array in RAM. Provides no persistence support. 
-""" - -from apscheduler.jobstores.base import JobStore - - -class RAMJobStore(JobStore): - def __init__(self): - self.jobs = [] - - def add_job(self, job): - self.jobs.append(job) - - def update_job(self, job): - pass - - def remove_job(self, job): - self.jobs.remove(job) - - def load_jobs(self): - pass - - def __repr__(self): - return '<%s>' % (self.__class__.__name__) diff --git a/lib/apscheduler/jobstores/redis.py b/lib/apscheduler/jobstores/redis.py new file mode 100644 index 00000000..4e092f7d --- /dev/null +++ b/lib/apscheduler/jobstores/redis.py @@ -0,0 +1,146 @@ +from __future__ import absolute_import +from datetime import datetime + +from pytz import utc +import six + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from redis import StrictRedis +except ImportError: # pragma: nocover + raise ImportError('RedisJobStore requires redis installed') + + +class RedisJobStore(BaseJobStore): + """ + Stores jobs in a Redis database. Any leftover keyword arguments are directly passed to redis's + :class:`~redis.StrictRedis`. + + Plugin alias: ``redis`` + + :param int db: the database number to store jobs in + :param str jobs_key: key to store jobs in + :param str run_times_key: key to store the jobs' run times in + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, db=0, jobs_key='apscheduler.jobs', run_times_key='apscheduler.run_times', + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(RedisJobStore, self).__init__() + + if db is None: + raise ValueError('The "db" parameter must not be empty') + if not jobs_key: + raise ValueError('The "jobs_key" parameter must not be empty') + if not run_times_key: + raise ValueError('The "run_times_key" parameter must not be empty') + + self.pickle_protocol = pickle_protocol + self.jobs_key = jobs_key + self.run_times_key = run_times_key + self.redis = StrictRedis(db=int(db), **connect_args) + + def lookup_job(self, job_id): + job_state = self.redis.hget(self.jobs_key, job_id) + return self._reconstitute_job(job_state) if job_state else None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp) + if job_ids: + job_states = self.redis.hmget(self.jobs_key, *job_ids) + return self._reconstitute_jobs(six.moves.zip(job_ids, job_states)) + return [] + + def get_next_run_time(self): + next_run_time = self.redis.zrange(self.run_times_key, 0, 0, withscores=True) + if next_run_time: + return utc_timestamp_to_datetime(next_run_time[0][1]) + + def get_all_jobs(self): + job_states = self.redis.hgetall(self.jobs_key) + jobs = self._reconstitute_jobs(six.iteritems(job_states)) + paused_sort_key = datetime(9999, 12, 31, tzinfo=utc) + return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key) + + def add_job(self, job): + if self.redis.hexists(self.jobs_key, job.id): + raise ConflictingIdError(job.id) + + with self.redis.pipeline() as pipe: + pipe.multi() + pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), + self.pickle_protocol)) + if job.next_run_time: + pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id) + pipe.execute() + + def update_job(self, job): + 
if not self.redis.hexists(self.jobs_key, job.id): + raise JobLookupError(job.id) + + with self.redis.pipeline() as pipe: + pipe.hset(self.jobs_key, job.id, pickle.dumps(job.__getstate__(), + self.pickle_protocol)) + if job.next_run_time: + pipe.zadd(self.run_times_key, datetime_to_utc_timestamp(job.next_run_time), job.id) + else: + pipe.zrem(self.run_times_key, job.id) + pipe.execute() + + def remove_job(self, job_id): + if not self.redis.hexists(self.jobs_key, job_id): + raise JobLookupError(job_id) + + with self.redis.pipeline() as pipe: + pipe.hdel(self.jobs_key, job_id) + pipe.zrem(self.run_times_key, job_id) + pipe.execute() + + def remove_all_jobs(self): + with self.redis.pipeline() as pipe: + pipe.delete(self.jobs_key) + pipe.delete(self.run_times_key) + pipe.execute() + + def shutdown(self): + self.redis.connection_pool.disconnect() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _reconstitute_jobs(self, job_states): + jobs = [] + failed_job_ids = [] + for job_id, job_state in job_states: + try: + jobs.append(self._reconstitute_job(job_state)) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', job_id) + failed_job_ids.append(job_id) + + # Remove all the jobs we failed to restore + if failed_job_ids: + with self.redis.pipeline() as pipe: + pipe.hdel(self.jobs_key, *failed_job_ids) + pipe.zrem(self.run_times_key, *failed_job_ids) + pipe.execute() + + return jobs + + def __repr__(self): + return '<%s>' % self.__class__.__name__ diff --git a/lib/apscheduler/jobstores/rethinkdb.py b/lib/apscheduler/jobstores/rethinkdb.py new file mode 100644 index 00000000..2185c6cc --- /dev/null +++ b/lib/apscheduler/jobstores/rethinkdb.py @@ -0,0 +1,153 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + import rethinkdb as r +except ImportError: # pragma: nocover + raise ImportError('RethinkDBJobStore requires rethinkdb installed') + + +class RethinkDBJobStore(BaseJobStore): + """ + Stores jobs in a RethinkDB database. Any leftover keyword arguments are directly passed to + rethinkdb's `RethinkdbClient `_. 
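+
+    A minimal usage sketch (illustrative only -- assumes a reachable RethinkDB server; note
+    that the constructor argument is ``table``, and the values shown are placeholders)::
+
+        from apscheduler.schedulers.background import BackgroundScheduler
+        from apscheduler.jobstores.rethinkdb import RethinkDBJobStore
+
+        scheduler = BackgroundScheduler(
+            jobstores={'default': RethinkDBJobStore(database='apscheduler', table='jobs')})
+        scheduler.start()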
+ + Plugin alias: ``rethinkdb`` + + :param str database: database to store jobs in + :param str collection: collection to store jobs in + :param client: a :class:`rethinkdb.net.Connection` instance to use instead of providing + connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, database='apscheduler', table='jobs', client=None, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(RethinkDBJobStore, self).__init__() + + if not database: + raise ValueError('The "database" parameter must not be empty') + if not table: + raise ValueError('The "table" parameter must not be empty') + + self.database = database + self.table = table + self.client = client + self.pickle_protocol = pickle_protocol + self.connect_args = connect_args + self.conn = None + + def start(self, scheduler, alias): + super(RethinkDBJobStore, self).start(scheduler, alias) + + if self.client: + self.conn = maybe_ref(self.client) + else: + self.conn = r.connect(db=self.database, **self.connect_args) + + if self.database not in r.db_list().run(self.conn): + r.db_create(self.database).run(self.conn) + + if self.table not in r.table_list().run(self.conn): + r.table_create(self.table).run(self.conn) + + if 'next_run_time' not in r.table(self.table).index_list().run(self.conn): + r.table(self.table).index_create('next_run_time').run(self.conn) + + self.table = r.db(self.database).table(self.table) + + def lookup_job(self, job_id): + results = list(self.table.get_all(job_id).pluck('job_state').run(self.conn)) + return self._reconstitute_job(results[0]['job_state']) if results else None + + def get_due_jobs(self, now): + return self._get_jobs(r.row['next_run_time'] <= datetime_to_utc_timestamp(now)) + + def get_next_run_time(self): + results = list( + self.table + .filter(r.row['next_run_time'] != None) # flake8: noqa + .order_by(r.asc('next_run_time')) + .map(lambda x: x['next_run_time']) + .limit(1) + .run(self.conn) + ) + return utc_timestamp_to_datetime(results[0]) if results else None + + def get_all_jobs(self): + jobs = self._get_jobs() + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + job_dict = { + 'id': job.id, + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + results = self.table.insert(job_dict).run(self.conn) + if results['errors'] > 0: + raise ConflictingIdError(job.id) + + def update_job(self, job): + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': r.binary(pickle.dumps(job.__getstate__(), self.pickle_protocol)) + } + results = self.table.get_all(job.id).update(changes).run(self.conn) + skipped = False in map(lambda x: results[x] == 0, results.keys()) + if results['skipped'] > 0 or results['errors'] > 0 or not skipped: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + results = self.table.get_all(job_id).delete().run(self.conn) + if results['deleted'] + results['skipped'] != 1: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + self.table.delete().run(self.conn) + + def shutdown(self): + self.conn.close() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self, predicate=None): + jobs = [] + failed_job_ids 
= [] + query = (self.table.filter(r.row['next_run_time'] != None).filter(predicate) if + predicate else self.table) + query = query.order_by('next_run_time', 'id').pluck('id', 'job_state') + + for document in query.run(self.conn): + try: + jobs.append(self._reconstitute_job(document['job_state'])) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', document['id']) + failed_job_ids.append(document['id']) + + # Remove all the jobs we failed to restore + if failed_job_ids: + r.expr(failed_job_ids).for_each( + lambda job_id: self.table.get_all(job_id).delete()).run(self.conn) + + return jobs + + def __repr__(self): + connection = self.conn + return '<%s (connection=%s)>' % (self.__class__.__name__, connection) diff --git a/lib/apscheduler/jobstores/shelve_store.py b/lib/apscheduler/jobstores/shelve_store.py deleted file mode 100644 index 87c95f8f..00000000 --- a/lib/apscheduler/jobstores/shelve_store.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Stores jobs in a file governed by the :mod:`shelve` module. -""" - -import shelve -import pickle -import random -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job -from apscheduler.util import itervalues - -logger = logging.getLogger(__name__) - - -class ShelveJobStore(JobStore): - MAX_ID = 1000000 - - def __init__(self, path, pickle_protocol=pickle.HIGHEST_PROTOCOL): - self.jobs = [] - self.path = path - self.pickle_protocol = pickle_protocol - self.store = shelve.open(path, 'c', self.pickle_protocol) - - def _generate_id(self): - id = None - while not id: - id = str(random.randint(1, self.MAX_ID)) - if not id in self.store: - return id - - def add_job(self, job): - job.id = self._generate_id() - self.jobs.append(job) - self.store[job.id] = job.__getstate__() - - def update_job(self, job): - job_dict = self.store[job.id] - job_dict['next_run_time'] = job.next_run_time - job_dict['runs'] = job.runs - self.store[job.id] = job_dict - - def remove_job(self, job): - del self.store[job.id] - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for job_dict in itervalues(self.store): - try: - job = Job.__new__(Job) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - - self.jobs = jobs - - def close(self): - self.store.close() - - def __repr__(self): - return '<%s (path=%s)>' % (self.__class__.__name__, self.path) diff --git a/lib/apscheduler/jobstores/sqlalchemy.py b/lib/apscheduler/jobstores/sqlalchemy.py new file mode 100644 index 00000000..b82696e2 --- /dev/null +++ b/lib/apscheduler/jobstores/sqlalchemy.py @@ -0,0 +1,148 @@ +from __future__ import absolute_import + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from sqlalchemy import ( + create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select) + from sqlalchemy.exc import IntegrityError + from sqlalchemy.sql.expression import null +except ImportError: # pragma: nocover + raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed') + + +class SQLAlchemyJobStore(BaseJobStore): + """ + Stores jobs in a database table using SQLAlchemy. + The table will be created if it doesn't exist in the database. 
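+
+    A minimal usage sketch (illustrative only -- the SQLite URL is a placeholder; any
+    database URL supported by SQLAlchemy should work)::
+
+        from apscheduler.schedulers.background import BackgroundScheduler
+        from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
+
+        scheduler = BackgroundScheduler(
+            jobstores={'default': SQLAlchemyJobStore(url='sqlite:///jobs.sqlite')})
+        scheduler.start()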
+ + Plugin alias: ``sqlalchemy`` + + :param str url: connection string (see `SQLAlchemy documentation + `_ + on this) + :param engine: an SQLAlchemy Engine to use instead of creating a new one based on ``url`` + :param str tablename: name of the table to store jobs in + :param metadata: a :class:`~sqlalchemy.MetaData` instance to use instead of creating a new one + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None, + pickle_protocol=pickle.HIGHEST_PROTOCOL): + super(SQLAlchemyJobStore, self).__init__() + self.pickle_protocol = pickle_protocol + metadata = maybe_ref(metadata) or MetaData() + + if engine: + self.engine = maybe_ref(engine) + elif url: + self.engine = create_engine(url) + else: + raise ValueError('Need either "engine" or "url" defined') + + # 191 = max key length in MySQL for InnoDB/utf8mb4 tables, + # 25 = precision that translates to an 8-byte float + self.jobs_t = Table( + tablename, metadata, + Column('id', Unicode(191, _warn_on_bytestring=False), primary_key=True), + Column('next_run_time', Float(25), index=True), + Column('job_state', LargeBinary, nullable=False) + ) + + def start(self, scheduler, alias): + super(SQLAlchemyJobStore, self).start(scheduler, alias) + self.jobs_t.create(self.engine, True) + + def lookup_job(self, job_id): + selectable = select([self.jobs_t.c.job_state]).where(self.jobs_t.c.id == job_id) + job_state = self.engine.execute(selectable).scalar() + return self._reconstitute_job(job_state) if job_state else None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + return self._get_jobs(self.jobs_t.c.next_run_time <= timestamp) + + def get_next_run_time(self): + selectable = select([self.jobs_t.c.next_run_time]).\ + where(self.jobs_t.c.next_run_time != null()).\ + order_by(self.jobs_t.c.next_run_time).limit(1) + next_run_time = self.engine.execute(selectable).scalar() + return utc_timestamp_to_datetime(next_run_time) + + def get_all_jobs(self): + jobs = self._get_jobs() + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + insert = self.jobs_t.insert().values(**{ + 'id': job.id, + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol) + }) + try: + self.engine.execute(insert) + except IntegrityError: + raise ConflictingIdError(job.id) + + def update_job(self, job): + update = self.jobs_t.update().values(**{ + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': pickle.dumps(job.__getstate__(), self.pickle_protocol) + }).where(self.jobs_t.c.id == job.id) + result = self.engine.execute(update) + if result.rowcount == 0: + raise JobLookupError(id) + + def remove_job(self, job_id): + delete = self.jobs_t.delete().where(self.jobs_t.c.id == job_id) + result = self.engine.execute(delete) + if result.rowcount == 0: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + delete = self.jobs_t.delete() + self.engine.execute(delete) + + def shutdown(self): + self.engine.dispose() + + def _reconstitute_job(self, job_state): + job_state = pickle.loads(job_state) + job_state['jobstore'] = self + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self, *conditions): + jobs = [] + selectable = select([self.jobs_t.c.id, 
self.jobs_t.c.job_state]).\ + order_by(self.jobs_t.c.next_run_time) + selectable = selectable.where(*conditions) if conditions else selectable + failed_job_ids = set() + for row in self.engine.execute(selectable): + try: + jobs.append(self._reconstitute_job(row.job_state)) + except: + self._logger.exception('Unable to restore job "%s" -- removing it', row.id) + failed_job_ids.add(row.id) + + # Remove all the jobs we failed to restore + if failed_job_ids: + delete = self.jobs_t.delete().where(self.jobs_t.c.id.in_(failed_job_ids)) + self.engine.execute(delete) + + return jobs + + def __repr__(self): + return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url) diff --git a/lib/apscheduler/jobstores/sqlalchemy_store.py b/lib/apscheduler/jobstores/sqlalchemy_store.py deleted file mode 100644 index 8ece7e24..00000000 --- a/lib/apscheduler/jobstores/sqlalchemy_store.py +++ /dev/null @@ -1,87 +0,0 @@ -""" -Stores jobs in a database table using SQLAlchemy. -""" -import pickle -import logging - -from apscheduler.jobstores.base import JobStore -from apscheduler.job import Job - -try: - from sqlalchemy import * -except ImportError: # pragma: nocover - raise ImportError('SQLAlchemyJobStore requires SQLAlchemy installed') - -logger = logging.getLogger(__name__) - - -class SQLAlchemyJobStore(JobStore): - def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', - metadata=None, pickle_protocol=pickle.HIGHEST_PROTOCOL): - self.jobs = [] - self.pickle_protocol = pickle_protocol - - if engine: - self.engine = engine - elif url: - self.engine = create_engine(url) - else: - raise ValueError('Need either "engine" or "url" defined') - - self.jobs_t = Table(tablename, metadata or MetaData(), - Column('id', Integer, - Sequence(tablename + '_id_seq', optional=True), - primary_key=True), - Column('trigger', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('func_ref', String(1024), nullable=False), - Column('args', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('kwargs', PickleType(pickle_protocol, mutable=False), - nullable=False), - Column('name', Unicode(1024), unique=True), - Column('misfire_grace_time', Integer, nullable=False), - Column('coalesce', Boolean, nullable=False), - Column('max_runs', Integer), - Column('max_instances', Integer), - Column('next_run_time', DateTime, nullable=False), - Column('runs', BigInteger)) - - self.jobs_t.create(self.engine, True) - - def add_job(self, job): - job_dict = job.__getstate__() - result = self.engine.execute(self.jobs_t.insert().values(**job_dict)) - job.id = result.inserted_primary_key[0] - self.jobs.append(job) - - def remove_job(self, job): - delete = self.jobs_t.delete().where(self.jobs_t.c.id == job.id) - self.engine.execute(delete) - self.jobs.remove(job) - - def load_jobs(self): - jobs = [] - for row in self.engine.execute(select([self.jobs_t])): - try: - job = Job.__new__(Job) - job_dict = dict(row.items()) - job.__setstate__(job_dict) - jobs.append(job) - except Exception: - job_name = job_dict.get('name', '(unknown)') - logger.exception('Unable to restore job "%s"', job_name) - self.jobs = jobs - - def update_job(self, job): - job_dict = job.__getstate__() - update = self.jobs_t.update().where(self.jobs_t.c.id == job.id).\ - values(next_run_time=job_dict['next_run_time'], - runs=job_dict['runs']) - self.engine.execute(update) - - def close(self): - self.engine.dispose() - - def __repr__(self): - return '<%s (url=%s)>' % (self.__class__.__name__, self.engine.url) diff --git 
a/lib/apscheduler/jobstores/zookeeper.py b/lib/apscheduler/jobstores/zookeeper.py new file mode 100644 index 00000000..44e2e61f --- /dev/null +++ b/lib/apscheduler/jobstores/zookeeper.py @@ -0,0 +1,179 @@ +from __future__ import absolute_import + +import os +from datetime import datetime + +from pytz import utc +from kazoo.exceptions import NoNodeError, NodeExistsError + +from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError +from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime +from apscheduler.job import Job + +try: + import cPickle as pickle +except ImportError: # pragma: nocover + import pickle + +try: + from kazoo.client import KazooClient +except ImportError: # pragma: nocover + raise ImportError('ZooKeeperJobStore requires Kazoo installed') + + +class ZooKeeperJobStore(BaseJobStore): + """ + Stores jobs in a ZooKeeper tree. Any leftover keyword arguments are directly passed to + kazoo's `KazooClient + `_. + + Plugin alias: ``zookeeper`` + + :param str path: path to store jobs in + :param client: a :class:`~kazoo.client.KazooClient` instance to use instead of + providing connection arguments + :param int pickle_protocol: pickle protocol level to use (for serialization), defaults to the + highest available + """ + + def __init__(self, path='/apscheduler', client=None, close_connection_on_exit=False, + pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args): + super(ZooKeeperJobStore, self).__init__() + self.pickle_protocol = pickle_protocol + self.close_connection_on_exit = close_connection_on_exit + + if not path: + raise ValueError('The "path" parameter must not be empty') + + self.path = path + + if client: + self.client = maybe_ref(client) + else: + self.client = KazooClient(**connect_args) + self._ensured_path = False + + def _ensure_paths(self): + if not self._ensured_path: + self.client.ensure_path(self.path) + self._ensured_path = True + + def start(self, scheduler, alias): + super(ZooKeeperJobStore, self).start(scheduler, alias) + if not self.client.connected: + self.client.start() + + def lookup_job(self, job_id): + self._ensure_paths() + node_path = os.path.join(self.path, job_id) + try: + content, _ = self.client.get(node_path) + doc = pickle.loads(content) + job = self._reconstitute_job(doc['job_state']) + return job + except: + return None + + def get_due_jobs(self, now): + timestamp = datetime_to_utc_timestamp(now) + jobs = [job_def['job'] for job_def in self._get_jobs() + if job_def['next_run_time'] is not None and job_def['next_run_time'] <= timestamp] + return jobs + + def get_next_run_time(self): + next_runs = [job_def['next_run_time'] for job_def in self._get_jobs() + if job_def['next_run_time'] is not None] + return utc_timestamp_to_datetime(min(next_runs)) if len(next_runs) > 0 else None + + def get_all_jobs(self): + jobs = [job_def['job'] for job_def in self._get_jobs()] + self._fix_paused_jobs_sorting(jobs) + return jobs + + def add_job(self, job): + self._ensure_paths() + node_path = os.path.join(self.path, str(job.id)) + value = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 'job_state': job.__getstate__() + } + data = pickle.dumps(value, self.pickle_protocol) + try: + self.client.create(node_path, value=data) + except NodeExistsError: + raise ConflictingIdError(job.id) + + def update_job(self, job): + self._ensure_paths() + node_path = os.path.join(self.path, str(job.id)) + changes = { + 'next_run_time': datetime_to_utc_timestamp(job.next_run_time), + 
'job_state': job.__getstate__() + } + data = pickle.dumps(changes, self.pickle_protocol) + try: + self.client.set(node_path, value=data) + except NoNodeError: + raise JobLookupError(job.id) + + def remove_job(self, job_id): + self._ensure_paths() + node_path = os.path.join(self.path, str(job_id)) + try: + self.client.delete(node_path) + except NoNodeError: + raise JobLookupError(job_id) + + def remove_all_jobs(self): + try: + self.client.delete(self.path, recursive=True) + except NoNodeError: + pass + self._ensured_path = False + + def shutdown(self): + if self.close_connection_on_exit: + self.client.stop() + self.client.close() + + def _reconstitute_job(self, job_state): + job_state = job_state + job = Job.__new__(Job) + job.__setstate__(job_state) + job._scheduler = self._scheduler + job._jobstore_alias = self._alias + return job + + def _get_jobs(self): + self._ensure_paths() + jobs = [] + failed_job_ids = [] + all_ids = self.client.get_children(self.path) + for node_name in all_ids: + try: + node_path = os.path.join(self.path, node_name) + content, _ = self.client.get(node_path) + doc = pickle.loads(content) + job_def = { + 'job_id': node_name, + 'next_run_time': doc['next_run_time'] if doc['next_run_time'] else None, + 'job_state': doc['job_state'], + 'job': self._reconstitute_job(doc['job_state']), + 'creation_time': _.ctime + } + jobs.append(job_def) + except: + self._logger.exception('Unable to restore job "%s" -- removing it' % node_name) + failed_job_ids.append(node_name) + + # Remove all the jobs we failed to restore + if failed_job_ids: + for failed_id in failed_job_ids: + self.remove_job(failed_id) + paused_sort_key = datetime(9999, 12, 31, tzinfo=utc) + return sorted(jobs, key=lambda job_def: (job_def['job'].next_run_time or paused_sort_key, + job_def['creation_time'])) + + def __repr__(self): + self._logger.exception('<%s (client=%s)>' % (self.__class__.__name__, self.client)) + return '<%s (client=%s)>' % (self.__class__.__name__, self.client) diff --git a/lib/apscheduler/scheduler.py b/lib/apscheduler/scheduler.py deleted file mode 100644 index ee08ad8b..00000000 --- a/lib/apscheduler/scheduler.py +++ /dev/null @@ -1,559 +0,0 @@ -""" -This module is the main part of the library. It houses the Scheduler class -and related exceptions. -""" - -from threading import Thread, Event, Lock -from datetime import datetime, timedelta -from logging import getLogger -import os -import sys - -from apscheduler.util import * -from apscheduler.triggers import SimpleTrigger, IntervalTrigger, CronTrigger -from apscheduler.jobstores.ram_store import RAMJobStore -from apscheduler.job import Job, MaxInstancesReachedError -from apscheduler.events import * -from apscheduler.threadpool import ThreadPool - -logger = getLogger(__name__) - - -class SchedulerAlreadyRunningError(Exception): - """ - Raised when attempting to start or configure the scheduler when it's - already running. - """ - - def __str__(self): - return 'Scheduler is already running' - - -class Scheduler(object): - """ - This class is responsible for scheduling jobs and triggering - their execution. - """ - - _stopped = False - _thread = None - - def __init__(self, gconfig={}, **options): - self._wakeup = Event() - self._jobstores = {} - self._jobstores_lock = Lock() - self._listeners = [] - self._listeners_lock = Lock() - self._pending_jobs = [] - self.configure(gconfig, **options) - - def configure(self, gconfig={}, **options): - """ - Reconfigures the scheduler with the given options. 
Can only be done - when the scheduler isn't running. - """ - if self.running: - raise SchedulerAlreadyRunningError - - # Set general options - config = combine_opts(gconfig, 'apscheduler.', options) - self.misfire_grace_time = int(config.pop('misfire_grace_time', 1)) - self.coalesce = asbool(config.pop('coalesce', True)) - self.daemonic = asbool(config.pop('daemonic', True)) - - # Configure the thread pool - if 'threadpool' in config: - self._threadpool = maybe_ref(config['threadpool']) - else: - threadpool_opts = combine_opts(config, 'threadpool.') - self._threadpool = ThreadPool(**threadpool_opts) - - # Configure job stores - jobstore_opts = combine_opts(config, 'jobstore.') - jobstores = {} - for key, value in jobstore_opts.items(): - store_name, option = key.split('.', 1) - opts_dict = jobstores.setdefault(store_name, {}) - opts_dict[option] = value - - for alias, opts in jobstores.items(): - classname = opts.pop('class') - cls = maybe_ref(classname) - jobstore = cls(**opts) - self.add_jobstore(jobstore, alias, True) - - def start(self): - """ - Starts the scheduler in a new thread. - """ - if self.running: - raise SchedulerAlreadyRunningError - - # Create a RAMJobStore as the default if there is no default job store - if not 'default' in self._jobstores: - self.add_jobstore(RAMJobStore(), 'default', True) - - # Schedule all pending jobs - for job, jobstore in self._pending_jobs: - self._real_add_job(job, jobstore, False) - del self._pending_jobs[:] - - self._stopped = False - self._thread = Thread(target=self._main_loop, name='APScheduler') - self._thread.setDaemon(self.daemonic) - self._thread.start() - - def shutdown(self, wait=True, shutdown_threadpool=True): - """ - Shuts down the scheduler and terminates the thread. - Does not interrupt any currently running jobs. - - :param wait: ``True`` to wait until all currently executing jobs have - finished (if ``shutdown_threadpool`` is also ``True``) - :param shutdown_threadpool: ``True`` to shut down the thread pool - """ - if not self.running: - return - - self._stopped = True - self._wakeup.set() - - # Shut down the thread pool - if shutdown_threadpool: - self._threadpool.shutdown(wait) - - # Wait until the scheduler thread terminates - self._thread.join() - - @property - def running(self): - return not self._stopped and self._thread and self._thread.isAlive() - - def add_jobstore(self, jobstore, alias, quiet=False): - """ - Adds a job store to this scheduler. - - :param jobstore: job store to be added - :param alias: alias for the job store - :param quiet: True to suppress scheduler thread wakeup - :type jobstore: instance of - :class:`~apscheduler.jobstores.base.JobStore` - :type alias: str - """ - self._jobstores_lock.acquire() - try: - if alias in self._jobstores: - raise KeyError('Alias "%s" is already in use' % alias) - self._jobstores[alias] = jobstore - jobstore.load_jobs() - finally: - self._jobstores_lock.release() - - # Notify listeners that a new job store has been added - self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_ADDED, alias)) - - # Notify the scheduler so it can scan the new job store for jobs - if not quiet: - self._wakeup.set() - - def remove_jobstore(self, alias): - """ - Removes the job store by the given alias from this scheduler. 
- - :type alias: str - """ - self._jobstores_lock.acquire() - try: - try: - del self._jobstores[alias] - except KeyError: - raise KeyError('No such job store: %s' % alias) - finally: - self._jobstores_lock.release() - - # Notify listeners that a job store has been removed - self._notify_listeners(JobStoreEvent(EVENT_JOBSTORE_REMOVED, alias)) - - def add_listener(self, callback, mask=EVENT_ALL): - """ - Adds a listener for scheduler events. When a matching event occurs, - ``callback`` is executed with the event object as its sole argument. - If the ``mask`` parameter is not provided, the callback will receive - events of all types. - - :param callback: any callable that takes one argument - :param mask: bitmask that indicates which events should be listened to - """ - self._listeners_lock.acquire() - try: - self._listeners.append((callback, mask)) - finally: - self._listeners_lock.release() - - def remove_listener(self, callback): - """ - Removes a previously added event listener. - """ - self._listeners_lock.acquire() - try: - for i, (cb, _) in enumerate(self._listeners): - if callback == cb: - del self._listeners[i] - finally: - self._listeners_lock.release() - - def _notify_listeners(self, event): - self._listeners_lock.acquire() - try: - listeners = tuple(self._listeners) - finally: - self._listeners_lock.release() - - for cb, mask in listeners: - if event.code & mask: - try: - cb(event) - except: - logger.exception('Error notifying listener') - - def _real_add_job(self, job, jobstore, wakeup): - job.compute_next_run_time(datetime.now()) - if not job.next_run_time: - raise ValueError('Not adding job since it would never be run') - - self._jobstores_lock.acquire() - try: - try: - store = self._jobstores[jobstore] - except KeyError: - raise KeyError('No such job store: %s' % jobstore) - store.add_job(job) - finally: - self._jobstores_lock.release() - - # Notify listeners that a new job has been added - event = JobStoreEvent(EVENT_JOBSTORE_JOB_ADDED, jobstore, job) - self._notify_listeners(event) - - logger.info('Added job "%s" to job store "%s"', job, jobstore) - - # Notify the scheduler about the new job - if wakeup: - self._wakeup.set() - - def add_job(self, trigger, func, args, kwargs, jobstore='default', - **options): - """ - Adds the given job to the job list and notifies the scheduler thread. - - :param trigger: alias of the job store to store the job in - :param func: callable to run at the given time - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param jobstore: alias of the job store to store the job in - :rtype: :class:`~apscheduler.job.Job` - """ - job = Job(trigger, func, args or [], kwargs or {}, - options.pop('misfire_grace_time', self.misfire_grace_time), - options.pop('coalesce', self.coalesce), **options) - if not self.running: - self._pending_jobs.append((job, jobstore)) - logger.info('Adding job tentatively -- it will be properly ' - 'scheduled when the scheduler starts') - else: - self._real_add_job(job, jobstore, True) - return job - - def _remove_job(self, job, alias, jobstore): - jobstore.remove_job(job) - - # Notify listeners that a job has been removed - event = JobStoreEvent(EVENT_JOBSTORE_JOB_REMOVED, alias, job) - self._notify_listeners(event) - - logger.info('Removed job "%s"', job) - - def add_date_job(self, func, date, args=None, kwargs=None, **options): - """ - Schedules a job to be completed on a specific date and time. 
- - :param func: callable to run at the given time - :param date: the date/time to run the job at - :param name: name of the job - :param jobstore: stored the job in the named (or given) job store - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :type date: :class:`datetime.date` - :rtype: :class:`~apscheduler.job.Job` - """ - trigger = SimpleTrigger(date) - return self.add_job(trigger, func, args, kwargs, **options) - - def add_interval_job(self, func, weeks=0, days=0, hours=0, minutes=0, - seconds=0, start_date=None, args=None, kwargs=None, - **options): - """ - Schedules a job to be completed on specified intervals. - - :param func: callable to run - :param weeks: number of weeks to wait - :param days: number of days to wait - :param hours: number of hours to wait - :param minutes: number of minutes to wait - :param seconds: number of seconds to wait - :param start_date: when to first execute the job and start the - counter (default is after the given interval) - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job - :param jobstore: alias of the job store to add the job to - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :rtype: :class:`~apscheduler.job.Job` - """ - interval = timedelta(weeks=weeks, days=days, hours=hours, - minutes=minutes, seconds=seconds) - trigger = IntervalTrigger(interval, start_date) - return self.add_job(trigger, func, args, kwargs, **options) - - def add_cron_job(self, func, year='*', month='*', day='*', week='*', - day_of_week='*', hour='*', minute='*', second='*', - start_date=None, args=None, kwargs=None, **options): - """ - Schedules a job to be completed on times that match the given - expressions. - - :param func: callable to run - :param year: year to run on - :param month: month to run on (0 = January) - :param day: day of month to run on - :param week: week of the year to run on - :param day_of_week: weekday to run on (0 = Monday) - :param hour: hour to run on - :param second: second to run on - :param args: list of positional arguments to call func with - :param kwargs: dict of keyword arguments to call func with - :param name: name of the job - :param jobstore: alias of the job store to add the job to - :param misfire_grace_time: seconds after the designated run time that - the job is still allowed to be run - :return: the scheduled job - :rtype: :class:`~apscheduler.job.Job` - """ - trigger = CronTrigger(year=year, month=month, day=day, week=week, - day_of_week=day_of_week, hour=hour, - minute=minute, second=second, - start_date=start_date) - return self.add_job(trigger, func, args, kwargs, **options) - - def cron_schedule(self, **options): - """ - Decorator version of :meth:`add_cron_job`. - This decorator does not wrap its host function. - Unscheduling decorated functions is possible by passing the ``job`` - attribute of the scheduled function to :meth:`unschedule_job`. - """ - def inner(func): - func.job = self.add_cron_job(func, **options) - return func - return inner - - def interval_schedule(self, **options): - """ - Decorator version of :meth:`add_interval_job`. - This decorator does not wrap its host function. - Unscheduling decorated functions is possible by passing the ``job`` - attribute of the scheduled function to :meth:`unschedule_job`. 
- """ - def inner(func): - func.job = self.add_interval_job(func, **options) - return func - return inner - - def get_jobs(self): - """ - Returns a list of all scheduled jobs. - - :return: list of :class:`~apscheduler.job.Job` objects - """ - self._jobstores_lock.acquire() - try: - jobs = [] - for jobstore in itervalues(self._jobstores): - jobs.extend(jobstore.jobs) - return jobs - finally: - self._jobstores_lock.release() - - def unschedule_job(self, job): - """ - Removes a job, preventing it from being run any more. - """ - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - if job in list(jobstore.jobs): - self._remove_job(job, alias, jobstore) - return - finally: - self._jobstores_lock.release() - - raise KeyError('Job "%s" is not scheduled in any job store' % job) - - def unschedule_func(self, func): - """ - Removes all jobs that would execute the given function. - """ - found = False - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - for job in list(jobstore.jobs): - if job.func == func: - self._remove_job(job, alias, jobstore) - found = True - finally: - self._jobstores_lock.release() - - if not found: - raise KeyError('The given function is not scheduled in this ' - 'scheduler') - - def print_jobs(self, out=None): - """ - Prints out a textual listing of all jobs currently scheduled on this - scheduler. - - :param out: a file-like object to print to (defaults to **sys.stdout** - if nothing is given) - """ - out = out or sys.stdout - job_strs = [] - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - job_strs.append('Jobstore %s:' % alias) - if jobstore.jobs: - for job in jobstore.jobs: - job_strs.append(' %s' % job) - else: - job_strs.append(' No scheduled jobs') - finally: - self._jobstores_lock.release() - - out.write(os.linesep.join(job_strs)) - - def _run_job(self, job, run_times): - """ - Acts as a harness that runs the actual job code in a thread. 
- """ - for run_time in run_times: - # See if the job missed its run time window, and handle possible - # misfires accordingly - difference = datetime.now() - run_time - grace_time = timedelta(seconds=job.misfire_grace_time) - if difference > grace_time: - # Notify listeners about a missed run - event = JobEvent(EVENT_JOB_MISSED, job, run_time) - self._notify_listeners(event) - logger.warning('Run time of job "%s" was missed by %s', - job, difference) - else: - try: - job.add_instance() - except MaxInstancesReachedError: - event = JobEvent(EVENT_JOB_MISSED, job, run_time) - self._notify_listeners(event) - logger.warning('Execution of job "%s" skipped: ' - 'maximum number of running instances ' - 'reached (%d)', job, job.max_instances) - break - - logger.info('Running job "%s" (scheduled at %s)', job, - run_time) - - try: - retval = job.func(*job.args, **job.kwargs) - except: - # Notify listeners about the exception - exc, tb = sys.exc_info()[1:] - event = JobEvent(EVENT_JOB_ERROR, job, run_time, - exception=exc, traceback=tb) - self._notify_listeners(event) - - logger.exception('Job "%s" raised an exception', job) - else: - # Notify listeners about successful execution - event = JobEvent(EVENT_JOB_EXECUTED, job, run_time, - retval=retval) - self._notify_listeners(event) - - logger.info('Job "%s" executed successfully', job) - - job.remove_instance() - - # If coalescing is enabled, don't attempt any further runs - if job.coalesce: - break - - def _process_jobs(self, now): - """ - Iterates through jobs in every jobstore, starts pending jobs - and figures out the next wakeup time. - """ - next_wakeup_time = None - self._jobstores_lock.acquire() - try: - for alias, jobstore in iteritems(self._jobstores): - for job in tuple(jobstore.jobs): - run_times = job.get_run_times(now) - if run_times: - self._threadpool.submit(self._run_job, job, run_times) - - # Increase the job's run count - if job.coalesce: - job.runs += 1 - else: - job.runs += len(run_times) - - # Update the job, but don't keep finished jobs around - if job.compute_next_run_time(now + timedelta(microseconds=1)): - jobstore.update_job(job) - else: - self._remove_job(job, alias, jobstore) - - if not next_wakeup_time: - next_wakeup_time = job.next_run_time - elif job.next_run_time: - next_wakeup_time = min(next_wakeup_time, - job.next_run_time) - return next_wakeup_time - finally: - self._jobstores_lock.release() - - def _main_loop(self): - """Executes jobs on schedule.""" - - logger.info('Scheduler started') - self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_START)) - - self._wakeup.clear() - while not self._stopped: - logger.debug('Looking for jobs to run') - now = datetime.now() - next_wakeup_time = self._process_jobs(now) - - # Sleep until the next job is scheduled to be run, - # a new job is added or the scheduler is stopped - if next_wakeup_time is not None: - wait_seconds = time_difference(next_wakeup_time, now) - logger.debug('Next wakeup is due at %s (in %f seconds)', - next_wakeup_time, wait_seconds) - self._wakeup.wait(wait_seconds) - else: - logger.debug('No jobs; waiting until a job is added') - self._wakeup.wait() - self._wakeup.clear() - - logger.info('Scheduler has been shut down') - self._notify_listeners(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN)) diff --git a/lib/apscheduler/schedulers/__init__.py b/lib/apscheduler/schedulers/__init__.py new file mode 100644 index 00000000..bd8a7900 --- /dev/null +++ b/lib/apscheduler/schedulers/__init__.py @@ -0,0 +1,12 @@ +class SchedulerAlreadyRunningError(Exception): + 
"""Raised when attempting to start or configure the scheduler when it's already running.""" + + def __str__(self): + return 'Scheduler is already running' + + +class SchedulerNotRunningError(Exception): + """Raised when attempting to shutdown the scheduler when it's not running.""" + + def __str__(self): + return 'Scheduler is not running' diff --git a/lib/apscheduler/schedulers/asyncio.py b/lib/apscheduler/schedulers/asyncio.py new file mode 100644 index 00000000..a272b1a2 --- /dev/null +++ b/lib/apscheduler/schedulers/asyncio.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + import asyncio +except ImportError: # pragma: nocover + try: + import trollius as asyncio + except ImportError: + raise ImportError( + 'AsyncIOScheduler requires either Python 3.4 or the asyncio package installed') + + +def run_in_event_loop(func): + @wraps(func) + def wrapper(self, *args): + self._eventloop.call_soon_threadsafe(func, self, *args) + return wrapper + + +class AsyncIOScheduler(BaseScheduler): + """ + A scheduler that runs on an asyncio (:pep:`3156`) event loop. + + The default executor can run jobs based on native coroutines (``async def``). + + Extra options: + + ============== ============================================================= + ``event_loop`` AsyncIO event loop to use (defaults to the global event loop) + ============== ============================================================= + """ + + _eventloop = None + _timeout = None + + @run_in_event_loop + def shutdown(self, wait=True): + super(AsyncIOScheduler, self).shutdown(wait) + self._stop_timer() + + def _configure(self, config): + self._eventloop = maybe_ref(config.pop('event_loop', None)) or asyncio.get_event_loop() + super(AsyncIOScheduler, self)._configure(config) + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timeout = self._eventloop.call_later(wait_seconds, self.wakeup) + + def _stop_timer(self): + if self._timeout: + self._timeout.cancel() + del self._timeout + + @run_in_event_loop + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) + + def _create_default_executor(self): + from apscheduler.executors.asyncio import AsyncIOExecutor + return AsyncIOExecutor() diff --git a/lib/apscheduler/schedulers/background.py b/lib/apscheduler/schedulers/background.py new file mode 100644 index 00000000..03f29822 --- /dev/null +++ b/lib/apscheduler/schedulers/background.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import + +from threading import Thread, Event + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.util import asbool + + +class BackgroundScheduler(BlockingScheduler): + """ + A scheduler that runs in the background using a separate thread + (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will return immediately). 
+ + Extra options: + + ========== ============================================================================= + ``daemon`` Set the ``daemon`` option in the background thread (defaults to ``True``, see + `the documentation + `_ + for further details) + ========== ============================================================================= + """ + + _thread = None + + def _configure(self, config): + self._daemon = asbool(config.pop('daemon', True)) + super(BackgroundScheduler, self)._configure(config) + + def start(self, *args, **kwargs): + self._event = Event() + BaseScheduler.start(self, *args, **kwargs) + self._thread = Thread(target=self._main_loop, name='APScheduler') + self._thread.daemon = self._daemon + self._thread.start() + + def shutdown(self, *args, **kwargs): + super(BackgroundScheduler, self).shutdown(*args, **kwargs) + self._thread.join() + del self._thread diff --git a/lib/apscheduler/schedulers/base.py b/lib/apscheduler/schedulers/base.py new file mode 100644 index 00000000..93269092 --- /dev/null +++ b/lib/apscheduler/schedulers/base.py @@ -0,0 +1,1006 @@ +from __future__ import print_function + +from abc import ABCMeta, abstractmethod +from collections import MutableMapping +from threading import RLock +from datetime import datetime, timedelta +from logging import getLogger +import warnings +import sys + +from pkg_resources import iter_entry_points +from tzlocal import get_localzone +import six + +from apscheduler.schedulers import SchedulerAlreadyRunningError, SchedulerNotRunningError +from apscheduler.executors.base import MaxInstancesReachedError, BaseExecutor +from apscheduler.executors.pool import ThreadPoolExecutor +from apscheduler.jobstores.base import ConflictingIdError, JobLookupError, BaseJobStore +from apscheduler.jobstores.memory import MemoryJobStore +from apscheduler.job import Job +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import asbool, asint, astimezone, maybe_ref, timedelta_seconds, undefined +from apscheduler.events import ( + SchedulerEvent, JobEvent, JobSubmissionEvent, EVENT_SCHEDULER_START, EVENT_SCHEDULER_SHUTDOWN, + EVENT_JOBSTORE_ADDED, EVENT_JOBSTORE_REMOVED, EVENT_ALL, EVENT_JOB_MODIFIED, EVENT_JOB_REMOVED, + EVENT_JOB_ADDED, EVENT_EXECUTOR_ADDED, EVENT_EXECUTOR_REMOVED, EVENT_ALL_JOBS_REMOVED, + EVENT_JOB_SUBMITTED, EVENT_JOB_MAX_INSTANCES, EVENT_SCHEDULER_RESUMED, EVENT_SCHEDULER_PAUSED) + +#: constant indicating a scheduler's stopped state +STATE_STOPPED = 0 +#: constant indicating a scheduler's running state (started and processing jobs) +STATE_RUNNING = 1 +#: constant indicating a scheduler's paused state (started but not processing jobs) +STATE_PAUSED = 2 + + +class BaseScheduler(six.with_metaclass(ABCMeta)): + """ + Abstract base class for all schedulers. 
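+
+    Options can be passed either as keyword arguments or through a "global" configuration
+    dict whose keys carry a dotted ``apscheduler.`` prefix (see :meth:`configure`). A sketch
+    of the dict form, using a subclass such as ``BackgroundScheduler`` and placeholder
+    values::
+
+        gconfig = {
+            'apscheduler.timezone': 'UTC',
+            'apscheduler.job_defaults.max_instances': '2',
+        }
+        scheduler = BackgroundScheduler(gconfig)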
+ + Takes the following keyword arguments: + + :param str|logging.Logger logger: logger to use for the scheduler's logging (defaults to + apscheduler.scheduler) + :param str|datetime.tzinfo timezone: the default time zone (defaults to the local timezone) + :param int|float jobstore_retry_interval: the minimum number of seconds to wait between + retries in the scheduler's main loop if the job store raises an exception when getting + the list of due jobs + :param dict job_defaults: default values for newly added jobs + :param dict jobstores: a dictionary of job store alias -> job store instance or configuration + dict + :param dict executors: a dictionary of executor alias -> executor instance or configuration + dict + + :ivar int state: current running state of the scheduler (one of the following constants from + ``apscheduler.schedulers.base``: ``STATE_STOPPED``, ``STATE_RUNNING``, ``STATE_PAUSED``) + + .. seealso:: :ref:`scheduler-config` + """ + + _trigger_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.triggers')) + _trigger_classes = {} + _executor_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.executors')) + _executor_classes = {} + _jobstore_plugins = dict((ep.name, ep) for ep in iter_entry_points('apscheduler.jobstores')) + _jobstore_classes = {} + + # + # Public API + # + + def __init__(self, gconfig={}, **options): + super(BaseScheduler, self).__init__() + self._executors = {} + self._executors_lock = self._create_lock() + self._jobstores = {} + self._jobstores_lock = self._create_lock() + self._listeners = [] + self._listeners_lock = self._create_lock() + self._pending_jobs = [] + self.state = STATE_STOPPED + self.configure(gconfig, **options) + + def configure(self, gconfig={}, prefix='apscheduler.', **options): + """ + Reconfigures the scheduler with the given options. + + Can only be done when the scheduler isn't running. + + :param dict gconfig: a "global" configuration dictionary whose values can be overridden by + keyword arguments to this method + :param str|unicode prefix: pick only those keys from ``gconfig`` that are prefixed with + this string (pass an empty string or ``None`` to use all keys) + :raises SchedulerAlreadyRunningError: if the scheduler is already running + + """ + if self.state != STATE_STOPPED: + raise SchedulerAlreadyRunningError + + # If a non-empty prefix was given, strip it from the keys in the + # global configuration dict + if prefix: + prefixlen = len(prefix) + gconfig = dict((key[prefixlen:], value) for key, value in six.iteritems(gconfig) + if key.startswith(prefix)) + + # Create a structure from the dotted options + # (e.g. "a.b.c = d" -> {'a': {'b': {'c': 'd'}}}) + config = {} + for key, value in six.iteritems(gconfig): + parts = key.split('.') + parent = config + key = parts.pop(0) + while parts: + parent = parent.setdefault(key, {}) + key = parts.pop(0) + parent[key] = value + + # Override any options with explicit keyword arguments + config.update(options) + self._configure(config) + + def start(self, paused=False): + """ + Start the configured executors and job stores and begin processing scheduled jobs. 
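+
+    For example (illustrative), a scheduler can be brought up without processing any jobs
+    and resumed later::
+
+        scheduler.start(paused=True)
+        # ... add or inspect jobs while nothing runs ...
+        scheduler.resume()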
+ + :param bool paused: if ``True``, don't start job processing until :meth:`resume` is called + :raises SchedulerAlreadyRunningError: if the scheduler is already running + + """ + if self.state != STATE_STOPPED: + raise SchedulerAlreadyRunningError + + with self._executors_lock: + # Create a default executor if nothing else is configured + if 'default' not in self._executors: + self.add_executor(self._create_default_executor(), 'default') + + # Start all the executors + for alias, executor in six.iteritems(self._executors): + executor.start(self, alias) + + with self._jobstores_lock: + # Create a default job store if nothing else is configured + if 'default' not in self._jobstores: + self.add_jobstore(self._create_default_jobstore(), 'default') + + # Start all the job stores + for alias, store in six.iteritems(self._jobstores): + store.start(self, alias) + + # Schedule all pending jobs + for job, jobstore_alias, replace_existing in self._pending_jobs: + self._real_add_job(job, jobstore_alias, replace_existing) + del self._pending_jobs[:] + + self.state = STATE_PAUSED if paused else STATE_RUNNING + self._logger.info('Scheduler started') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_START)) + + if not paused: + self.wakeup() + + @abstractmethod + def shutdown(self, wait=True): + """ + Shuts down the scheduler, along with its executors and job stores. + + Does not interrupt any currently running jobs. + + :param bool wait: ``True`` to wait until all currently executing jobs have finished + :raises SchedulerNotRunningError: if the scheduler has not been started yet + + """ + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + + self.state = STATE_STOPPED + + with self._jobstores_lock, self._executors_lock: + # Shut down all executors + for executor in six.itervalues(self._executors): + executor.shutdown(wait) + + # Shut down all job stores + for jobstore in six.itervalues(self._jobstores): + jobstore.shutdown() + + self._logger.info('Scheduler has been shut down') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_SHUTDOWN)) + + def pause(self): + """ + Pause job processing in the scheduler. + + This will prevent the scheduler from waking up to do job processing until :meth:`resume` + is called. It will not however stop any already running job processing. + + """ + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + elif self.state == STATE_RUNNING: + self.state = STATE_PAUSED + self._logger.info('Paused scheduler job processing') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_PAUSED)) + + def resume(self): + """Resume job processing in the scheduler.""" + if self.state == STATE_STOPPED: + raise SchedulerNotRunningError + elif self.state == STATE_PAUSED: + self.state = STATE_RUNNING + self._logger.info('Resumed scheduler job processing') + self._dispatch_event(SchedulerEvent(EVENT_SCHEDULER_RESUMED)) + self.wakeup() + + @property + def running(self): + """ + Return ``True`` if the scheduler has been started. + + This is a shortcut for ``scheduler.state != STATE_STOPPED``. + + """ + return self.state != STATE_STOPPED + + def add_executor(self, executor, alias='default', **executor_opts): + """ + Adds an executor to this scheduler. + + Any extra keyword arguments will be passed to the executor plugin's constructor, assuming + that the first argument is the name of an executor plugin. 
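The ``paused`` start mode combines with ``pause()``/``resume()`` above roughly like this (a sketch; the setup step is illustrative)::

    from apscheduler.schedulers.background import BackgroundScheduler

    scheduler = BackgroundScheduler()
    scheduler.start(paused=True)    # executors and job stores start, but no jobs are processed
    # ... add jobs or finish application start-up here ...
    scheduler.resume()              # STATE_PAUSED -> STATE_RUNNING, then wakeup()
    # ... later, on application exit ...
    scheduler.shutdown(wait=True)   # waits for currently executing jobs to finish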
+ + :param str|unicode|apscheduler.executors.base.BaseExecutor executor: either an executor + instance or the name of an executor plugin + :param str|unicode alias: alias for the scheduler + :raises ValueError: if there is already an executor by the given alias + + """ + with self._executors_lock: + if alias in self._executors: + raise ValueError('This scheduler already has an executor by the alias of "%s"' % + alias) + + if isinstance(executor, BaseExecutor): + self._executors[alias] = executor + elif isinstance(executor, six.string_types): + self._executors[alias] = executor = self._create_plugin_instance( + 'executor', executor, executor_opts) + else: + raise TypeError('Expected an executor instance or a string, got %s instead' % + executor.__class__.__name__) + + # Start the executor right away if the scheduler is running + if self.state != STATE_STOPPED: + executor.start(self, alias) + + self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_ADDED, alias)) + + def remove_executor(self, alias, shutdown=True): + """ + Removes the executor by the given alias from this scheduler. + + :param str|unicode alias: alias of the executor + :param bool shutdown: ``True`` to shut down the executor after + removing it + + """ + with self._executors_lock: + executor = self._lookup_executor(alias) + del self._executors[alias] + + if shutdown: + executor.shutdown() + + self._dispatch_event(SchedulerEvent(EVENT_EXECUTOR_REMOVED, alias)) + + def add_jobstore(self, jobstore, alias='default', **jobstore_opts): + """ + Adds a job store to this scheduler. + + Any extra keyword arguments will be passed to the job store plugin's constructor, assuming + that the first argument is the name of a job store plugin. + + :param str|unicode|apscheduler.jobstores.base.BaseJobStore jobstore: job store to be added + :param str|unicode alias: alias for the job store + :raises ValueError: if there is already a job store by the given alias + + """ + with self._jobstores_lock: + if alias in self._jobstores: + raise ValueError('This scheduler already has a job store by the alias of "%s"' % + alias) + + if isinstance(jobstore, BaseJobStore): + self._jobstores[alias] = jobstore + elif isinstance(jobstore, six.string_types): + self._jobstores[alias] = jobstore = self._create_plugin_instance( + 'jobstore', jobstore, jobstore_opts) + else: + raise TypeError('Expected a job store instance or a string, got %s instead' % + jobstore.__class__.__name__) + + # Start the job store right away if the scheduler isn't stopped + if self.state != STATE_STOPPED: + jobstore.start(self, alias) + + # Notify listeners that a new job store has been added + self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_ADDED, alias)) + + # Notify the scheduler so it can scan the new job store for jobs + if self.state != STATE_STOPPED: + self.wakeup() + + def remove_jobstore(self, alias, shutdown=True): + """ + Removes the job store by the given alias from this scheduler. + + :param str|unicode alias: alias of the job store + :param bool shutdown: ``True`` to shut down the job store after removing it + + """ + with self._jobstores_lock: + jobstore = self._lookup_jobstore(alias) + del self._jobstores[alias] + + if shutdown: + jobstore.shutdown() + + self._dispatch_event(SchedulerEvent(EVENT_JOBSTORE_REMOVED, alias)) + + def add_listener(self, callback, mask=EVENT_ALL): + """ + add_listener(callback, mask=EVENT_ALL) + + Adds a listener for scheduler events. + + When a matching event occurs, ``callback`` is executed with the event object as its + sole argument. 
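A sketch of adding an extra executor and job store by instance, as the methods above allow (the aliases are illustrative)::

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.executors.pool import ThreadPoolExecutor
    from apscheduler.jobstores.memory import MemoryJobStore

    scheduler = BackgroundScheduler()
    scheduler.add_executor(ThreadPoolExecutor(max_workers=4), alias='workers')
    scheduler.add_jobstore(MemoryJobStore(), alias='transient')
    # remove_executor('workers') and remove_jobstore('transient') undo the above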
If the ``mask`` parameter is not provided, the callback will receive events + of all types. + + :param callback: any callable that takes one argument + :param int mask: bitmask that indicates which events should be + listened to + + .. seealso:: :mod:`apscheduler.events` + .. seealso:: :ref:`scheduler-events` + + """ + with self._listeners_lock: + self._listeners.append((callback, mask)) + + def remove_listener(self, callback): + """Removes a previously added event listener.""" + + with self._listeners_lock: + for i, (cb, _) in enumerate(self._listeners): + if callback == cb: + del self._listeners[i] + + def add_job(self, func, trigger=None, args=None, kwargs=None, id=None, name=None, + misfire_grace_time=undefined, coalesce=undefined, max_instances=undefined, + next_run_time=undefined, jobstore='default', executor='default', + replace_existing=False, **trigger_args): + """ + add_job(func, trigger=None, args=None, kwargs=None, id=None, \ + name=None, misfire_grace_time=undefined, coalesce=undefined, \ + max_instances=undefined, next_run_time=undefined, \ + jobstore='default', executor='default', \ + replace_existing=False, **trigger_args) + + Adds the given job to the job list and wakes up the scheduler if it's already running. + + Any option that defaults to ``undefined`` will be replaced with the corresponding default + value when the job is scheduled (which happens when the scheduler is started, or + immediately if the scheduler is already running). + + The ``func`` argument can be given either as a callable object or a textual reference in + the ``package.module:some.object`` format, where the first half (separated by ``:``) is an + importable module and the second half is a reference to the callable object, relative to + the module. + + The ``trigger`` argument can either be: + #. the alias name of the trigger (e.g. ``date``, ``interval`` or ``cron``), in which case + any extra keyword arguments to this method are passed on to the trigger's constructor + #. 
an instance of a trigger class + + :param func: callable (or a textual reference to one) to run at the given time + :param str|apscheduler.triggers.base.BaseTrigger trigger: trigger that determines when + ``func`` is called + :param list|tuple args: list of positional arguments to call func with + :param dict kwargs: dict of keyword arguments to call func with + :param str|unicode id: explicit identifier for the job (for modifying it later) + :param str|unicode name: textual description of the job + :param int misfire_grace_time: seconds after the designated runtime that the job is still + allowed to be run + :param bool coalesce: run once instead of many times if the scheduler determines that the + job should be run more than once in succession + :param int max_instances: maximum number of concurrently running instances allowed for this + job + :param datetime next_run_time: when to first run the job, regardless of the trigger (pass + ``None`` to add the job as paused) + :param str|unicode jobstore: alias of the job store to store the job in + :param str|unicode executor: alias of the executor to run the job with + :param bool replace_existing: ``True`` to replace an existing job with the same ``id`` + (but retain the number of runs from the existing one) + :rtype: Job + + """ + job_kwargs = { + 'trigger': self._create_trigger(trigger, trigger_args), + 'executor': executor, + 'func': func, + 'args': tuple(args) if args is not None else (), + 'kwargs': dict(kwargs) if kwargs is not None else {}, + 'id': id, + 'name': name, + 'misfire_grace_time': misfire_grace_time, + 'coalesce': coalesce, + 'max_instances': max_instances, + 'next_run_time': next_run_time + } + job_kwargs = dict((key, value) for key, value in six.iteritems(job_kwargs) if + value is not undefined) + job = Job(self, **job_kwargs) + + # Don't really add jobs to job stores before the scheduler is up and running + with self._jobstores_lock: + if self.state == STATE_STOPPED: + self._pending_jobs.append((job, jobstore, replace_existing)) + self._logger.info('Adding job tentatively -- it will be properly scheduled when ' + 'the scheduler starts') + else: + self._real_add_job(job, jobstore, replace_existing) + + return job + + def scheduled_job(self, trigger, args=None, kwargs=None, id=None, name=None, + misfire_grace_time=undefined, coalesce=undefined, max_instances=undefined, + next_run_time=undefined, jobstore='default', executor='default', + **trigger_args): + """ + scheduled_job(trigger, args=None, kwargs=None, id=None, \ + name=None, misfire_grace_time=undefined, \ + coalesce=undefined, max_instances=undefined, \ + next_run_time=undefined, jobstore='default', \ + executor='default',**trigger_args) + + A decorator version of :meth:`add_job`, except that ``replace_existing`` is always + ``True``. + + .. important:: The ``id`` argument must be given if scheduling a job in a persistent job + store. The scheduler cannot, however, enforce this requirement. + + """ + def inner(func): + self.add_job(func, trigger, args, kwargs, id, name, misfire_grace_time, coalesce, + max_instances, next_run_time, jobstore, executor, True, **trigger_args) + return func + return inner + + def modify_job(self, job_id, jobstore=None, **changes): + """ + Modifies the properties of a single job. + + Modifications are passed to this method as extra keyword arguments. 
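Both scheduling styles described above in one sketch (function names, ids and schedules are illustrative)::

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.cron import CronTrigger
    from apscheduler.triggers.interval import IntervalTrigger

    scheduler = BackgroundScheduler()

    def refresh_cache():
        pass

    # Direct call: explicit id so the job can be looked up or modified later
    scheduler.add_job(refresh_cache, IntervalTrigger(minutes=15), id='cache.refresh',
                      max_instances=1, coalesce=True)

    # Decorator form: replace_existing is always True here; runs daily at 03:00
    @scheduler.scheduled_job(CronTrigger(hour=3), id='nightly.cleanup')
    def nightly_cleanup():
        pass

    scheduler.start()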
+ + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job: the relevant job instance + + """ + with self._jobstores_lock: + job, jobstore = self._lookup_job(job_id, jobstore) + job._modify(**changes) + if jobstore: + self._lookup_jobstore(jobstore).update_job(job) + + self._dispatch_event(JobEvent(EVENT_JOB_MODIFIED, job_id, jobstore)) + + # Wake up the scheduler since the job's next run time may have been changed + if self.state == STATE_RUNNING: + self.wakeup() + + return job + + def reschedule_job(self, job_id, jobstore=None, trigger=None, **trigger_args): + """ + Constructs a new trigger for a job and updates its next run time. + + Extra keyword arguments are passed directly to the trigger's constructor. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :param trigger: alias of the trigger type or a trigger instance + :return Job: the relevant job instance + + """ + trigger = self._create_trigger(trigger, trigger_args) + now = datetime.now(self.timezone) + next_run_time = trigger.get_next_fire_time(None, now) + return self.modify_job(job_id, jobstore, trigger=trigger, next_run_time=next_run_time) + + def pause_job(self, job_id, jobstore=None): + """ + Causes the given job not to be executed until it is explicitly resumed. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job: the relevant job instance + + """ + return self.modify_job(job_id, jobstore, next_run_time=None) + + def resume_job(self, job_id, jobstore=None): + """ + Resumes the schedule of the given job, or removes the job if its schedule is finished. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :return Job|None: the relevant job instance if the job was rescheduled, or ``None`` if no + next run time could be calculated and the job was removed + + """ + with self._jobstores_lock: + job, jobstore = self._lookup_job(job_id, jobstore) + now = datetime.now(self.timezone) + next_run_time = job.trigger.get_next_fire_time(None, now) + if next_run_time: + return self.modify_job(job_id, jobstore, next_run_time=next_run_time) + else: + self.remove_job(job.id, jobstore) + + def get_jobs(self, jobstore=None, pending=None): + """ + Returns a list of pending jobs (if the scheduler hasn't been started yet) and scheduled + jobs, either from a specific job store or from all of them. + + If the scheduler has not been started yet, only pending jobs can be returned because the + job stores haven't been started yet either. 
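The per-job control methods above compose as follows; a sketch using an illustrative job id::

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    scheduler = BackgroundScheduler()
    scheduler.add_job(lambda: None, IntervalTrigger(minutes=15), id='cache.refresh')

    scheduler.modify_job('cache.refresh', max_instances=2)                          # change a property
    scheduler.reschedule_job('cache.refresh', trigger=IntervalTrigger(minutes=30))  # new trigger
    scheduler.pause_job('cache.refresh')     # next_run_time becomes None
    scheduler.resume_job('cache.refresh')    # recomputed from the trigger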
+ + :param str|unicode jobstore: alias of the job store + :param bool pending: **DEPRECATED** + :rtype: list[Job] + + """ + if pending is not None: + warnings.warn('The "pending" option is deprecated -- get_jobs() always returns ' + 'pending jobs if the scheduler has been started and scheduled jobs ' + 'otherwise', DeprecationWarning) + + with self._jobstores_lock: + jobs = [] + if self.state == STATE_STOPPED: + for job, alias, replace_existing in self._pending_jobs: + if jobstore is None or alias == jobstore: + jobs.append(job) + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore is None or alias == jobstore: + jobs.extend(store.get_all_jobs()) + + return jobs + + def get_job(self, job_id, jobstore=None): + """ + Returns the Job that matches the given ``job_id``. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that most likely contains the job + :return: the Job by the given ID, or ``None`` if it wasn't found + :rtype: Job + + """ + with self._jobstores_lock: + try: + return self._lookup_job(job_id, jobstore)[0] + except JobLookupError: + return + + def remove_job(self, job_id, jobstore=None): + """ + Removes a job, preventing it from being run any more. + + :param str|unicode job_id: the identifier of the job + :param str|unicode jobstore: alias of the job store that contains the job + :raises JobLookupError: if the job was not found + + """ + jobstore_alias = None + with self._jobstores_lock: + if self.state == STATE_STOPPED: + # Check if the job is among the pending jobs + if self.state == STATE_STOPPED: + for i, (job, alias, replace_existing) in enumerate(self._pending_jobs): + if job.id == job_id and jobstore in (None, alias): + del self._pending_jobs[i] + jobstore_alias = alias + break + else: + # Otherwise, try to remove it from each store until it succeeds or we run out of + # stores to check + for alias, store in six.iteritems(self._jobstores): + if jobstore in (None, alias): + try: + store.remove_job(job_id) + jobstore_alias = alias + break + except JobLookupError: + continue + + if jobstore_alias is None: + raise JobLookupError(job_id) + + # Notify listeners that a job has been removed + event = JobEvent(EVENT_JOB_REMOVED, job_id, jobstore_alias) + self._dispatch_event(event) + + self._logger.info('Removed job %s', job_id) + + def remove_all_jobs(self, jobstore=None): + """ + Removes all jobs from the specified job store, or all job stores if none is given. + + :param str|unicode jobstore: alias of the job store + + """ + with self._jobstores_lock: + if self.state == STATE_STOPPED: + if jobstore: + self._pending_jobs = [pending for pending in self._pending_jobs if + pending[1] != jobstore] + else: + self._pending_jobs = [] + else: + for alias, store in six.iteritems(self._jobstores): + if jobstore in (None, alias): + store.remove_all_jobs() + + self._dispatch_event(SchedulerEvent(EVENT_ALL_JOBS_REMOVED, jobstore)) + + def print_jobs(self, jobstore=None, out=None): + """ + print_jobs(jobstore=None, out=sys.stdout) + + Prints out a textual listing of all jobs currently scheduled on either all job stores or + just a specific one. 
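The lookup, listing and removal helpers above, in a short sketch (the job id is illustrative)::

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    scheduler = BackgroundScheduler()
    scheduler.add_job(lambda: None, IntervalTrigger(hours=1), id='hourly')

    print(scheduler.get_job('hourly'))    # a single Job, or None if unknown
    print(scheduler.get_jobs())           # pending and/or scheduled jobs
    scheduler.print_jobs()                # human-readable listing to stdout

    scheduler.remove_job('hourly')        # raises JobLookupError if not found
    scheduler.remove_all_jobs()           # or limit to one store: remove_all_jobs('default')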
+ + :param str|unicode jobstore: alias of the job store, ``None`` to list jobs from all stores + :param file out: a file-like object to print to (defaults to **sys.stdout** if nothing is + given) + + """ + out = out or sys.stdout + with self._jobstores_lock: + if self.state == STATE_STOPPED: + print(u'Pending jobs:', file=out) + if self._pending_jobs: + for job, jobstore_alias, replace_existing in self._pending_jobs: + if jobstore in (None, jobstore_alias): + print(u' %s' % job, file=out) + else: + print(u' No pending jobs', file=out) + else: + for alias, store in sorted(six.iteritems(self._jobstores)): + if jobstore in (None, alias): + print(u'Jobstore %s:' % alias, file=out) + jobs = store.get_all_jobs() + if jobs: + for job in jobs: + print(u' %s' % job, file=out) + else: + print(u' No scheduled jobs', file=out) + + @abstractmethod + def wakeup(self): + """ + Notifies the scheduler that there may be jobs due for execution. + Triggers :meth:`_process_jobs` to be run in an implementation specific manner. + """ + + # + # Private API + # + + def _configure(self, config): + # Set general options + self._logger = maybe_ref(config.pop('logger', None)) or getLogger('apscheduler.scheduler') + self.timezone = astimezone(config.pop('timezone', None)) or get_localzone() + self.jobstore_retry_interval = float(config.pop('jobstore_retry_interval', 10)) + + # Set the job defaults + job_defaults = config.get('job_defaults', {}) + self._job_defaults = { + 'misfire_grace_time': asint(job_defaults.get('misfire_grace_time', 1)), + 'coalesce': asbool(job_defaults.get('coalesce', True)), + 'max_instances': asint(job_defaults.get('max_instances', 1)) + } + + # Configure executors + self._executors.clear() + for alias, value in six.iteritems(config.get('executors', {})): + if isinstance(value, BaseExecutor): + self.add_executor(value, alias) + elif isinstance(value, MutableMapping): + executor_class = value.pop('class', None) + plugin = value.pop('type', None) + if plugin: + executor = self._create_plugin_instance('executor', plugin, value) + elif executor_class: + cls = maybe_ref(executor_class) + executor = cls(**value) + else: + raise ValueError( + 'Cannot create executor "%s" -- either "type" or "class" must be defined' % + alias) + + self.add_executor(executor, alias) + else: + raise TypeError( + "Expected executor instance or dict for executors['%s'], got %s instead" % + (alias, value.__class__.__name__)) + + # Configure job stores + self._jobstores.clear() + for alias, value in six.iteritems(config.get('jobstores', {})): + if isinstance(value, BaseJobStore): + self.add_jobstore(value, alias) + elif isinstance(value, MutableMapping): + jobstore_class = value.pop('class', None) + plugin = value.pop('type', None) + if plugin: + jobstore = self._create_plugin_instance('jobstore', plugin, value) + elif jobstore_class: + cls = maybe_ref(jobstore_class) + jobstore = cls(**value) + else: + raise ValueError( + 'Cannot create job store "%s" -- either "type" or "class" must be ' + 'defined' % alias) + + self.add_jobstore(jobstore, alias) + else: + raise TypeError( + "Expected job store instance or dict for jobstores['%s'], got %s instead" % + (alias, value.__class__.__name__)) + + def _create_default_executor(self): + """Creates a default executor store, specific to the particular scheduler type.""" + return ThreadPoolExecutor() + + def _create_default_jobstore(self): + """Creates a default job store, specific to the particular scheduler type.""" + return MemoryJobStore() + + def _lookup_executor(self, alias): + 
""" + Returns the executor instance by the given name from the list of executors that were added + to this scheduler. + + :type alias: str + :raises KeyError: if no executor by the given alias is not found + + """ + try: + return self._executors[alias] + except KeyError: + raise KeyError('No such executor: %s' % alias) + + def _lookup_jobstore(self, alias): + """ + Returns the job store instance by the given name from the list of job stores that were + added to this scheduler. + + :type alias: str + :raises KeyError: if no job store by the given alias is not found + + """ + try: + return self._jobstores[alias] + except KeyError: + raise KeyError('No such job store: %s' % alias) + + def _lookup_job(self, job_id, jobstore_alias): + """ + Finds a job by its ID. + + :type job_id: str + :param str jobstore_alias: alias of a job store to look in + :return tuple[Job, str]: a tuple of job, jobstore alias (jobstore alias is None in case of + a pending job) + :raises JobLookupError: if no job by the given ID is found. + + """ + if self.state == STATE_STOPPED: + # Check if the job is among the pending jobs + for job, alias, replace_existing in self._pending_jobs: + if job.id == job_id: + return job, None + else: + # Look in all job stores + for alias, store in six.iteritems(self._jobstores): + if jobstore_alias in (None, alias): + job = store.lookup_job(job_id) + if job is not None: + return job, alias + + raise JobLookupError(job_id) + + def _dispatch_event(self, event): + """ + Dispatches the given event to interested listeners. + + :param SchedulerEvent event: the event to send + + """ + with self._listeners_lock: + listeners = tuple(self._listeners) + + for cb, mask in listeners: + if event.code & mask: + try: + cb(event) + except: + self._logger.exception('Error notifying listener') + + def _real_add_job(self, job, jobstore_alias, replace_existing): + """ + :param Job job: the job to add + :param bool replace_existing: ``True`` to use update_job() in case the job already exists + in the store + + """ + # Fill in undefined values with defaults + replacements = {} + for key, value in six.iteritems(self._job_defaults): + if not hasattr(job, key): + replacements[key] = value + + # Calculate the next run time if there is none defined + if not hasattr(job, 'next_run_time'): + now = datetime.now(self.timezone) + replacements['next_run_time'] = job.trigger.get_next_fire_time(None, now) + + # Apply any replacements + job._modify(**replacements) + + # Add the job to the given job store + store = self._lookup_jobstore(jobstore_alias) + try: + store.add_job(job) + except ConflictingIdError: + if replace_existing: + store.update_job(job) + else: + raise + + # Mark the job as no longer pending + job._jobstore_alias = jobstore_alias + + # Notify listeners that a new job has been added + event = JobEvent(EVENT_JOB_ADDED, job.id, jobstore_alias) + self._dispatch_event(event) + + self._logger.info('Added job "%s" to job store "%s"', job.name, jobstore_alias) + + # Notify the scheduler about the new job + if self.state == STATE_RUNNING: + self.wakeup() + + def _create_plugin_instance(self, type_, alias, constructor_kwargs): + """Creates an instance of the given plugin type, loading the plugin first if necessary.""" + plugin_container, class_container, base_class = { + 'trigger': (self._trigger_plugins, self._trigger_classes, BaseTrigger), + 'jobstore': (self._jobstore_plugins, self._jobstore_classes, BaseJobStore), + 'executor': (self._executor_plugins, self._executor_classes, BaseExecutor) + }[type_] + + try: + 
plugin_cls = class_container[alias] + except KeyError: + if alias in plugin_container: + plugin_cls = class_container[alias] = plugin_container[alias].load() + if not issubclass(plugin_cls, base_class): + raise TypeError('The {0} entry point does not point to a {0} class'. + format(type_)) + else: + raise LookupError('No {0} by the name "{1}" was found'.format(type_, alias)) + + return plugin_cls(**constructor_kwargs) + + def _create_trigger(self, trigger, trigger_args): + if isinstance(trigger, BaseTrigger): + return trigger + elif trigger is None: + trigger = 'date' + elif not isinstance(trigger, six.string_types): + raise TypeError('Expected a trigger instance or string, got %s instead' % + trigger.__class__.__name__) + + # Use the scheduler's time zone if nothing else is specified + trigger_args.setdefault('timezone', self.timezone) + + # Instantiate the trigger class + return self._create_plugin_instance('trigger', trigger, trigger_args) + + def _create_lock(self): + """Creates a reentrant lock object.""" + return RLock() + + def _process_jobs(self): + """ + Iterates through jobs in every jobstore, starts jobs that are due and figures out how long + to wait for the next round. + + If the ``get_due_jobs()`` call raises an exception, a new wakeup is scheduled in at least + ``jobstore_retry_interval`` seconds. + + """ + if self.state == STATE_PAUSED: + self._logger.debug('Scheduler is paused -- not processing jobs') + return None + + self._logger.debug('Looking for jobs to run') + now = datetime.now(self.timezone) + next_wakeup_time = None + events = [] + + with self._jobstores_lock: + for jobstore_alias, jobstore in six.iteritems(self._jobstores): + try: + due_jobs = jobstore.get_due_jobs(now) + except Exception as e: + # Schedule a wakeup at least in jobstore_retry_interval seconds + self._logger.warning('Error getting due jobs from job store %r: %s', + jobstore_alias, e) + retry_wakeup_time = now + timedelta(seconds=self.jobstore_retry_interval) + if not next_wakeup_time or next_wakeup_time > retry_wakeup_time: + next_wakeup_time = retry_wakeup_time + + continue + + for job in due_jobs: + # Look up the job's executor + try: + executor = self._lookup_executor(job.executor) + except: + self._logger.error( + 'Executor lookup ("%s") failed for job "%s" -- removing it from the ' + 'job store', job.executor, job) + self.remove_job(job.id, jobstore_alias) + continue + + run_times = job._get_run_times(now) + run_times = run_times[-1:] if run_times and job.coalesce else run_times + if run_times: + try: + executor.submit_job(job, run_times) + except MaxInstancesReachedError: + self._logger.warning( + 'Execution of job "%s" skipped: maximum number of running ' + 'instances reached (%d)', job, job.max_instances) + event = JobSubmissionEvent(EVENT_JOB_MAX_INSTANCES, job.id, + jobstore_alias, run_times) + events.append(event) + except: + self._logger.exception('Error submitting job "%s" to executor "%s"', + job, job.executor) + else: + event = JobSubmissionEvent(EVENT_JOB_SUBMITTED, job.id, jobstore_alias, + run_times) + events.append(event) + + # Update the job if it has a next execution time. + # Otherwise remove it from the job store. 
+ job_next_run = job.trigger.get_next_fire_time(run_times[-1], now) + if job_next_run: + job._modify(next_run_time=job_next_run) + jobstore.update_job(job) + else: + self.remove_job(job.id, jobstore_alias) + + # Set a new next wakeup time if there isn't one yet or + # the jobstore has an even earlier one + jobstore_next_run_time = jobstore.get_next_run_time() + if jobstore_next_run_time and (next_wakeup_time is None or + jobstore_next_run_time < next_wakeup_time): + next_wakeup_time = jobstore_next_run_time.astimezone(self.timezone) + + # Dispatch collected events + for event in events: + self._dispatch_event(event) + + # Determine the delay until this method should be called again + if self.state == STATE_PAUSED: + wait_seconds = None + self._logger.debug('Scheduler is paused; waiting until resume() is called') + elif next_wakeup_time is None: + wait_seconds = None + self._logger.debug('No jobs; waiting until a job is added') + else: + wait_seconds = max(timedelta_seconds(next_wakeup_time - now), 0) + self._logger.debug('Next wakeup is due at %s (in %f seconds)', next_wakeup_time, + wait_seconds) + + return wait_seconds diff --git a/lib/apscheduler/schedulers/blocking.py b/lib/apscheduler/schedulers/blocking.py new file mode 100644 index 00000000..e6171575 --- /dev/null +++ b/lib/apscheduler/schedulers/blocking.py @@ -0,0 +1,33 @@ +from __future__ import absolute_import + +from threading import Event + +from apscheduler.schedulers.base import BaseScheduler, STATE_STOPPED +from apscheduler.util import TIMEOUT_MAX + + +class BlockingScheduler(BaseScheduler): + """ + A scheduler that runs in the foreground + (:meth:`~apscheduler.schedulers.base.BaseScheduler.start` will block). + """ + _event = None + + def start(self, *args, **kwargs): + self._event = Event() + super(BlockingScheduler, self).start(*args, **kwargs) + self._main_loop() + + def shutdown(self, wait=True): + super(BlockingScheduler, self).shutdown(wait) + self._event.set() + + def _main_loop(self): + wait_seconds = TIMEOUT_MAX + while self.state != STATE_STOPPED: + self._event.wait(wait_seconds) + self._event.clear() + wait_seconds = self._process_jobs() + + def wakeup(self): + self._event.set() diff --git a/lib/apscheduler/schedulers/gevent.py b/lib/apscheduler/schedulers/gevent.py new file mode 100644 index 00000000..d48ed74a --- /dev/null +++ b/lib/apscheduler/schedulers/gevent.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import + +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.schedulers.base import BaseScheduler + +try: + from gevent.event import Event + from gevent.lock import RLock + import gevent +except ImportError: # pragma: nocover + raise ImportError('GeventScheduler requires gevent installed') + + +class GeventScheduler(BlockingScheduler): + """A scheduler that runs as a Gevent greenlet.""" + + _greenlet = None + + def start(self, *args, **kwargs): + self._event = Event() + BaseScheduler.start(self, *args, **kwargs) + self._greenlet = gevent.spawn(self._main_loop) + return self._greenlet + + def shutdown(self, *args, **kwargs): + super(GeventScheduler, self).shutdown(*args, **kwargs) + self._greenlet.join() + del self._greenlet + + def _create_lock(self): + return RLock() + + def _create_default_executor(self): + from apscheduler.executors.gevent import GeventExecutor + return GeventExecutor() diff --git a/lib/apscheduler/schedulers/qt.py b/lib/apscheduler/schedulers/qt.py new file mode 100644 index 00000000..092533e9 --- /dev/null +++ b/lib/apscheduler/schedulers/qt.py @@ 
-0,0 +1,42 @@ +from __future__ import absolute_import + +from apscheduler.schedulers.base import BaseScheduler + +try: + from PyQt5.QtCore import QObject, QTimer +except ImportError: # pragma: nocover + try: + from PyQt4.QtCore import QObject, QTimer + except ImportError: + try: + from PySide.QtCore import QObject, QTimer # flake8: noqa + except ImportError: + raise ImportError('QtScheduler requires either PyQt5, PyQt4 or PySide installed') + + +class QtScheduler(BaseScheduler): + """A scheduler that runs in a Qt event loop.""" + + _timer = None + + def shutdown(self, *args, **kwargs): + super(QtScheduler, self).shutdown(*args, **kwargs) + self._stop_timer() + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timer = QTimer.singleShot(wait_seconds * 1000, self._process_jobs) + + def _stop_timer(self): + if self._timer: + if self._timer.isActive(): + self._timer.stop() + del self._timer + + def wakeup(self): + self._start_timer(0) + + def _process_jobs(self): + wait_seconds = super(QtScheduler, self)._process_jobs() + self._start_timer(wait_seconds) diff --git a/lib/apscheduler/schedulers/tornado.py b/lib/apscheduler/schedulers/tornado.py new file mode 100644 index 00000000..0a9171f2 --- /dev/null +++ b/lib/apscheduler/schedulers/tornado.py @@ -0,0 +1,63 @@ +from __future__ import absolute_import + +from datetime import timedelta +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + from tornado.ioloop import IOLoop +except ImportError: # pragma: nocover + raise ImportError('TornadoScheduler requires tornado installed') + + +def run_in_ioloop(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + self._ioloop.add_callback(func, self, *args, **kwargs) + return wrapper + + +class TornadoScheduler(BaseScheduler): + """ + A scheduler that runs on a Tornado IOLoop. + + The default executor can run jobs based on native coroutines (``async def``). 
+ + =========== =============================================================== + ``io_loop`` Tornado IOLoop instance to use (defaults to the global IO loop) + =========== =============================================================== + """ + + _ioloop = None + _timeout = None + + @run_in_ioloop + def shutdown(self, wait=True): + super(TornadoScheduler, self).shutdown(wait) + self._stop_timer() + + def _configure(self, config): + self._ioloop = maybe_ref(config.pop('io_loop', None)) or IOLoop.current() + super(TornadoScheduler, self)._configure(config) + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._timeout = self._ioloop.add_timeout(timedelta(seconds=wait_seconds), self.wakeup) + + def _stop_timer(self): + if self._timeout: + self._ioloop.remove_timeout(self._timeout) + del self._timeout + + def _create_default_executor(self): + from apscheduler.executors.tornado import TornadoExecutor + return TornadoExecutor() + + @run_in_ioloop + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) diff --git a/lib/apscheduler/schedulers/twisted.py b/lib/apscheduler/schedulers/twisted.py new file mode 100644 index 00000000..6b43a84b --- /dev/null +++ b/lib/apscheduler/schedulers/twisted.py @@ -0,0 +1,62 @@ +from __future__ import absolute_import + +from functools import wraps + +from apscheduler.schedulers.base import BaseScheduler +from apscheduler.util import maybe_ref + +try: + from twisted.internet import reactor as default_reactor +except ImportError: # pragma: nocover + raise ImportError('TwistedScheduler requires Twisted installed') + + +def run_in_reactor(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + self._reactor.callFromThread(func, self, *args, **kwargs) + return wrapper + + +class TwistedScheduler(BaseScheduler): + """ + A scheduler that runs on a Twisted reactor. + + Extra options: + + =========== ======================================================== + ``reactor`` Reactor instance to use (defaults to the global reactor) + =========== ======================================================== + """ + + _reactor = None + _delayedcall = None + + def _configure(self, config): + self._reactor = maybe_ref(config.pop('reactor', default_reactor)) + super(TwistedScheduler, self)._configure(config) + + @run_in_reactor + def shutdown(self, wait=True): + super(TwistedScheduler, self).shutdown(wait) + self._stop_timer() + + def _start_timer(self, wait_seconds): + self._stop_timer() + if wait_seconds is not None: + self._delayedcall = self._reactor.callLater(wait_seconds, self.wakeup) + + def _stop_timer(self): + if self._delayedcall and self._delayedcall.active(): + self._delayedcall.cancel() + del self._delayedcall + + @run_in_reactor + def wakeup(self): + self._stop_timer() + wait_seconds = self._process_jobs() + self._start_timer(wait_seconds) + + def _create_default_executor(self): + from apscheduler.executors.twisted import TwistedExecutor + return TwistedExecutor() diff --git a/lib/apscheduler/threadpool.py b/lib/apscheduler/threadpool.py deleted file mode 100644 index 8ec47da0..00000000 --- a/lib/apscheduler/threadpool.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -Generic thread pool class. Modeled after Java's ThreadPoolExecutor. -Please note that this ThreadPool does *not* fully implement the PEP 3148 -ThreadPool! 
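For the event-loop schedulers above (Tornado shown here; the Twisted and Gevent variants follow the same pattern), a sketch that assumes tornado is installed and uses an illustrative job::

    from tornado.ioloop import IOLoop
    from apscheduler.schedulers.tornado import TornadoScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    def poll():
        print('polling')

    scheduler = TornadoScheduler()
    scheduler.add_job(poll, IntervalTrigger(seconds=10))
    scheduler.start()           # timers go onto the IOLoop instead of a dedicated thread
    IOLoop.current().start()    # the event loop, not the scheduler, drives job processing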
-""" - -from threading import Thread, Lock, currentThread -from weakref import ref -import logging -import atexit - -try: - from queue import Queue, Empty -except ImportError: - from Queue import Queue, Empty - -logger = logging.getLogger(__name__) -_threadpools = set() - - -# Worker threads are daemonic in order to let the interpreter exit without -# an explicit shutdown of the thread pool. The following trick is necessary -# to allow worker threads to finish cleanly. -def _shutdown_all(): - for pool_ref in tuple(_threadpools): - pool = pool_ref() - if pool: - pool.shutdown() - -atexit.register(_shutdown_all) - - -class ThreadPool(object): - def __init__(self, core_threads=0, max_threads=20, keepalive=1): - """ - :param core_threads: maximum number of persistent threads in the pool - :param max_threads: maximum number of total threads in the pool - :param thread_class: callable that creates a Thread object - :param keepalive: seconds to keep non-core worker threads waiting - for new tasks - """ - self.core_threads = core_threads - self.max_threads = max(max_threads, core_threads, 1) - self.keepalive = keepalive - self._queue = Queue() - self._threads_lock = Lock() - self._threads = set() - self._shutdown = False - - _threadpools.add(ref(self)) - logger.info('Started thread pool with %d core threads and %s maximum ' - 'threads', core_threads, max_threads or 'unlimited') - - def _adjust_threadcount(self): - self._threads_lock.acquire() - try: - if self.num_threads < self.max_threads: - self._add_thread(self.num_threads < self.core_threads) - finally: - self._threads_lock.release() - - def _add_thread(self, core): - t = Thread(target=self._run_jobs, args=(core,)) - t.setDaemon(True) - t.start() - self._threads.add(t) - - def _run_jobs(self, core): - logger.debug('Started worker thread') - block = True - timeout = None - if not core: - block = self.keepalive > 0 - timeout = self.keepalive - - while True: - try: - func, args, kwargs = self._queue.get(block, timeout) - except Empty: - break - - if self._shutdown: - break - - try: - func(*args, **kwargs) - except: - logger.exception('Error in worker thread') - - self._threads_lock.acquire() - self._threads.remove(currentThread()) - self._threads_lock.release() - - logger.debug('Exiting worker thread') - - @property - def num_threads(self): - return len(self._threads) - - def submit(self, func, *args, **kwargs): - if self._shutdown: - raise RuntimeError('Cannot schedule new tasks after shutdown') - - self._queue.put((func, args, kwargs)) - self._adjust_threadcount() - - def shutdown(self, wait=True): - if self._shutdown: - return - - logging.info('Shutting down thread pool') - self._shutdown = True - _threadpools.remove(ref(self)) - - self._threads_lock.acquire() - for _ in range(self.num_threads): - self._queue.put((None, None, None)) - self._threads_lock.release() - - if wait: - self._threads_lock.acquire() - threads = tuple(self._threads) - self._threads_lock.release() - for thread in threads: - thread.join() - - def __repr__(self): - if self.max_threads: - threadcount = '%d/%d' % (self.num_threads, self.max_threads) - else: - threadcount = '%d' % self.num_threads - - return '' % (id(self), threadcount) diff --git a/lib/apscheduler/triggers/__init__.py b/lib/apscheduler/triggers/__init__.py index 74a97884..e69de29b 100644 --- a/lib/apscheduler/triggers/__init__.py +++ b/lib/apscheduler/triggers/__init__.py @@ -1,3 +0,0 @@ -from apscheduler.triggers.cron import CronTrigger -from apscheduler.triggers.interval import IntervalTrigger -from 
apscheduler.triggers.simple import SimpleTrigger diff --git a/lib/apscheduler/triggers/base.py b/lib/apscheduler/triggers/base.py new file mode 100644 index 00000000..ba98632e --- /dev/null +++ b/lib/apscheduler/triggers/base.py @@ -0,0 +1,19 @@ +from abc import ABCMeta, abstractmethod + +import six + + +class BaseTrigger(six.with_metaclass(ABCMeta)): + """Abstract base class that defines the interface that every trigger must implement.""" + + __slots__ = () + + @abstractmethod + def get_next_fire_time(self, previous_fire_time, now): + """ + Returns the next datetime to fire on, If no such datetime can be calculated, returns + ``None``. + + :param datetime.datetime previous_fire_time: the previous time the trigger was fired + :param datetime.datetime now: current datetime + """ diff --git a/lib/apscheduler/triggers/cron/__init__.py b/lib/apscheduler/triggers/cron/__init__.py index 3f8d9a8f..eccee0c0 100644 --- a/lib/apscheduler/triggers/cron/__init__.py +++ b/lib/apscheduler/triggers/cron/__init__.py @@ -1,32 +1,73 @@ -from datetime import date, datetime +from datetime import datetime, timedelta -from apscheduler.triggers.cron.fields import * -from apscheduler.util import datetime_ceil, convert_to_datetime +from tzlocal import get_localzone +import six + +from apscheduler.triggers.base import BaseTrigger +from apscheduler.triggers.cron.fields import ( + BaseField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES) +from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone -class CronTrigger(object): - FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', - 'minute', 'second') - FIELDS_MAP = {'year': BaseField, - 'month': BaseField, - 'week': WeekField, - 'day': DayOfMonthField, - 'day_of_week': DayOfWeekField, - 'hour': BaseField, - 'minute': BaseField, - 'second': BaseField} +class CronTrigger(BaseTrigger): + """ + Triggers when current time matches all specified time constraints, + similarly to how the UNIX cron scheduler works. - def __init__(self, **values): - self.start_date = values.pop('start_date', None) - if self.start_date: - self.start_date = convert_to_datetime(self.start_date) + :param int|str year: 4-digit year + :param int|str month: month (1-12) + :param int|str day: day of the (1-31) + :param int|str week: ISO week (1-53) + :param int|str day_of_week: number or name of weekday (0-6 or mon,tue,wed,thu,fri,sat,sun) + :param int|str hour: hour (0-23) + :param int|str minute: minute (0-59) + :param int|str second: second (0-59) + :param datetime|str start_date: earliest possible date/time to trigger on (inclusive) + :param datetime|str end_date: latest possible date/time to trigger on (inclusive) + :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults + to scheduler timezone) + .. note:: The first weekday is always **monday**. 
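A sketch of the rewritten CronTrigger above; the schedules are illustrative, and the ``day='last'`` form relies on the LastDayOfMonthExpression added to expressions.py further down in this patch::

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.cron import CronTrigger

    def nightly_job():
        pass

    scheduler = BackgroundScheduler()

    # 02:30 every weekday (the first weekday is Monday, as noted above)
    scheduler.add_job(nightly_job, CronTrigger(day_of_week='mon-fri', hour=2, minute=30,
                                               timezone='UTC'))

    # Midnight on the last day of every month
    scheduler.add_job(nightly_job, CronTrigger(day='last'))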
+ """ + + FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second') + FIELDS_MAP = { + 'year': BaseField, + 'month': BaseField, + 'week': WeekField, + 'day': DayOfMonthField, + 'day_of_week': DayOfWeekField, + 'hour': BaseField, + 'minute': BaseField, + 'second': BaseField + } + + __slots__ = 'timezone', 'start_date', 'end_date', 'fields' + + def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None, + minute=None, second=None, start_date=None, end_date=None, timezone=None): + if timezone: + self.timezone = astimezone(timezone) + elif isinstance(start_date, datetime) and start_date.tzinfo: + self.timezone = start_date.tzinfo + elif isinstance(end_date, datetime) and end_date.tzinfo: + self.timezone = end_date.tzinfo + else: + self.timezone = get_localzone() + + self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date') + self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date') + + values = dict((key, value) for (key, value) in six.iteritems(locals()) + if key in self.FIELD_NAMES and value is not None) self.fields = [] + assign_defaults = False for field_name in self.FIELD_NAMES: if field_name in values: exprs = values.pop(field_name) is_default = False - elif not values: + assign_defaults = not values + elif assign_defaults: exprs = DEFAULT_VALUES[field_name] is_default = True else: @@ -39,18 +80,18 @@ class CronTrigger(object): def _increment_field_value(self, dateval, fieldnum): """ - Increments the designated field and resets all less significant fields - to their minimum values. + Increments the designated field and resets all less significant fields to their minimum + values. :type dateval: datetime :type fieldnum: int - :type amount: int + :return: a tuple containing the new date, and the number of the field that was actually + incremented :rtype: tuple - :return: a tuple containing the new date, and the number of the field - that was actually incremented """ - i = 0 + values = {} + i = 0 while i < len(self.fields): field = self.fields[i] if not field.REAL: @@ -77,7 +118,8 @@ class CronTrigger(object): values[field.name] = value + 1 i += 1 - return datetime(**values), fieldnum + difference = datetime(**values) - dateval.replace(tzinfo=None) + return self.timezone.normalize(dateval + difference), fieldnum def _set_field_value(self, dateval, fieldnum, new_value): values = {} @@ -90,13 +132,18 @@ class CronTrigger(object): else: values[field.name] = new_value - return datetime(**values) + return self.timezone.localize(datetime(**values)) + + def get_next_fire_time(self, previous_fire_time, now): + if previous_fire_time: + start_date = min(now, previous_fire_time + timedelta(microseconds=1)) + if start_date == previous_fire_time: + start_date += timedelta(microseconds=1) + else: + start_date = max(now, self.start_date) if self.start_date else now - def get_next_fire_time(self, start_date): - if self.start_date: - start_date = max(start_date, self.start_date) - next_date = datetime_ceil(start_date) fieldnum = 0 + next_date = datetime_ceil(start_date).astimezone(self.timezone) while 0 <= fieldnum < len(self.fields): field = self.fields[fieldnum] curr_value = field.get_value(next_date) @@ -104,32 +151,56 @@ class CronTrigger(object): if next_value is None: # No valid value was found - next_date, fieldnum = self._increment_field_value(next_date, - fieldnum - 1) + next_date, fieldnum = self._increment_field_value(next_date, fieldnum - 1) elif next_value > curr_value: # A valid, but higher than 
the starting value, was found if field.REAL: - next_date = self._set_field_value(next_date, fieldnum, - next_value) + next_date = self._set_field_value(next_date, fieldnum, next_value) fieldnum += 1 else: - next_date, fieldnum = self._increment_field_value(next_date, - fieldnum) + next_date, fieldnum = self._increment_field_value(next_date, fieldnum) else: # A valid value was found, no changes necessary fieldnum += 1 + # Return if the date has rolled past the end date + if self.end_date and next_date > self.end_date: + return None + if fieldnum >= 0: return next_date + def __getstate__(self): + return { + 'version': 1, + 'timezone': self.timezone, + 'start_date': self.start_date, + 'end_date': self.end_date, + 'fields': self.fields + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.timezone = state['timezone'] + self.start_date = state['start_date'] + self.end_date = state['end_date'] + self.fields = state['fields'] + def __str__(self): - options = ["%s='%s'" % (f.name, str(f)) for f in self.fields - if not f.is_default] + options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default] return 'cron[%s]' % (', '.join(options)) def __repr__(self): - options = ["%s='%s'" % (f.name, str(f)) for f in self.fields - if not f.is_default] + options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default] if self.start_date: - options.append("start_date='%s'" % self.start_date.isoformat(' ')) - return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options)) + options.append("start_date='%s'" % datetime_repr(self.start_date)) + return "<%s (%s, timezone='%s')>" % ( + self.__class__.__name__, ', '.join(options), self.timezone) diff --git a/lib/apscheduler/triggers/cron/expressions.py b/lib/apscheduler/triggers/cron/expressions.py index 018c7a30..21493d54 100644 --- a/lib/apscheduler/triggers/cron/expressions.py +++ b/lib/apscheduler/triggers/cron/expressions.py @@ -1,6 +1,4 @@ -""" -This module contains the expressions applicable for CronTrigger's fields. 
-""" +"""This module contains the expressions applicable for CronTrigger's fields.""" from calendar import monthrange import re @@ -8,7 +6,7 @@ import re from apscheduler.util import asint __all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression', - 'WeekdayPositionExpression') + 'WeekdayPositionExpression', 'LastDayOfMonthExpression') WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] @@ -37,6 +35,9 @@ class AllExpression(object): if next <= maxval: return next + def __eq__(self, other): + return isinstance(other, self.__class__) and self.step == other.step + def __str__(self): if self.step: return '*/%d' % self.step @@ -57,30 +58,30 @@ class RangeExpression(AllExpression): if last is None and step is None: last = first if last is not None and first > last: - raise ValueError('The minimum value in a range must not be ' - 'higher than the maximum') + raise ValueError('The minimum value in a range must not be higher than the maximum') self.first = first self.last = last def get_next_value(self, date, field): - start = field.get_value(date) + startval = field.get_value(date) minval = field.get_min(date) maxval = field.get_max(date) # Apply range limits minval = max(minval, self.first) - if self.last is not None: - maxval = min(maxval, self.last) - start = max(start, minval) + maxval = min(maxval, self.last) if self.last is not None else maxval + nextval = max(minval, startval) - if not self.step: - next = start - else: - distance_to_next = (self.step - (start - minval)) % self.step - next = start + distance_to_next + # Apply the step if defined + if self.step: + distance_to_next = (self.step - (nextval - minval)) % self.step + nextval += distance_to_next - if next <= maxval: - return next + return nextval if nextval <= maxval else None + + def __eq__(self, other): + return (isinstance(other, self.__class__) and self.first == other.first and + self.last == other.last) def __str__(self): if self.last != self.first and self.last is not None: @@ -102,8 +103,7 @@ class RangeExpression(AllExpression): class WeekdayRangeExpression(RangeExpression): - value_re = re.compile(r'(?P[a-z]+)(?:-(?P[a-z]+))?', - re.IGNORECASE) + value_re = re.compile(r'(?P[a-z]+)(?:-(?P[a-z]+))?', re.IGNORECASE) def __init__(self, first, last=None): try: @@ -135,8 +135,8 @@ class WeekdayRangeExpression(RangeExpression): class WeekdayPositionExpression(AllExpression): options = ['1st', '2nd', '3rd', '4th', '5th', 'last'] - value_re = re.compile(r'(?P%s) +(?P(?:\d+|\w+))' - % '|'.join(options), re.IGNORECASE) + value_re = re.compile(r'(?P%s) +(?P(?:\d+|\w+))' % + '|'.join(options), re.IGNORECASE) def __init__(self, option_name, weekday_name): try: @@ -150,8 +150,7 @@ class WeekdayPositionExpression(AllExpression): raise ValueError('Invalid weekday name "%s"' % weekday_name) def get_next_value(self, date, field): - # Figure out the weekday of the month's first day and the number - # of days in that month + # Figure out the weekday of the month's first day and the number of days in that month first_day_wday, last_day = monthrange(date.year, date.month) # Calculate which day of the month is the first of the target weekdays @@ -163,16 +162,34 @@ class WeekdayPositionExpression(AllExpression): if self.option_num < 5: target_day = first_hit_day + self.option_num * 7 else: - target_day = first_hit_day + ((last_day - first_hit_day) / 7) * 7 + target_day = first_hit_day + ((last_day - first_hit_day) // 7) * 7 if target_day <= last_day and target_day >= date.day: return target_day + def __eq__(self, other): + 
return (super(WeekdayPositionExpression, self).__eq__(other) and + self.option_num == other.option_num and self.weekday == other.weekday) + def __str__(self): - return '%s %s' % (self.options[self.option_num], - WEEKDAYS[self.weekday]) + return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday]) def __repr__(self): - return "%s('%s', '%s')" % (self.__class__.__name__, - self.options[self.option_num], + return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num], WEEKDAYS[self.weekday]) + + +class LastDayOfMonthExpression(AllExpression): + value_re = re.compile(r'last', re.IGNORECASE) + + def __init__(self): + pass + + def get_next_value(self, date, field): + return monthrange(date.year, date.month)[1] + + def __str__(self): + return 'last' + + def __repr__(self): + return "%s()" % self.__class__.__name__ diff --git a/lib/apscheduler/triggers/cron/fields.py b/lib/apscheduler/triggers/cron/fields.py index ef970cc9..892bc13f 100644 --- a/lib/apscheduler/triggers/cron/fields.py +++ b/lib/apscheduler/triggers/cron/fields.py @@ -1,22 +1,22 @@ -""" -Fields represent CronTrigger options which map to :class:`~datetime.datetime` -fields. -""" +"""Fields represent CronTrigger options which map to :class:`~datetime.datetime` fields.""" from calendar import monthrange -from apscheduler.triggers.cron.expressions import * - -__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', - 'WeekField', 'DayOfMonthField', 'DayOfWeekField') +from apscheduler.triggers.cron.expressions import ( + AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression, + WeekdayRangeExpression) -MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, - 'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0} -MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53, - 'day_of_week': 6, 'hour': 23, 'minute': 59, 'second': 59} -DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', - 'day_of_week': '*', 'hour': 0, 'minute': 0, 'second': 0} +__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField', + 'DayOfMonthField', 'DayOfWeekField') + + +MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0, + 'minute': 0, 'second': 0} +MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53, 'day_of_week': 6, 'hour': 23, + 'minute': 59, 'second': 59} +DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0, + 'minute': 0, 'second': 0} class BaseField(object): @@ -65,16 +65,17 @@ class BaseField(object): self.expressions.append(compiled_expr) return - raise ValueError('Unrecognized expression "%s" for field "%s"' % - (expr, self.name)) + raise ValueError('Unrecognized expression "%s" for field "%s"' % (expr, self.name)) + + def __eq__(self, other): + return isinstance(self, self.__class__) and self.expressions == other.expressions def __str__(self): expr_strings = (str(e) for e in self.expressions) return ','.join(expr_strings) def __repr__(self): - return "%s('%s', '%s')" % (self.__class__.__name__, self.name, - str(self)) + return "%s('%s', '%s')" % (self.__class__.__name__, self.name, self) class WeekField(BaseField): @@ -85,7 +86,7 @@ class WeekField(BaseField): class DayOfMonthField(BaseField): - COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression] + COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression, LastDayOfMonthExpression] def get_max(self, dateval): return monthrange(dateval.year, dateval.month)[1] diff --git 
a/lib/apscheduler/triggers/date.py b/lib/apscheduler/triggers/date.py new file mode 100644 index 00000000..07681008 --- /dev/null +++ b/lib/apscheduler/triggers/date.py @@ -0,0 +1,51 @@ +from datetime import datetime + +from tzlocal import get_localzone + +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import convert_to_datetime, datetime_repr, astimezone + + +class DateTrigger(BaseTrigger): + """ + Triggers once on the given datetime. If ``run_date`` is left empty, current time is used. + + :param datetime|str run_date: the date/time to run the job at + :param datetime.tzinfo|str timezone: time zone for ``run_date`` if it doesn't have one already + """ + + __slots__ = 'run_date' + + def __init__(self, run_date=None, timezone=None): + timezone = astimezone(timezone) or get_localzone() + if run_date is not None: + self.run_date = convert_to_datetime(run_date, timezone, 'run_date') + else: + self.run_date = datetime.now(timezone) + + def get_next_fire_time(self, previous_fire_time, now): + return self.run_date if previous_fire_time is None else None + + def __getstate__(self): + return { + 'version': 1, + 'run_date': self.run_date + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.run_date = state['run_date'] + + def __str__(self): + return 'date[%s]' % datetime_repr(self.run_date) + + def __repr__(self): + return "<%s (run_date='%s')>" % (self.__class__.__name__, datetime_repr(self.run_date)) diff --git a/lib/apscheduler/triggers/interval.py b/lib/apscheduler/triggers/interval.py index dd16d777..fec912a2 100644 --- a/lib/apscheduler/triggers/interval.py +++ b/lib/apscheduler/triggers/interval.py @@ -1,39 +1,92 @@ -from datetime import datetime, timedelta +from datetime import timedelta, datetime from math import ceil -from apscheduler.util import convert_to_datetime, timedelta_seconds +from tzlocal import get_localzone + +from apscheduler.triggers.base import BaseTrigger +from apscheduler.util import convert_to_datetime, timedelta_seconds, datetime_repr, astimezone -class IntervalTrigger(object): - def __init__(self, interval, start_date=None): - if not isinstance(interval, timedelta): - raise TypeError('interval must be a timedelta') - if start_date: - start_date = convert_to_datetime(start_date) +class IntervalTrigger(BaseTrigger): + """ + Triggers on specified intervals, starting on ``start_date`` if specified, ``datetime.now()`` + + interval otherwise. 
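For orientation, a rough usage sketch (not part of the diff) of the trigger classes this patch bundles: the DateTrigger above, the IntervalTrigger whose definition follows, and the cron 'last' day-of-month support added earlier. The job function and schedule values are hypothetical, and the snippet assumes the bundled BackgroundScheduler and CronTrigger.

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger

def refresh_job():
    pass  # placeholder job body

sched = BackgroundScheduler()
# Trigger instances are passed directly, avoiding the 'date'/'interval'/'cron'
# alias lookup, which depends on installed entry points.
sched.add_job(refresh_job, DateTrigger(run_date='2017-09-30 23:30:00'))
sched.add_job(refresh_job, IntervalTrigger(minutes=5))
sched.add_job(refresh_job, CronTrigger(day='last', hour=3))  # LastDayOfMonthExpression
sched.start()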
- self.interval = interval + :param int weeks: number of weeks to wait + :param int days: number of days to wait + :param int hours: number of hours to wait + :param int minutes: number of minutes to wait + :param int seconds: number of seconds to wait + :param datetime|str start_date: starting point for the interval calculation + :param datetime|str end_date: latest possible date/time to trigger on + :param datetime.tzinfo|str timezone: time zone to use for the date/time calculations + """ + + __slots__ = 'timezone', 'start_date', 'end_date', 'interval', 'interval_length' + + def __init__(self, weeks=0, days=0, hours=0, minutes=0, seconds=0, start_date=None, + end_date=None, timezone=None): + self.interval = timedelta(weeks=weeks, days=days, hours=hours, minutes=minutes, + seconds=seconds) self.interval_length = timedelta_seconds(self.interval) if self.interval_length == 0: self.interval = timedelta(seconds=1) self.interval_length = 1 - if start_date is None: - self.start_date = datetime.now() + self.interval + if timezone: + self.timezone = astimezone(timezone) + elif isinstance(start_date, datetime) and start_date.tzinfo: + self.timezone = start_date.tzinfo + elif isinstance(end_date, datetime) and end_date.tzinfo: + self.timezone = end_date.tzinfo else: - self.start_date = convert_to_datetime(start_date) + self.timezone = get_localzone() - def get_next_fire_time(self, start_date): - if start_date < self.start_date: - return self.start_date + start_date = start_date or (datetime.now(self.timezone) + self.interval) + self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date') + self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date') - timediff_seconds = timedelta_seconds(start_date - self.start_date) - next_interval_num = int(ceil(timediff_seconds / self.interval_length)) - return self.start_date + self.interval * next_interval_num + def get_next_fire_time(self, previous_fire_time, now): + if previous_fire_time: + next_fire_time = previous_fire_time + self.interval + elif self.start_date > now: + next_fire_time = self.start_date + else: + timediff_seconds = timedelta_seconds(now - self.start_date) + next_interval_num = int(ceil(timediff_seconds / self.interval_length)) + next_fire_time = self.start_date + self.interval * next_interval_num + + if not self.end_date or next_fire_time <= self.end_date: + return self.timezone.normalize(next_fire_time) + + def __getstate__(self): + return { + 'version': 1, + 'timezone': self.timezone, + 'start_date': self.start_date, + 'end_date': self.end_date, + 'interval': self.interval + } + + def __setstate__(self, state): + # This is for compatibility with APScheduler 3.0.x + if isinstance(state, tuple): + state = state[1] + + if state.get('version', 1) > 1: + raise ValueError( + 'Got serialized data for version %s of %s, but only version 1 can be handled' % + (state['version'], self.__class__.__name__)) + + self.timezone = state['timezone'] + self.start_date = state['start_date'] + self.end_date = state['end_date'] + self.interval = state['interval'] + self.interval_length = timedelta_seconds(self.interval) def __str__(self): return 'interval[%s]' % str(self.interval) def __repr__(self): - return "<%s (interval=%s, start_date=%s)>" % ( - self.__class__.__name__, repr(self.interval), - repr(self.start_date)) + return "<%s (interval=%r, start_date='%s', timezone='%s')>" % ( + self.__class__.__name__, self.interval, datetime_repr(self.start_date), self.timezone) diff --git a/lib/apscheduler/triggers/simple.py 
b/lib/apscheduler/triggers/simple.py deleted file mode 100644 index ea61b3f1..00000000 --- a/lib/apscheduler/triggers/simple.py +++ /dev/null @@ -1,17 +0,0 @@ -from apscheduler.util import convert_to_datetime - - -class SimpleTrigger(object): - def __init__(self, run_date): - self.run_date = convert_to_datetime(run_date) - - def get_next_fire_time(self, start_date): - if self.run_date >= start_date: - return self.run_date - - def __str__(self): - return 'date[%s]' % str(self.run_date) - - def __repr__(self): - return '<%s (run_date=%s)>' % ( - self.__class__.__name__, repr(self.run_date)) diff --git a/lib/apscheduler/util.py b/lib/apscheduler/util.py index a49aaed8..63ac8ac8 100644 --- a/lib/apscheduler/util.py +++ b/lib/apscheduler/util.py @@ -1,26 +1,50 @@ -""" -This module contains several handy functions primarily meant for internal use. -""" +"""This module contains several handy functions primarily meant for internal use.""" -from datetime import date, datetime, timedelta -from time import mktime +from __future__ import division +from datetime import date, datetime, time, timedelta, tzinfo +from calendar import timegm import re -import sys -from types import MethodType +from functools import partial -__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds', - 'time_difference', 'datetime_ceil', 'combine_opts', - 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref', - 'to_unicode', 'iteritems', 'itervalues', 'xrange') +from pytz import timezone, utc +import six + +try: + from inspect import signature +except ImportError: # pragma: nocover + from funcsigs import signature + +try: + from threading import TIMEOUT_MAX +except ImportError: + TIMEOUT_MAX = 4294967 # Maximum value accepted by Event.wait() on Windows + +__all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp', + 'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name', + 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args') + + +class _Undefined(object): + def __nonzero__(self): + return False + + def __bool__(self): + return False + + def __repr__(self): + return '' + + +undefined = _Undefined() #: a unique object that only signifies that no value is defined def asint(text): """ - Safely converts a string to an integer, returning None if the string - is None. + Safely converts a string to an integer, returning ``None`` if the string is ``None``. :type text: str :rtype: int + """ if text is not None: return int(text) @@ -31,6 +55,7 @@ def asbool(obj): Interprets an object as a boolean value. :rtype: bool + """ if isinstance(obj, str): obj = obj.strip().lower() @@ -42,36 +67,105 @@ def asbool(obj): return bool(obj) +def astimezone(obj): + """ + Interprets an object as a timezone. + + :rtype: tzinfo + + """ + if isinstance(obj, six.string_types): + return timezone(obj) + if isinstance(obj, tzinfo): + if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'): + raise TypeError('Only timezones from the pytz library are supported') + if obj.zone == 'local': + raise ValueError( + 'Unable to determine the name of the local timezone -- you must explicitly ' + 'specify the name of the local timezone. Please refrain from using timezones like ' + 'EST to prevent problems with daylight saving time. 
Instead, use a locale based ' + 'timezone name (such as Europe/Helsinki).') + return obj + if obj is not None: + raise TypeError('Expected tzinfo, got %s instead' % obj.__class__.__name__) + + _DATE_REGEX = re.compile( r'(?P\d{4})-(?P\d{1,2})-(?P\d{1,2})' r'(?: (?P\d{1,2}):(?P\d{1,2}):(?P\d{1,2})' r'(?:\.(?P\d{1,6}))?)?') -def convert_to_datetime(input): +def convert_to_datetime(input, tz, arg_name): """ - Converts the given object to a datetime object, if possible. - If an actual datetime object is passed, it is returned unmodified. - If the input is a string, it is parsed as a datetime. + Converts the given object to a timezone aware datetime object. - Date strings are accepted in three different forms: date only (Y-m-d), - date with time (Y-m-d H:M:S) or with date+time with microseconds - (Y-m-d H:M:S.micro). + If a timezone aware datetime object is passed, it is returned unmodified. + If a native datetime object is passed, it is given the specified timezone. + If the input is a string, it is parsed as a datetime with the given timezone. + Date strings are accepted in three different forms: date only (Y-m-d), date with time + (Y-m-d H:M:S) or with date+time with microseconds (Y-m-d H:M:S.micro). + + :param str|datetime input: the datetime or string to convert to a timezone aware datetime + :param datetime.tzinfo tz: timezone to interpret ``input`` in + :param str arg_name: the name of the argument (used in an error message) :rtype: datetime + """ - if isinstance(input, datetime): - return input + if input is None: + return + elif isinstance(input, datetime): + datetime_ = input elif isinstance(input, date): - return datetime.fromordinal(input.toordinal()) - elif isinstance(input, str): + datetime_ = datetime.combine(input, time()) + elif isinstance(input, six.string_types): m = _DATE_REGEX.match(input) if not m: raise ValueError('Invalid date string') values = [(k, int(v or 0)) for k, v in m.groupdict().items()] values = dict(values) - return datetime(**values) - raise TypeError('Unsupported input type: %s' % type(input)) + datetime_ = datetime(**values) + else: + raise TypeError('Unsupported type for %s: %s' % (arg_name, input.__class__.__name__)) + + if datetime_.tzinfo is not None: + return datetime_ + if tz is None: + raise ValueError( + 'The "tz" argument must be specified if %s has no timezone information' % arg_name) + if isinstance(tz, six.string_types): + tz = timezone(tz) + + try: + return tz.localize(datetime_, is_dst=None) + except AttributeError: + raise TypeError( + 'Only pytz timezones are supported (need the localize() and normalize() methods)') + + +def datetime_to_utc_timestamp(timeval): + """ + Converts a datetime instance to a timestamp. + + :type timeval: datetime + :rtype: float + + """ + if timeval is not None: + return timegm(timeval.utctimetuple()) + timeval.microsecond / 1000000 + + +def utc_timestamp_to_datetime(timestamp): + """ + Converts the given timestamp to a datetime instance. + + :type timestamp: float + :rtype: datetime + + """ + if timestamp is not None: + return datetime.fromtimestamp(timestamp, utc) def timedelta_seconds(delta): @@ -80,151 +174,212 @@ def timedelta_seconds(delta): :type delta: timedelta :rtype: float + """ return delta.days * 24 * 60 * 60 + delta.seconds + \ delta.microseconds / 1000000.0 -def time_difference(date1, date2): - """ - Returns the time difference in seconds between the given two - datetime objects. The difference is calculated as: date1 - date2. 
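A quick sketch (not part of the diff) of the timezone-aware helpers introduced above -- convert_to_datetime(), datetime_to_utc_timestamp() and utc_timestamp_to_datetime() -- assuming the bundled pytz; the zone and date string are arbitrary examples.

from apscheduler.util import (convert_to_datetime, datetime_to_utc_timestamp,
                              utc_timestamp_to_datetime)
from pytz import timezone

tz = timezone('Europe/Helsinki')
# Strings are parsed and localized; naive datetimes are given the supplied zone.
aware = convert_to_datetime('2017-09-10 11:50:08', tz, 'run_date')
stamp = datetime_to_utc_timestamp(aware)      # float seconds since the epoch, UTC
assert utc_timestamp_to_datetime(stamp) == aware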
- - :param date1: the later datetime - :type date1: datetime - :param date2: the earlier datetime - :type date2: datetime - :rtype: float - """ - later = mktime(date1.timetuple()) + date1.microsecond / 1000000.0 - earlier = mktime(date2.timetuple()) + date2.microsecond / 1000000.0 - return later - earlier - - def datetime_ceil(dateval): """ Rounds the given datetime object upwards. :type dateval: datetime + """ if dateval.microsecond > 0: - return dateval + timedelta(seconds=1, - microseconds= -dateval.microsecond) + return dateval + timedelta(seconds=1, microseconds=-dateval.microsecond) return dateval -def combine_opts(global_config, prefix, local_config={}): - """ - Returns a subdictionary from keys and values of ``global_config`` where - the key starts with the given prefix, combined with options from - local_config. The keys in the subdictionary have the prefix removed. - - :type global_config: dict - :type prefix: str - :type local_config: dict - :rtype: dict - """ - prefixlen = len(prefix) - subconf = {} - for key, value in global_config.items(): - if key.startswith(prefix): - key = key[prefixlen:] - subconf[key] = value - subconf.update(local_config) - return subconf +def datetime_repr(dateval): + return dateval.strftime('%Y-%m-%d %H:%M:%S %Z') if dateval else 'None' def get_callable_name(func): """ Returns the best available display name for the given function/callable. + + :rtype: str + """ + # the easy case (on Python 3.3+) + if hasattr(func, '__qualname__'): + return func.__qualname__ + + # class methods, bound and unbound methods f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None) - if f_self and hasattr(func, '__name__'): - if isinstance(f_self, type): - # class method - return '%s.%s' % (f_self.__name__, func.__name__) - # bound method - return '%s.%s' % (f_self.__class__.__name__, func.__name__) + f_class = f_self if isinstance(f_self, type) else f_self.__class__ + else: + f_class = getattr(func, 'im_class', None) + if f_class and hasattr(func, '__name__'): + return '%s.%s' % (f_class.__name__, func.__name__) + + # class or class instance if hasattr(func, '__call__'): + # class if hasattr(func, '__name__'): - # function, unbound method or a class with a __call__ method return func.__name__ + # instance of a class with a __call__ method return func.__class__.__name__ - raise TypeError('Unable to determine a name for %s -- ' - 'maybe it is not a callable?' % repr(func)) + raise TypeError('Unable to determine a name for %r -- maybe it is not a callable?' % func) def obj_to_ref(obj): """ - Returns the path to the given object. + Returns the path to the given callable. + + :rtype: str + :raises TypeError: if the given object is not callable + :raises ValueError: if the given object is a :class:`~functools.partial`, lambda or a nested + function + """ - ref = '%s:%s' % (obj.__module__, get_callable_name(obj)) - try: - obj2 = ref_to_obj(ref) - if obj != obj2: - raise ValueError - except Exception: - raise ValueError('Cannot determine the reference to %s' % repr(obj)) - - return ref + if isinstance(obj, partial): + raise ValueError('Cannot create a reference to a partial()') + + name = get_callable_name(obj) + if '' in name: + raise ValueError('Cannot create a reference to a lambda') + if '' in name: + raise ValueError('Cannot create a reference to a nested function') + + return '%s:%s' % (obj.__module__, name) def ref_to_obj(ref): """ Returns the object pointed to by ``ref``. 
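The reworked get_callable_name() above prefers __qualname__ where available and falls back to the im_self/im_class inspection on Python 2. A small sketch (the class and method names are hypothetical, not from the patch):

from apscheduler.util import get_callable_name

class Updater(object):          # hypothetical example class
    def run(self):
        pass

print(get_callable_name(Updater.run))    # 'Updater.run'
print(get_callable_name(Updater().run))  # 'Updater.run' (bound method)
print(get_callable_name(len))            # 'len'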
+ + :type ref: str + """ - if not isinstance(ref, basestring): + if not isinstance(ref, six.string_types): raise TypeError('References must be strings') - if not ':' in ref: + if ':' not in ref: raise ValueError('Invalid reference') modulename, rest = ref.split(':', 1) try: - obj = __import__(modulename) + obj = __import__(modulename, fromlist=[rest]) except ImportError: - raise LookupError('Error resolving reference %s: ' - 'could not import module' % ref) + raise LookupError('Error resolving reference %s: could not import module' % ref) try: - for name in modulename.split('.')[1:] + rest.split('.'): + for name in rest.split('.'): obj = getattr(obj, name) return obj except Exception: - raise LookupError('Error resolving reference %s: ' - 'error looking up object' % ref) + raise LookupError('Error resolving reference %s: error looking up object' % ref) def maybe_ref(ref): """ - Returns the object that the given reference points to, if it is indeed - a reference. If it is not a reference, the object is returned as-is. + Returns the object that the given reference points to, if it is indeed a reference. + If it is not a reference, the object is returned as-is. + """ if not isinstance(ref, str): return ref return ref_to_obj(ref) -def to_unicode(string, encoding='ascii'): - """ - Safely converts a string to a unicode representation on any - Python version. - """ - if hasattr(string, 'decode'): - return string.decode(encoding, 'ignore') - return string # pragma: nocover +if six.PY2: + def repr_escape(string): + if isinstance(string, six.text_type): + return string.encode('ascii', 'backslashreplace') + return string +else: + def repr_escape(string): + return string -if sys.version_info < (3, 0): # pragma: nocover - iteritems = lambda d: d.iteritems() - itervalues = lambda d: d.itervalues() - xrange = xrange - basestring = basestring -else: # pragma: nocover - iteritems = lambda d: d.items() - itervalues = lambda d: d.values() - xrange = range - basestring = str +def check_callable_args(func, args, kwargs): + """ + Ensures that the given callable can be called with the given arguments. 
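obj_to_ref() and ref_to_obj() above give a job a serializable textual reference, while maybe_ref() passes non-strings through untouched. A minimal round-trip sketch (not part of the diff) using a stdlib callable:

from logging import getLogger

from apscheduler.util import maybe_ref, obj_to_ref, ref_to_obj

ref = obj_to_ref(getLogger)               # 'logging:getLogger'
assert ref_to_obj(ref) is getLogger       # resolved back to the same callable
assert maybe_ref(ref) is getLogger        # strings are treated as references
assert maybe_ref(getLogger) is getLogger  # anything else is returned as-is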
+ + :type args: tuple + :type kwargs: dict + + """ + pos_kwargs_conflicts = [] # parameters that have a match in both args and kwargs + positional_only_kwargs = [] # positional-only parameters that have a match in kwargs + unsatisfied_args = [] # parameters in signature that don't have a match in args or kwargs + unsatisfied_kwargs = [] # keyword-only arguments that don't have a match in kwargs + unmatched_args = list(args) # args that didn't match any of the parameters in the signature + # kwargs that didn't match any of the parameters in the signature + unmatched_kwargs = list(kwargs) + # indicates if the signature defines *args and **kwargs respectively + has_varargs = has_var_kwargs = False + + try: + sig = signature(func) + except ValueError: + # signature() doesn't work against every kind of callable + return + + for param in six.itervalues(sig.parameters): + if param.kind == param.POSITIONAL_OR_KEYWORD: + if param.name in unmatched_kwargs and unmatched_args: + pos_kwargs_conflicts.append(param.name) + elif unmatched_args: + del unmatched_args[0] + elif param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + elif param.default is param.empty: + unsatisfied_args.append(param.name) + elif param.kind == param.POSITIONAL_ONLY: + if unmatched_args: + del unmatched_args[0] + elif param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + positional_only_kwargs.append(param.name) + elif param.default is param.empty: + unsatisfied_args.append(param.name) + elif param.kind == param.KEYWORD_ONLY: + if param.name in unmatched_kwargs: + unmatched_kwargs.remove(param.name) + elif param.default is param.empty: + unsatisfied_kwargs.append(param.name) + elif param.kind == param.VAR_POSITIONAL: + has_varargs = True + elif param.kind == param.VAR_KEYWORD: + has_var_kwargs = True + + # Make sure there are no conflicts between args and kwargs + if pos_kwargs_conflicts: + raise ValueError('The following arguments are supplied in both args and kwargs: %s' % + ', '.join(pos_kwargs_conflicts)) + + # Check if keyword arguments are being fed to positional-only parameters + if positional_only_kwargs: + raise ValueError('The following arguments cannot be given as keyword arguments: %s' % + ', '.join(positional_only_kwargs)) + + # Check that the number of positional arguments minus the number of matched kwargs matches the + # argspec + if unsatisfied_args: + raise ValueError('The following arguments have not been supplied: %s' % + ', '.join(unsatisfied_args)) + + # Check that all keyword-only arguments have been supplied + if unsatisfied_kwargs: + raise ValueError( + 'The following keyword-only arguments have not been supplied in kwargs: %s' % + ', '.join(unsatisfied_kwargs)) + + # Check that the callable can accept the given number of positional arguments + if not has_varargs and unmatched_args: + raise ValueError( + 'The list of positional arguments is longer than the target callable can handle ' + '(allowed: %d, given in args: %d)' % (len(args) - len(unmatched_args), len(args))) + + # Check that the callable can accept the given keyword arguments + if not has_var_kwargs and unmatched_kwargs: + raise ValueError( + 'The target callable does not accept the following keyword arguments: %s' % + ', '.join(unmatched_kwargs)) diff --git a/lib/concurrent/LICENSE b/lib/concurrent/LICENSE new file mode 100644 index 00000000..a8d65b16 --- /dev/null +++ b/lib/concurrent/LICENSE @@ -0,0 +1,48 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
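Back in apscheduler/util.py, the new check_callable_args() shown just before this license lets the scheduler reject a job whose arguments can never satisfy the callable, before the job is ever stored. A rough sketch with a hypothetical callable (names and values are illustrative only):

from apscheduler.util import check_callable_args

def post_process(nzb_name, nzb_folder, failed=False):   # hypothetical job target
    pass

# Satisfied: one positional argument plus a keyword for the second parameter.
check_callable_args(post_process, ('Some.File.cbz',), {'nzb_folder': '/tmp'})

# Missing required parameters are reported up front.
try:
    check_callable_args(post_process, (), {})
except ValueError as exc:
    print(exc)   # lists nzb_name and nzb_folder as not supplied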
diff --git a/lib/concurrent/PKG-INFO b/lib/concurrent/PKG-INFO new file mode 100644 index 00000000..50dd8f09 --- /dev/null +++ b/lib/concurrent/PKG-INFO @@ -0,0 +1,16 @@ +Metadata-Version: 1.1 +Name: futures +Version: 3.1.1 +Summary: Backport of the concurrent.futures package from Python 3.2 +Home-page: https://github.com/agronholm/pythonfutures +Author: Alex Gronholm +Author-email: alex.gronholm+pypi@nextday.fi +License: PSF +Description: UNKNOWN +Platform: UNKNOWN +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 2 :: Only diff --git a/lib/concurrent/__init__.py b/lib/concurrent/__init__.py new file mode 100644 index 00000000..b36383a6 --- /dev/null +++ b/lib/concurrent/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/lib/concurrent/futures/__init__.py b/lib/concurrent/futures/__init__.py new file mode 100644 index 00000000..428b14bd --- /dev/null +++ b/lib/concurrent/futures/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Execute computations asynchronously using threads or processes.""" + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +from concurrent.futures._base import (FIRST_COMPLETED, + FIRST_EXCEPTION, + ALL_COMPLETED, + CancelledError, + TimeoutError, + Future, + Executor, + wait, + as_completed) +from concurrent.futures.thread import ThreadPoolExecutor + +try: + from concurrent.futures.process import ProcessPoolExecutor +except ImportError: + # some platforms don't have multiprocessing + pass diff --git a/lib/concurrent/futures/_base.py b/lib/concurrent/futures/_base.py new file mode 100644 index 00000000..ca2ebfb0 --- /dev/null +++ b/lib/concurrent/futures/_base.py @@ -0,0 +1,631 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +import collections +import logging +import threading +import itertools +import time +import types + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +FIRST_COMPLETED = 'FIRST_COMPLETED' +FIRST_EXCEPTION = 'FIRST_EXCEPTION' +ALL_COMPLETED = 'ALL_COMPLETED' +_AS_COMPLETED = '_AS_COMPLETED' + +# Possible future states (for internal use by the futures package). +PENDING = 'PENDING' +RUNNING = 'RUNNING' +# The future was cancelled by the user... +CANCELLED = 'CANCELLED' +# ...and _Waiter.add_cancelled() was called by a worker. +CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED' +FINISHED = 'FINISHED' + +_FUTURE_STATES = [ + PENDING, + RUNNING, + CANCELLED, + CANCELLED_AND_NOTIFIED, + FINISHED +] + +_STATE_TO_DESCRIPTION_MAP = { + PENDING: "pending", + RUNNING: "running", + CANCELLED: "cancelled", + CANCELLED_AND_NOTIFIED: "cancelled", + FINISHED: "finished" +} + +# Logger for internal use by the futures package. 
+LOGGER = logging.getLogger("concurrent.futures") + +class Error(Exception): + """Base class for all future-related exceptions.""" + pass + +class CancelledError(Error): + """The Future was cancelled.""" + pass + +class TimeoutError(Error): + """The operation exceeded the given deadline.""" + pass + +class _Waiter(object): + """Provides the event that wait() and as_completed() block on.""" + def __init__(self): + self.event = threading.Event() + self.finished_futures = [] + + def add_result(self, future): + self.finished_futures.append(future) + + def add_exception(self, future): + self.finished_futures.append(future) + + def add_cancelled(self, future): + self.finished_futures.append(future) + +class _AsCompletedWaiter(_Waiter): + """Used by as_completed().""" + + def __init__(self): + super(_AsCompletedWaiter, self).__init__() + self.lock = threading.Lock() + + def add_result(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + with self.lock: + super(_AsCompletedWaiter, self).add_cancelled(future) + self.event.set() + +class _FirstCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_COMPLETED).""" + + def add_result(self, future): + super(_FirstCompletedWaiter, self).add_result(future) + self.event.set() + + def add_exception(self, future): + super(_FirstCompletedWaiter, self).add_exception(future) + self.event.set() + + def add_cancelled(self, future): + super(_FirstCompletedWaiter, self).add_cancelled(future) + self.event.set() + +class _AllCompletedWaiter(_Waiter): + """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED).""" + + def __init__(self, num_pending_calls, stop_on_exception): + self.num_pending_calls = num_pending_calls + self.stop_on_exception = stop_on_exception + self.lock = threading.Lock() + super(_AllCompletedWaiter, self).__init__() + + def _decrement_pending_calls(self): + with self.lock: + self.num_pending_calls -= 1 + if not self.num_pending_calls: + self.event.set() + + def add_result(self, future): + super(_AllCompletedWaiter, self).add_result(future) + self._decrement_pending_calls() + + def add_exception(self, future): + super(_AllCompletedWaiter, self).add_exception(future) + if self.stop_on_exception: + self.event.set() + else: + self._decrement_pending_calls() + + def add_cancelled(self, future): + super(_AllCompletedWaiter, self).add_cancelled(future) + self._decrement_pending_calls() + +class _AcquireFutures(object): + """A context manager that does an ordered acquire of Future conditions.""" + + def __init__(self, futures): + self.futures = sorted(futures, key=id) + + def __enter__(self): + for future in self.futures: + future._condition.acquire() + + def __exit__(self, *args): + for future in self.futures: + future._condition.release() + +def _create_and_install_waiters(fs, return_when): + if return_when == _AS_COMPLETED: + waiter = _AsCompletedWaiter() + elif return_when == FIRST_COMPLETED: + waiter = _FirstCompletedWaiter() + else: + pending_count = sum( + f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs) + + if return_when == FIRST_EXCEPTION: + waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True) + elif return_when == ALL_COMPLETED: + waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False) + else: + raise ValueError("Invalid return condition: %r" % return_when) + + for f in 
fs: + f._waiters.append(waiter) + + return waiter + +def as_completed(fs, timeout=None): + """An iterator over the given futures that yields each as it completes. + + Args: + fs: The sequence of Futures (possibly created by different Executors) to + iterate over. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + + Returns: + An iterator that yields the given Futures as they complete (finished or + cancelled). If any given Futures are duplicated, they will be returned + once. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + """ + if timeout is not None: + end_time = timeout + time.time() + + fs = set(fs) + with _AcquireFutures(fs): + finished = set( + f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + pending = fs - finished + waiter = _create_and_install_waiters(fs, _AS_COMPLETED) + + try: + for future in finished: + yield future + + while pending: + if timeout is None: + wait_timeout = None + else: + wait_timeout = end_time - time.time() + if wait_timeout < 0: + raise TimeoutError( + '%d (of %d) futures unfinished' % ( + len(pending), len(fs))) + + waiter.event.wait(wait_timeout) + + with waiter.lock: + finished = waiter.finished_futures + waiter.finished_futures = [] + waiter.event.clear() + + for future in finished: + yield future + pending.remove(future) + + finally: + for f in fs: + with f._condition: + f._waiters.remove(waiter) + +DoneAndNotDoneFutures = collections.namedtuple( + 'DoneAndNotDoneFutures', 'done not_done') +def wait(fs, timeout=None, return_when=ALL_COMPLETED): + """Wait for the futures in the given sequence to complete. + + Args: + fs: The sequence of Futures (possibly created by different Executors) to + wait upon. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + return_when: Indicates when this function should return. The options + are: + + FIRST_COMPLETED - Return when any future finishes or is + cancelled. + FIRST_EXCEPTION - Return when any future finishes by raising an + exception. If no future raises an exception + then it is equivalent to ALL_COMPLETED. + ALL_COMPLETED - Return when all futures finish or are cancelled. + + Returns: + A named 2-tuple of sets. The first set, named 'done', contains the + futures that completed (is finished or cancelled) before the wait + completed. The second set, named 'not_done', contains uncompleted + futures. + """ + with _AcquireFutures(fs): + done = set(f for f in fs + if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]) + not_done = set(fs) - done + + if (return_when == FIRST_COMPLETED) and done: + return DoneAndNotDoneFutures(done, not_done) + elif (return_when == FIRST_EXCEPTION) and done: + if any(f for f in done + if not f.cancelled() and f.exception() is not None): + return DoneAndNotDoneFutures(done, not_done) + + if len(done) == len(fs): + return DoneAndNotDoneFutures(done, not_done) + + waiter = _create_and_install_waiters(fs, return_when) + + waiter.event.wait(timeout) + for f in fs: + with f._condition: + f._waiters.remove(waiter) + + done.update(waiter.finished_futures) + return DoneAndNotDoneFutures(done, set(fs) - done) + +class Future(object): + """Represents the result of an asynchronous computation.""" + + def __init__(self): + """Initializes the future. 
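wait() and as_completed() above are the module-level entry points for collecting results. A usage sketch (not part of the diff), assuming the ThreadPoolExecutor defined further down in this backport; the provider names are placeholders:

from concurrent import futures

def probe(provider):          # hypothetical search call
    return '%s: ok' % provider

with futures.ThreadPoolExecutor(max_workers=3) as pool:
    pending = [pool.submit(probe, name) for name in ('alpha', 'beta', 'gamma')]

    # Block until everything finishes (or the first exception), then iterate
    # the results in completion order.
    done, not_done = futures.wait(pending, timeout=30,
                                  return_when=futures.FIRST_EXCEPTION)
    for fut in futures.as_completed(pending, timeout=30):
        print(fut.result())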
Should not be called by clients.""" + self._condition = threading.Condition() + self._state = PENDING + self._result = None + self._exception = None + self._traceback = None + self._waiters = [] + self._done_callbacks = [] + + def _invoke_callbacks(self): + for callback in self._done_callbacks: + try: + callback(self) + except Exception: + LOGGER.exception('exception calling callback for %r', self) + except BaseException: + # Explicitly let all other new-style exceptions through so + # that we can catch all old-style exceptions with a simple + # "except:" clause below. + # + # All old-style exception objects are instances of + # types.InstanceType, but "except types.InstanceType:" does + # not catch old-style exceptions for some reason. Thus, the + # only way to catch all old-style exceptions without catching + # any new-style exceptions is to filter out the new-style + # exceptions, which all derive from BaseException. + raise + except: + # Because of the BaseException clause above, this handler only + # executes for old-style exception objects. + LOGGER.exception('exception calling callback for %r', self) + + def __repr__(self): + with self._condition: + if self._state == FINISHED: + if self._exception: + return '' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._exception.__class__.__name__) + else: + return '' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state], + self._result.__class__.__name__) + return '' % ( + hex(id(self)), + _STATE_TO_DESCRIPTION_MAP[self._state]) + + def cancel(self): + """Cancel the future if possible. + + Returns True if the future was cancelled, False otherwise. A future + cannot be cancelled if it is running or has already completed. + """ + with self._condition: + if self._state in [RUNNING, FINISHED]: + return False + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + return True + + self._state = CANCELLED + self._condition.notify_all() + + self._invoke_callbacks() + return True + + def cancelled(self): + """Return True if the future has cancelled.""" + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED] + + def running(self): + """Return True if the future is currently executing.""" + with self._condition: + return self._state == RUNNING + + def done(self): + """Return True of the future was cancelled or finished executing.""" + with self._condition: + return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED] + + def __get_result(self): + if self._exception: + if isinstance(self._exception, types.InstanceType): + # The exception is an instance of an old-style class, which + # means type(self._exception) returns types.ClassType instead + # of the exception's actual class type. + exception_type = self._exception.__class__ + else: + exception_type = type(self._exception) + raise exception_type, self._exception, self._traceback + else: + return self._result + + def add_done_callback(self, fn): + """Attaches a callable that will be called when the future finishes. + + Args: + fn: A callable that will be called with this future as its only + argument when the future completes or is cancelled. The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added. 
+ """ + with self._condition: + if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]: + self._done_callbacks.append(fn) + return + fn(self) + + def result(self, timeout=None): + """Return the result of the call that the future represents. + + Args: + timeout: The number of seconds to wait for the result if the future + isn't done. If None, then there is no limit on the wait time. + + Returns: + The result of the call that the future represents. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + Exception: If the call raised then that exception will be raised. + """ + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self.__get_result() + else: + raise TimeoutError() + + def exception_info(self, timeout=None): + """Return a tuple of (exception, traceback) raised by the call that the + future represents. + + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. + + Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + """ + with self._condition: + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception, self._traceback + + self._condition.wait(timeout) + + if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: + raise CancelledError() + elif self._state == FINISHED: + return self._exception, self._traceback + else: + raise TimeoutError() + + def exception(self, timeout=None): + """Return the exception raised by the call that the future represents. + + Args: + timeout: The number of seconds to wait for the exception if the + future isn't done. If None, then there is no limit on the wait + time. + + Returns: + The exception raised by the call that the future represents or None + if the call completed without raising. + + Raises: + CancelledError: If the future was cancelled. + TimeoutError: If the future didn't finish executing before the given + timeout. + """ + return self.exception_info(timeout)[0] + + # The following methods should only be used by Executors and in tests. + def set_running_or_notify_cancel(self): + """Mark the future as running or process any cancel notifications. + + Should only be used by Executor implementations and unit tests. + + If the future has been cancelled (cancel() was called and returned + True) then any threads waiting on the future completing (though calls + to as_completed() or wait()) are notified and False is returned. + + If the future was not cancelled then it is put in the running state + (future calls to running() will return True) and True is returned. + + This method should be called by Executor implementations before + executing the work associated with this future. If this method returns + False then the work should not be executed. + + Returns: + False if the Future was cancelled, True otherwise. 
+ + Raises: + RuntimeError: if this method was already called or if set_result() + or set_exception() was called. + """ + with self._condition: + if self._state == CANCELLED: + self._state = CANCELLED_AND_NOTIFIED + for waiter in self._waiters: + waiter.add_cancelled(self) + # self._condition.notify_all() is not necessary because + # self.cancel() triggers a notification. + return False + elif self._state == PENDING: + self._state = RUNNING + return True + else: + LOGGER.critical('Future %s in unexpected state: %s', + id(self), + self._state) + raise RuntimeError('Future in unexpected state') + + def set_result(self, result): + """Sets the return value of work associated with the future. + + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._result = result + self._state = FINISHED + for waiter in self._waiters: + waiter.add_result(self) + self._condition.notify_all() + self._invoke_callbacks() + + def set_exception_info(self, exception, traceback): + """Sets the result of the future as being the given exception + and traceback. + + Should only be used by Executor implementations and unit tests. + """ + with self._condition: + self._exception = exception + self._traceback = traceback + self._state = FINISHED + for waiter in self._waiters: + waiter.add_exception(self) + self._condition.notify_all() + self._invoke_callbacks() + + def set_exception(self, exception): + """Sets the result of the future as being the given exception. + + Should only be used by Executor implementations and unit tests. + """ + self.set_exception_info(exception, None) + +class Executor(object): + """This is an abstract base class for concrete asynchronous executors.""" + + def submit(self, fn, *args, **kwargs): + """Submits a callable to be executed with the given arguments. + + Schedules the callable to be executed as fn(*args, **kwargs) and returns + a Future instance representing the execution of the callable. + + Returns: + A Future representing the given call. + """ + raise NotImplementedError() + + def map(self, fn, *iterables, **kwargs): + """Returns a iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ + timeout = kwargs.get('timeout') + if timeout is not None: + end_time = timeout + time.time() + + fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)] + + # Yield must be hidden in closure so that the futures are submitted + # before the first iterator value is required. + def result_iterator(): + try: + for future in fs: + if timeout is None: + yield future.result() + else: + yield future.result(end_time - time.time()) + finally: + for future in fs: + future.cancel() + return result_iterator() + + def shutdown(self, wait=True): + """Clean-up the resources associated with the Executor. + + It is safe to call this method several times. Otherwise, no other + methods can be called after this one. + + Args: + wait: If True then shutdown will not return until all running + futures have finished executing and the resources used by the + executor have been reclaimed. 
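Putting the Future callback machinery and the Executor.submit()/shutdown() flow described above together, a small sketch (not part of the diff; the job function and issue id are hypothetical):

from concurrent import futures

def snatch(issue_id):                      # hypothetical download call
    if issue_id is None:
        raise ValueError('no issue id given')
    return 'snatched %s' % issue_id

def announce(fut):
    # Runs once the future is done; exception() avoids re-raising in the callback.
    err = fut.exception()
    print('failed: %s' % err if err else fut.result())

pool = futures.ThreadPoolExecutor(max_workers=2)
pool.submit(snatch, '101172').add_done_callback(announce)
pool.submit(snatch, None).add_done_callback(announce)
pool.shutdown(wait=True)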
+ """ + pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.shutdown(wait=True) + return False diff --git a/lib/concurrent/futures/process.py b/lib/concurrent/futures/process.py new file mode 100644 index 00000000..fa5b96fd --- /dev/null +++ b/lib/concurrent/futures/process.py @@ -0,0 +1,363 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ProcessPoolExecutor. + +The follow diagram and text describe the data-flow through the system: + +|======================= In-process =====================|== Out-of-process ==| + ++----------+ +----------+ +--------+ +-----------+ +---------+ +| | => | Work Ids | => | | => | Call Q | => | | +| | +----------+ | | +-----------+ | | +| | | ... | | | | ... | | | +| | | 6 | | | | 5, call() | | | +| | | 7 | | | | ... | | | +| Process | | ... | | Local | +-----------+ | Process | +| Pool | +----------+ | Worker | | #1..n | +| Executor | | Thread | | | +| | +----------- + | | +-----------+ | | +| | <=> | Work Items | <=> | | <= | Result Q | <= | | +| | +------------+ | | +-----------+ | | +| | | 6: call() | | | | ... | | | +| | | future | | | | 4, result | | | +| | | ... | | | | 3, except | | | ++----------+ +------------+ +--------+ +-----------+ +---------+ + +Executor.submit() called: +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict +- adds the id of the _WorkItem to the "Work Ids" queue + +Local worker thread: +- reads work ids from the "Work Ids" queue and looks up the corresponding + WorkItem from the "Work Items" dict: if the work item has been cancelled then + it is simply removed from the dict, otherwise it is repackaged as a + _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" + until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because + calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). +- reads _ResultItems from "Result Q", updates the future stored in the + "Work Items" dict and deletes the dict entry + +Process #1..n: +- reads _CallItems from "Call Q", executes the calls, and puts the resulting + _ResultItems in "Request Q" +""" + +import atexit +from concurrent.futures import _base +import Queue as queue +import multiprocessing +import threading +import weakref +import sys + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +# Workers are created as daemon threads and processes. This is done to allow the +# interpreter to exit when there are still idle processes in a +# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However, +# allowing workers to die with the interpreter has two undesirable properties: +# - The workers would still be running during interpretor shutdown, +# meaning that they would fail in unpredictable ways. +# - The workers could be killed while evaluating a work item, which could +# be bad if the callable being evaluated has external side-effects e.g. +# writing to a file. +# +# To work around this problem, an exit handler is installed which tells the +# workers to exit when their work queues are empty and then waits until the +# threads/processes finish. 
+ +_threads_queues = weakref.WeakKeyDictionary() +_shutdown = False + +def _python_exit(): + global _shutdown + _shutdown = True + items = list(_threads_queues.items()) if _threads_queues else () + for t, q in items: + q.put(None) + for t, q in items: + t.join(sys.maxint) + +# Controls how many more calls than processes will be queued in the call queue. +# A smaller number will mean that processes spend more time idle waiting for +# work while a larger number will make Future.cancel() succeed less frequently +# (Futures in the call queue cannot be cancelled). +EXTRA_QUEUED_CALLS = 1 + +class _WorkItem(object): + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + +class _ResultItem(object): + def __init__(self, work_id, exception=None, result=None): + self.work_id = work_id + self.exception = exception + self.result = result + +class _CallItem(object): + def __init__(self, work_id, fn, args, kwargs): + self.work_id = work_id + self.fn = fn + self.args = args + self.kwargs = kwargs + +def _process_worker(call_queue, result_queue): + """Evaluates calls from call_queue and places the results in result_queue. + + This worker is run in a separate process. + + Args: + call_queue: A multiprocessing.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A multiprocessing.Queue of _ResultItems that will written + to by the worker. + shutdown: A multiprocessing.Event that will be set as a signal to the + worker that it should exit when call_queue is empty. + """ + while True: + call_item = call_queue.get(block=True) + if call_item is None: + # Wake up queue management thread + result_queue.put(None) + return + try: + r = call_item.fn(*call_item.args, **call_item.kwargs) + except: + e = sys.exc_info()[1] + result_queue.put(_ResultItem(call_item.work_id, + exception=e)) + else: + result_queue.put(_ResultItem(call_item.work_id, + result=r)) + +def _add_call_item_to_queue(pending_work_items, + work_ids, + call_queue): + """Fills call_queue with _WorkItems from pending_work_items. + + This function never blocks. + + Args: + pending_work_items: A dict mapping work ids to _WorkItems e.g. + {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids + are consumed and the corresponding _WorkItems from + pending_work_items are transformed into _CallItems and put in + call_queue. + call_queue: A multiprocessing.Queue that will be filled with _CallItems + derived from _WorkItems. + """ + while True: + if call_queue.full(): + return + try: + work_id = work_ids.get(block=False) + except queue.Empty: + return + else: + work_item = pending_work_items[work_id] + + if work_item.future.set_running_or_notify_cancel(): + call_queue.put(_CallItem(work_id, + work_item.fn, + work_item.args, + work_item.kwargs), + block=True) + else: + del pending_work_items[work_id] + continue + +def _queue_management_worker(executor_reference, + processes, + pending_work_items, + work_ids_queue, + call_queue, + result_queue): + """Manages the communication between this process and the worker processes. + + This function is run in a local thread. + + Args: + executor_reference: A weakref.ref to the ProcessPoolExecutor that owns + this thread. Used to determine if the ProcessPoolExecutor has been + garbage collected and that this function can exit. + process: A list of the multiprocessing.Process instances used as + workers. 
+ pending_work_items: A dict mapping work ids to _WorkItems e.g. + {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]). + call_queue: A multiprocessing.Queue that will be filled with _CallItems + derived from _WorkItems for processing by the process workers. + result_queue: A multiprocessing.Queue of _ResultItems generated by the + process workers. + """ + nb_shutdown_processes = [0] + def shutdown_one_process(): + """Tell a worker to terminate, which will in turn wake us again""" + call_queue.put(None) + nb_shutdown_processes[0] += 1 + while True: + _add_call_item_to_queue(pending_work_items, + work_ids_queue, + call_queue) + + result_item = result_queue.get(block=True) + if result_item is not None: + work_item = pending_work_items[result_item.work_id] + del pending_work_items[result_item.work_id] + + if result_item.exception: + work_item.future.set_exception(result_item.exception) + else: + work_item.future.set_result(result_item.result) + # Delete references to object. See issue16284 + del work_item + # Check whether we should start shutting down. + executor = executor_reference() + # No more work items can be added if: + # - The interpreter is shutting down OR + # - The executor that owns this worker has been collected OR + # - The executor that owns this worker has been shutdown. + if _shutdown or executor is None or executor._shutdown_thread: + # Since no new work items can be added, it is safe to shutdown + # this thread if there are no pending work items. + if not pending_work_items: + while nb_shutdown_processes[0] < len(processes): + shutdown_one_process() + # If .join() is not called on the created processes then + # some multiprocessing.Queue methods may deadlock on Mac OS + # X. + for p in processes: + p.join() + call_queue.close() + return + del executor + +_system_limits_checked = False +_system_limited = None +def _check_system_limits(): + global _system_limits_checked, _system_limited + if _system_limits_checked: + if _system_limited: + raise NotImplementedError(_system_limited) + _system_limits_checked = True + try: + import os + nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") + except (AttributeError, ValueError): + # sysconf not available or setting not available + return + if nsems_max == -1: + # indetermine limit, assume that limit is determined + # by available memory only + return + if nsems_max >= 256: + # minimum number of semaphores available + # according to POSIX + return + _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max + raise NotImplementedError(_system_limited) + + +class ProcessPoolExecutor(_base.Executor): + def __init__(self, max_workers=None): + """Initializes a new ProcessPoolExecutor instance. + + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + """ + _check_system_limits() + + if max_workers is None: + self._max_workers = multiprocessing.cpu_count() + else: + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self._max_workers = max_workers + + # Make the call queue slightly larger than the number of processes to + # prevent the worker processes from idling. But don't make it too big + # because futures in the call queue cannot be cancelled. 
+ self._call_queue = multiprocessing.Queue(self._max_workers + + EXTRA_QUEUED_CALLS) + self._result_queue = multiprocessing.Queue() + self._work_ids = queue.Queue() + self._queue_management_thread = None + self._processes = set() + + # Shutdown is a two-step process. + self._shutdown_thread = False + self._shutdown_lock = threading.Lock() + self._queue_count = 0 + self._pending_work_items = {} + + def _start_queue_management_thread(self): + # When the executor gets lost, the weakref callback will wake up + # the queue management thread. + def weakref_cb(_, q=self._result_queue): + q.put(None) + if self._queue_management_thread is None: + self._queue_management_thread = threading.Thread( + target=_queue_management_worker, + args=(weakref.ref(self, weakref_cb), + self._processes, + self._pending_work_items, + self._work_ids, + self._call_queue, + self._result_queue)) + self._queue_management_thread.daemon = True + self._queue_management_thread.start() + _threads_queues[self._queue_management_thread] = self._result_queue + + def _adjust_process_count(self): + for _ in range(len(self._processes), self._max_workers): + p = multiprocessing.Process( + target=_process_worker, + args=(self._call_queue, + self._result_queue)) + p.start() + self._processes.add(p) + + def submit(self, fn, *args, **kwargs): + with self._shutdown_lock: + if self._shutdown_thread: + raise RuntimeError('cannot schedule new futures after shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._pending_work_items[self._queue_count] = w + self._work_ids.put(self._queue_count) + self._queue_count += 1 + # Wake up queue management thread + self._result_queue.put(None) + + self._start_queue_management_thread() + self._adjust_process_count() + return f + submit.__doc__ = _base.Executor.submit.__doc__ + + def shutdown(self, wait=True): + with self._shutdown_lock: + self._shutdown_thread = True + if self._queue_management_thread: + # Wake up queue management thread + self._result_queue.put(None) + if wait: + self._queue_management_thread.join(sys.maxint) + # To reduce the risk of openning too many files, remove references to + # objects that use file descriptors. + self._queue_management_thread = None + self._call_queue = None + self._result_queue = None + self._processes = None + shutdown.__doc__ = _base.Executor.shutdown.__doc__ + +atexit.register(_python_exit) diff --git a/lib/concurrent/futures/thread.py b/lib/concurrent/futures/thread.py new file mode 100644 index 00000000..efae619a --- /dev/null +++ b/lib/concurrent/futures/thread.py @@ -0,0 +1,149 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ThreadPoolExecutor.""" + +import atexit +from concurrent.futures import _base +import Queue as queue +import threading +import weakref +import sys + +try: + from multiprocessing import cpu_count +except ImportError: + # some platforms don't have multiprocessing + def cpu_count(): + return None + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +# Workers are created as daemon threads. This is done to allow the interpreter +# to exit when there are still idle threads in a ThreadPoolExecutor's thread +# pool (i.e. shutdown() was not called). However, allowing workers to die with +# the interpreter has two undesirable properties: +# - The workers would still be running during interpretor shutdown, +# meaning that they would fail in unpredictable ways. 
+# - The workers could be killed while evaluating a work item, which could +# be bad if the callable being evaluated has external side-effects e.g. +# writing to a file. +# +# To work around this problem, an exit handler is installed which tells the +# workers to exit when their work queues are empty and then waits until the +# threads finish. + +_threads_queues = weakref.WeakKeyDictionary() +_shutdown = False + +def _python_exit(): + global _shutdown + _shutdown = True + items = list(_threads_queues.items()) if _threads_queues else () + for t, q in items: + q.put(None) + for t, q in items: + t.join(sys.maxint) + +atexit.register(_python_exit) + +class _WorkItem(object): + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + + def run(self): + if not self.future.set_running_or_notify_cancel(): + return + + try: + result = self.fn(*self.args, **self.kwargs) + except: + e, tb = sys.exc_info()[1:] + self.future.set_exception_info(e, tb) + else: + self.future.set_result(result) + +def _worker(executor_reference, work_queue): + try: + while True: + work_item = work_queue.get(block=True) + if work_item is not None: + work_item.run() + # Delete references to object. See issue16284 + del work_item + continue + executor = executor_reference() + # Exit if: + # - The interpreter is shutting down OR + # - The executor that owns the worker has been collected OR + # - The executor that owns the worker has been shutdown. + if _shutdown or executor is None or executor._shutdown: + # Notice other workers + work_queue.put(None) + return + del executor + except: + _base.LOGGER.critical('Exception in worker', exc_info=True) + + +class ThreadPoolExecutor(_base.Executor): + def __init__(self, max_workers=None): + """Initializes a new ThreadPoolExecutor instance. + + Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + """ + if max_workers is None: + # Use this number because ThreadPoolExecutor is often + # used to overlap I/O instead of CPU work. + max_workers = (cpu_count() or 1) * 5 + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self._max_workers = max_workers + self._work_queue = queue.Queue() + self._threads = set() + self._shutdown = False + self._shutdown_lock = threading.Lock() + + def submit(self, fn, *args, **kwargs): + with self._shutdown_lock: + if self._shutdown: + raise RuntimeError('cannot schedule new futures after shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._work_queue.put(w) + self._adjust_thread_count() + return f + submit.__doc__ = _base.Executor.submit.__doc__ + + def _adjust_thread_count(self): + # When the executor gets lost, the weakref callback will wake up + # the worker threads. + def weakref_cb(_, q=self._work_queue): + q.put(None) + # TODO(bquinlan): Should avoid creating new threads if there are more + # idle threads than items in the work queue. 
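+        # Threads are created lazily: at most one new daemon thread per
+        # submit() call, until max_workers threads exist. Each thread is
+        # also recorded in _threads_queues so _python_exit() can wake it
+        # with a None sentinel and join it at interpreter exit.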
+ if len(self._threads) < self._max_workers: + t = threading.Thread(target=_worker, + args=(weakref.ref(self, weakref_cb), + self._work_queue)) + t.daemon = True + t.start() + self._threads.add(t) + _threads_queues[t] = self._work_queue + + def shutdown(self, wait=True): + with self._shutdown_lock: + self._shutdown = True + self._work_queue.put(None) + if wait: + for t in self._threads: + t.join(sys.maxint) + shutdown.__doc__ = _base.Executor.shutdown.__doc__ diff --git a/lib/funcsigs/__init__.py b/lib/funcsigs/__init__.py new file mode 100644 index 00000000..5f5378b4 --- /dev/null +++ b/lib/funcsigs/__init__.py @@ -0,0 +1,829 @@ +# Copyright 2001-2013 Python Software Foundation; All Rights Reserved +"""Function signature objects for callables + +Back port of Python 3.3's function signature tools from the inspect module, +modified to be compatible with Python 2.6, 2.7 and 3.3+. +""" +from __future__ import absolute_import, division, print_function +import itertools +import functools +import re +import types + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + +from funcsigs.version import __version__ + +__all__ = ['BoundArguments', 'Parameter', 'Signature', 'signature'] + + +_WrapperDescriptor = type(type.__call__) +_MethodWrapper = type(all.__call__) + +_NonUserDefinedCallables = (_WrapperDescriptor, + _MethodWrapper, + types.BuiltinFunctionType) + + +def formatannotation(annotation, base_module=None): + if isinstance(annotation, type): + if annotation.__module__ in ('builtins', '__builtin__', base_module): + return annotation.__name__ + return annotation.__module__+'.'+annotation.__name__ + return repr(annotation) + + +def _get_user_defined_method(cls, method_name, *nested): + try: + if cls is type: + return + meth = getattr(cls, method_name) + for name in nested: + meth = getattr(meth, name, meth) + except AttributeError: + return + else: + if not isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return meth + + +def signature(obj): + '''Get a signature object for the passed callable.''' + + if not callable(obj): + raise TypeError('{0!r} is not a callable object'.format(obj)) + + if isinstance(obj, types.MethodType): + sig = signature(obj.__func__) + if obj.__self__ is None: + # Unbound method - preserve as-is. + return sig + else: + # Bound method. Eat self - if we can. + params = tuple(sig.parameters.values()) + + if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY): + raise ValueError('invalid method signature') + + kind = params[0].kind + if kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY): + # Drop first parameter: + # '(p1, p2[, ...])' -> '(p2[, ...])' + params = params[1:] + else: + if kind is not _VAR_POSITIONAL: + # Unless we add a new parameter type we never + # get here + raise ValueError('invalid argument type') + # It's a var-positional parameter. + # Do nothing. '(*args[, ...])' -> '(*args[, ...])' + + return sig.replace(parameters=params) + + try: + sig = obj.__signature__ + except AttributeError: + pass + else: + if sig is not None: + return sig + + try: + # Was this function wrapped by a decorator? 
+ wrapped = obj.__wrapped__ + except AttributeError: + pass + else: + return signature(wrapped) + + if isinstance(obj, types.FunctionType): + return Signature.from_function(obj) + + if isinstance(obj, functools.partial): + sig = signature(obj.func) + + new_params = OrderedDict(sig.parameters.items()) + + partial_args = obj.args or () + partial_keywords = obj.keywords or {} + try: + ba = sig.bind_partial(*partial_args, **partial_keywords) + except TypeError as ex: + msg = 'partial object {0!r} has incorrect arguments'.format(obj) + raise ValueError(msg) + + for arg_name, arg_value in ba.arguments.items(): + param = new_params[arg_name] + if arg_name in partial_keywords: + # We set a new default value, because the following code + # is correct: + # + # >>> def foo(a): print(a) + # >>> print(partial(partial(foo, a=10), a=20)()) + # 20 + # >>> print(partial(partial(foo, a=10), a=20)(a=30)) + # 30 + # + # So, with 'partial' objects, passing a keyword argument is + # like setting a new default value for the corresponding + # parameter + # + # We also mark this parameter with '_partial_kwarg' + # flag. Later, in '_bind', the 'default' value of this + # parameter will be added to 'kwargs', to simulate + # the 'functools.partial' real call. + new_params[arg_name] = param.replace(default=arg_value, + _partial_kwarg=True) + + elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and + not param._partial_kwarg): + new_params.pop(arg_name) + + return sig.replace(parameters=new_params.values()) + + sig = None + if isinstance(obj, type): + # obj is a class or a metaclass + + # First, let's see if it has an overloaded __call__ defined + # in its metaclass + call = _get_user_defined_method(type(obj), '__call__') + if call is not None: + sig = signature(call) + else: + # Now we check if the 'obj' class has a '__new__' method + new = _get_user_defined_method(obj, '__new__') + if new is not None: + sig = signature(new) + else: + # Finally, we should have at least __init__ implemented + init = _get_user_defined_method(obj, '__init__') + if init is not None: + sig = signature(init) + elif not isinstance(obj, _NonUserDefinedCallables): + # An object with __call__ + # We also check that the 'obj' is not an instance of + # _WrapperDescriptor or _MethodWrapper to avoid + # infinite recursion (and even potential segfault) + call = _get_user_defined_method(type(obj), '__call__', 'im_func') + if call is not None: + sig = signature(call) + + if sig is not None: + # For classes and objects we skip the first parameter of their + # __call__, __new__, or __init__ methods + return sig.replace(parameters=tuple(sig.parameters.values())[1:]) + + if isinstance(obj, types.BuiltinFunctionType): + # Raise a nicer error message for builtins + msg = 'no signature found for builtin function {0!r}'.format(obj) + raise ValueError(msg) + + raise ValueError('callable {0!r} is not supported by signature'.format(obj)) + + +class _void(object): + '''A private marker - used in Parameter & Signature''' + + +class _empty(object): + pass + + +class _ParameterKind(int): + def __new__(self, *args, **kwargs): + obj = int.__new__(self, *args) + obj._name = kwargs['name'] + return obj + + def __str__(self): + return self._name + + def __repr__(self): + return '<_ParameterKind: {0!r}>'.format(self._name) + + +_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY') +_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD') +_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL') +_KEYWORD_ONLY = _ParameterKind(3, 
name='KEYWORD_ONLY') +_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD') + + +class Parameter(object): + '''Represents a parameter in a function signature. + + Has the following public attributes: + + * name : str + The name of the parameter as a string. + * default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is not set. + * annotation + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is not set. + * kind : str + Describes how argument values are bound to the parameter. + Possible values: `Parameter.POSITIONAL_ONLY`, + `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, + `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. + ''' + + __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg') + + POSITIONAL_ONLY = _POSITIONAL_ONLY + POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD + VAR_POSITIONAL = _VAR_POSITIONAL + KEYWORD_ONLY = _KEYWORD_ONLY + VAR_KEYWORD = _VAR_KEYWORD + + empty = _empty + + def __init__(self, name, kind, default=_empty, annotation=_empty, + _partial_kwarg=False): + + if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD, + _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD): + raise ValueError("invalid value for 'Parameter.kind' attribute") + self._kind = kind + + if default is not _empty: + if kind in (_VAR_POSITIONAL, _VAR_KEYWORD): + msg = '{0} parameters cannot have default values'.format(kind) + raise ValueError(msg) + self._default = default + self._annotation = annotation + + if name is None: + if kind != _POSITIONAL_ONLY: + raise ValueError("None is not a valid name for a " + "non-positional-only parameter") + self._name = name + else: + name = str(name) + if kind != _POSITIONAL_ONLY and not re.match(r'[a-z_]\w*$', name, re.I): + msg = '{0!r} is not a valid parameter name'.format(name) + raise ValueError(msg) + self._name = name + + self._partial_kwarg = _partial_kwarg + + @property + def name(self): + return self._name + + @property + def default(self): + return self._default + + @property + def annotation(self): + return self._annotation + + @property + def kind(self): + return self._kind + + def replace(self, name=_void, kind=_void, annotation=_void, + default=_void, _partial_kwarg=_void): + '''Creates a customized copy of the Parameter.''' + + if name is _void: + name = self._name + + if kind is _void: + kind = self._kind + + if annotation is _void: + annotation = self._annotation + + if default is _void: + default = self._default + + if _partial_kwarg is _void: + _partial_kwarg = self._partial_kwarg + + return type(self)(name, kind, default=default, annotation=annotation, + _partial_kwarg=_partial_kwarg) + + def __str__(self): + kind = self.kind + + formatted = self._name + if kind == _POSITIONAL_ONLY: + if formatted is None: + formatted = '' + formatted = '<{0}>'.format(formatted) + + # Add annotation and default value + if self._annotation is not _empty: + formatted = '{0}:{1}'.format(formatted, + formatannotation(self._annotation)) + + if self._default is not _empty: + formatted = '{0}={1}'.format(formatted, repr(self._default)) + + if kind == _VAR_POSITIONAL: + formatted = '*' + formatted + elif kind == _VAR_KEYWORD: + formatted = '**' + formatted + + return formatted + + def __repr__(self): + return '<{0} at {1:#x} {2!r}>'.format(self.__class__.__name__, + id(self), self.name) + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def 
__eq__(self, other): + return (issubclass(other.__class__, Parameter) and + self._name == other._name and + self._kind == other._kind and + self._default == other._default and + self._annotation == other._annotation) + + def __ne__(self, other): + return not self.__eq__(other) + + +class BoundArguments(object): + '''Result of `Signature.bind` call. Holds the mapping of arguments + to the function's parameters. + + Has the following public attributes: + + * arguments : OrderedDict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. + * signature : Signature + The Signature object that created this instance. + * args : tuple + Tuple of positional arguments values. + * kwargs : dict + Dict of keyword arguments values. + ''' + + def __init__(self, signature, arguments): + self.arguments = arguments + self._signature = signature + + @property + def signature(self): + return self._signature + + @property + def args(self): + args = [] + for param_name, param in self._signature.parameters.items(): + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + # Keyword arguments mapped by 'functools.partial' + # (Parameter._partial_kwarg is True) are mapped + # in 'BoundArguments.kwargs', along with VAR_KEYWORD & + # KEYWORD_ONLY + break + + try: + arg = self.arguments[param_name] + except KeyError: + # We're done here. Other arguments + # will be mapped in 'BoundArguments.kwargs' + break + else: + if param.kind == _VAR_POSITIONAL: + # *args + args.extend(arg) + else: + # plain argument + args.append(arg) + + return tuple(args) + + @property + def kwargs(self): + kwargs = {} + kwargs_started = False + for param_name, param in self._signature.parameters.items(): + if not kwargs_started: + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + kwargs_started = True + else: + if param_name not in self.arguments: + kwargs_started = True + continue + + if not kwargs_started: + continue + + try: + arg = self.arguments[param_name] + except KeyError: + pass + else: + if param.kind == _VAR_KEYWORD: + # **kwargs + kwargs.update(arg) + else: + # plain keyword argument + kwargs[param_name] = arg + + return kwargs + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def __eq__(self, other): + return (issubclass(other.__class__, BoundArguments) and + self.signature == other.signature and + self.arguments == other.arguments) + + def __ne__(self, other): + return not self.__eq__(other) + + +class Signature(object): + '''A Signature object represents the overall signature of a function. + It stores a Parameter object for each parameter accepted by the + function, as well as information specific to the function itself. + + A Signature object has the following public attributes and methods: + + * parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in `code.co_varnames`). + * return_annotation : object + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is not set. + * bind(*args, **kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. 
+ * bind_partial(*args, **kwargs) -> BoundArguments + Creates a partial mapping from positional and keyword arguments + to parameters (simulating 'functools.partial' behavior.) + ''' + + __slots__ = ('_return_annotation', '_parameters') + + _parameter_cls = Parameter + _bound_arguments_cls = BoundArguments + + empty = _empty + + def __init__(self, parameters=None, return_annotation=_empty, + __validate_parameters__=True): + '''Constructs Signature from the given list of Parameter + objects and 'return_annotation'. All arguments are optional. + ''' + + if parameters is None: + params = OrderedDict() + else: + if __validate_parameters__: + params = OrderedDict() + top_kind = _POSITIONAL_ONLY + + for idx, param in enumerate(parameters): + kind = param.kind + if kind < top_kind: + msg = 'wrong parameter order: {0} before {1}' + msg = msg.format(top_kind, param.kind) + raise ValueError(msg) + else: + top_kind = kind + + name = param.name + if name is None: + name = str(idx) + param = param.replace(name=name) + + if name in params: + msg = 'duplicate parameter name: {0!r}'.format(name) + raise ValueError(msg) + params[name] = param + else: + params = OrderedDict(((param.name, param) + for param in parameters)) + + self._parameters = params + self._return_annotation = return_annotation + + @classmethod + def from_function(cls, func): + '''Constructs Signature for the given python function''' + + if not isinstance(func, types.FunctionType): + raise TypeError('{0!r} is not a Python function'.format(func)) + + Parameter = cls._parameter_cls + + # Parameter information. + func_code = func.__code__ + pos_count = func_code.co_argcount + arg_names = func_code.co_varnames + positional = tuple(arg_names[:pos_count]) + keyword_only_count = getattr(func_code, 'co_kwonlyargcount', 0) + keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] + annotations = getattr(func, '__annotations__', {}) + defaults = func.__defaults__ + kwdefaults = getattr(func, '__kwdefaults__', None) + + if defaults: + pos_default_count = len(defaults) + else: + pos_default_count = 0 + + parameters = [] + + # Non-keyword-only parameters w/o defaults. + non_default_count = pos_count - pos_default_count + for name in positional[:non_default_count]: + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD)) + + # ... w/ defaults. + for offset, name in enumerate(positional[non_default_count:]): + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD, + default=defaults[offset])) + + # *args + if func_code.co_flags & 0x04: + name = arg_names[pos_count + keyword_only_count] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_POSITIONAL)) + + # Keyword-only parameters. 
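+        # (On Python 2, code objects have no co_kwonlyargcount, so
+        # keyword_only is empty here and this loop is a no-op.)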
+ for name in keyword_only: + default = _empty + if kwdefaults is not None: + default = kwdefaults.get(name, _empty) + + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_KEYWORD_ONLY, + default=default)) + # **kwargs + if func_code.co_flags & 0x08: + index = pos_count + keyword_only_count + if func_code.co_flags & 0x04: + index += 1 + + name = arg_names[index] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_KEYWORD)) + + return cls(parameters, + return_annotation=annotations.get('return', _empty), + __validate_parameters__=False) + + @property + def parameters(self): + try: + return types.MappingProxyType(self._parameters) + except AttributeError: + return OrderedDict(self._parameters.items()) + + @property + def return_annotation(self): + return self._return_annotation + + def replace(self, parameters=_void, return_annotation=_void): + '''Creates a customized copy of the Signature. + Pass 'parameters' and/or 'return_annotation' arguments + to override them in the new copy. + ''' + + if parameters is _void: + parameters = self.parameters.values() + + if return_annotation is _void: + return_annotation = self._return_annotation + + return type(self)(parameters, + return_annotation=return_annotation) + + def __hash__(self): + msg = "unhashable type: '{0}'".format(self.__class__.__name__) + raise TypeError(msg) + + def __eq__(self, other): + if (not issubclass(type(other), Signature) or + self.return_annotation != other.return_annotation or + len(self.parameters) != len(other.parameters)): + return False + + other_positions = dict((param, idx) + for idx, param in enumerate(other.parameters.keys())) + + for idx, (param_name, param) in enumerate(self.parameters.items()): + if param.kind == _KEYWORD_ONLY: + try: + other_param = other.parameters[param_name] + except KeyError: + return False + else: + if param != other_param: + return False + else: + try: + other_idx = other_positions[param_name] + except KeyError: + return False + else: + if (idx != other_idx or + param != other.parameters[param_name]): + return False + + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def _bind(self, args, kwargs, partial=False): + '''Private method. Don't use directly.''' + + arguments = OrderedDict() + + parameters = iter(self.parameters.values()) + parameters_ex = () + arg_vals = iter(args) + + if partial: + # Support for binding arguments to 'functools.partial' objects. + # See 'functools.partial' case in 'signature()' implementation + # for details. + for param_name, param in self.parameters.items(): + if (param._partial_kwarg and param_name not in kwargs): + # Simulating 'functools.partial' behavior + kwargs[param_name] = param.default + + while True: + # Let's iterate through the positional arguments and corresponding + # parameters + try: + arg_val = next(arg_vals) + except StopIteration: + # No more positional arguments + try: + param = next(parameters) + except StopIteration: + # No more parameters. That's it. Just need to check that + # we have no `kwargs` after this while loop + break + else: + if param.kind == _VAR_POSITIONAL: + # That's OK, just empty *args. 
Let's start parsing + # kwargs + break + elif param.name in kwargs: + if param.kind == _POSITIONAL_ONLY: + msg = '{arg!r} parameter is positional only, ' \ + 'but was passed as a keyword' + msg = msg.format(arg=param.name) + raise TypeError(msg) + parameters_ex = (param,) + break + elif (param.kind == _VAR_KEYWORD or + param.default is not _empty): + # That's fine too - we have a default value for this + # parameter. So, lets start parsing `kwargs`, starting + # with the current parameter + parameters_ex = (param,) + break + else: + if partial: + parameters_ex = (param,) + break + else: + msg = '{arg!r} parameter lacking default value' + msg = msg.format(arg=param.name) + raise TypeError(msg) + else: + # We have a positional argument to process + try: + param = next(parameters) + except StopIteration: + raise TypeError('too many positional arguments') + else: + if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY): + # Looks like we have no parameter for this positional + # argument + raise TypeError('too many positional arguments') + + if param.kind == _VAR_POSITIONAL: + # We have an '*args'-like argument, let's fill it with + # all positional arguments we have left and move on to + # the next phase + values = [arg_val] + values.extend(arg_vals) + arguments[param.name] = tuple(values) + break + + if param.name in kwargs: + raise TypeError('multiple values for argument ' + '{arg!r}'.format(arg=param.name)) + + arguments[param.name] = arg_val + + # Now, we iterate through the remaining parameters to process + # keyword arguments + kwargs_param = None + for param in itertools.chain(parameters_ex, parameters): + if param.kind == _POSITIONAL_ONLY: + # This should never happen in case of a properly built + # Signature object (but let's have this check here + # to ensure correct behaviour just in case) + raise TypeError('{arg!r} parameter is positional only, ' + 'but was passed as a keyword'. \ + format(arg=param.name)) + + if param.kind == _VAR_KEYWORD: + # Memorize that we have a '**kwargs'-like parameter + kwargs_param = param + continue + + param_name = param.name + try: + arg_val = kwargs.pop(param_name) + except KeyError: + # We have no value for this parameter. It's fine though, + # if it has a default value, or it is an '*args'-like + # parameter, left alone by the processing of positional + # arguments. + if (not partial and param.kind != _VAR_POSITIONAL and + param.default is _empty): + raise TypeError('{arg!r} parameter lacking default value'. \ + format(arg=param_name)) + + else: + arguments[param_name] = arg_val + + if kwargs: + if kwargs_param is not None: + # Process our '**kwargs'-like parameter + arguments[kwargs_param.name] = kwargs + else: + raise TypeError('too many keyword arguments %r' % kwargs) + + return self._bound_arguments_cls(self, arguments) + + def bind(*args, **kwargs): + '''Get a BoundArguments object, that maps the passed `args` + and `kwargs` to the function's signature. Raises `TypeError` + if the passed arguments can not be bound. + ''' + return args[0]._bind(args[1:], kwargs) + + def bind_partial(self, *args, **kwargs): + '''Get a BoundArguments object, that partially maps the + passed `args` and `kwargs` to the function's signature. + Raises `TypeError` if the passed arguments can not be bound. 
+ ''' + return self._bind(args, kwargs, partial=True) + + def __str__(self): + result = [] + render_kw_only_separator = True + for idx, param in enumerate(self.parameters.values()): + formatted = str(param) + + kind = param.kind + if kind == _VAR_POSITIONAL: + # OK, we have an '*args'-like parameter, so we won't need + # a '*' to separate keyword-only arguments + render_kw_only_separator = False + elif kind == _KEYWORD_ONLY and render_kw_only_separator: + # We have a keyword-only parameter to render and we haven't + # rendered an '*args'-like parameter before, so add a '*' + # separator to the parameters list ("foo(arg1, *, arg2)" case) + result.append('*') + # This condition should be only triggered once, so + # reset the flag + render_kw_only_separator = False + + result.append(formatted) + + rendered = '({0})'.format(', '.join(result)) + + if self.return_annotation is not _empty: + anno = formatannotation(self.return_annotation) + rendered += ' -> {0}'.format(anno) + + return rendered diff --git a/lib/funcsigs/version.py b/lib/funcsigs/version.py new file mode 100644 index 00000000..7863915f --- /dev/null +++ b/lib/funcsigs/version.py @@ -0,0 +1 @@ +__version__ = "1.0.2" diff --git a/lib/pytz/LICENSE.txt b/lib/pytz/LICENSE.txt new file mode 100644 index 00000000..5e12fcca --- /dev/null +++ b/lib/pytz/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2003-2009 Stuart Bishop + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/lib/pytz/README.txt b/lib/pytz/README.txt new file mode 100644 index 00000000..8b216960 --- /dev/null +++ b/lib/pytz/README.txt @@ -0,0 +1,575 @@ +pytz - World Timezone Definitions for Python +============================================ + +:Author: Stuart Bishop + +Introduction +~~~~~~~~~~~~ + +pytz brings the Olson tz database into Python. This library allows +accurate and cross platform timezone calculations using Python 2.4 +or higher. It also solves the issue of ambiguous times at the end +of daylight saving time, which you can read more about in the Python +Library Reference (``datetime.tzinfo``). + +Almost all of the Olson timezones are supported. + +.. note:: + + This library differs from the documented Python API for + tzinfo implementations; if you want to create local wallclock + times you need to use the ``localize()`` method documented in this + document. In addition, if you perform date arithmetic on local + times that cross DST boundaries, the result may be in an incorrect + timezone (ie. 
subtract 1 minute from 2002-10-27 1:00 EST and you get + 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A + ``normalize()`` method is provided to correct this. Unfortunately these + issues cannot be resolved without modifying the Python datetime + implementation (see PEP-431). + + +Installation +~~~~~~~~~~~~ + +This package can either be installed from a .egg file using setuptools, +or from the tarball using the standard Python distutils. + +If you are installing from a tarball, run the following command as an +administrative user:: + + python setup.py install + +If you are installing using setuptools, you don't even need to download +anything as the latest version will be downloaded for you +from the Python package index:: + + easy_install --upgrade pytz + +If you already have the .egg file, you can use that too:: + + easy_install pytz-2008g-py2.6.egg + + +Example & Usage +~~~~~~~~~~~~~~~ + +Localized times and date arithmetic +----------------------------------- + +>>> from datetime import datetime, timedelta +>>> from pytz import timezone +>>> import pytz +>>> utc = pytz.utc +>>> utc.zone +'UTC' +>>> eastern = timezone('US/Eastern') +>>> eastern.zone +'US/Eastern' +>>> amsterdam = timezone('Europe/Amsterdam') +>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z' + +This library only supports two ways of building a localized time. The +first is to use the ``localize()`` method provided by the pytz library. +This is used to localize a naive datetime (datetime with no timezone +information): + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0)) +>>> print(loc_dt.strftime(fmt)) +2002-10-27 06:00:00 EST-0500 + +The second way of building a localized time is by converting an existing +localized time using the standard ``astimezone()`` method: + +>>> ams_dt = loc_dt.astimezone(amsterdam) +>>> ams_dt.strftime(fmt) +'2002-10-27 12:00:00 CET+0100' + +Unfortunately using the tzinfo argument of the standard datetime +constructors ''does not work'' with pytz for many timezones. + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt) +'2002-10-27 12:00:00 LMT+0020' + +It is safe for timezones without daylight saving transitions though, such +as UTC: + +>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt) +'2002-10-27 12:00:00 UTC+0000' + +The preferred way of dealing with times is to always work in UTC, +converting to localtime only when generating output to be read +by humans. + +>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) +>>> loc_dt = utc_dt.astimezone(eastern) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:00:00 EST-0500' + +This library also allows you to do date arithmetic using local +times, although it is more complicated than working in UTC as you +need to use the ``normalize()`` method to handle daylight saving time +and other timezone transitions. In this example, ``loc_dt`` is set +to the instant when daylight saving time ends in the US/Eastern +timezone. + +>>> before = loc_dt - timedelta(minutes=10) +>>> before.strftime(fmt) +'2002-10-27 00:50:00 EST-0500' +>>> eastern.normalize(before).strftime(fmt) +'2002-10-27 01:50:00 EDT-0400' +>>> after = eastern.normalize(before + timedelta(minutes=20)) +>>> after.strftime(fmt) +'2002-10-27 01:10:00 EST-0500' + +Creating local times is also tricky, and the reason why working with +local times is not recommended. 
Unfortunately, you cannot just pass +a ``tzinfo`` argument when constructing a datetime (see the next +section for more details) + +>>> dt = datetime(2002, 10, 27, 1, 30, 0) +>>> dt1 = eastern.localize(dt, is_dst=True) +>>> dt1.strftime(fmt) +'2002-10-27 01:30:00 EDT-0400' +>>> dt2 = eastern.localize(dt, is_dst=False) +>>> dt2.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +Converting between timezones also needs special attention. We also need +to use the ``normalize()`` method to ensure the conversion is correct. + +>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899)) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz)) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = utc.normalize(au_dt.astimezone(utc)) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' + +You can take shortcuts when dealing with the UTC side of timezone +conversions. ``normalize()`` and ``localize()`` are not really +necessary when there are no daylight saving time transitions to +deal with. + +>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc) +>>> utc_dt.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' +>>> au_tz = timezone('Australia/Sydney') +>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz)) +>>> au_dt.strftime(fmt) +'2006-03-27 08:34:59 AEDT+1100' +>>> utc_dt2 = au_dt.astimezone(utc) +>>> utc_dt2.strftime(fmt) +'2006-03-26 21:34:59 UTC+0000' + + +``tzinfo`` API +-------------- + +The ``tzinfo`` instances returned by the ``timezone()`` function have +been extended to cope with ambiguous times by adding an ``is_dst`` +parameter to the ``utcoffset()``, ``dst()`` && ``tzname()`` methods. + +>>> tz = timezone('America/St_Johns') + +>>> normal = datetime(2009, 9, 1) +>>> ambiguous = datetime(2009, 10, 31, 23, 30) + +The ``is_dst`` parameter is ignored for most timestamps. It is only used +during DST transition ambiguous periods to resulve that ambiguity. + +>>> tz.utcoffset(normal, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=True) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=True) +datetime.timedelta(-1, 77400) +>>> tz.dst(ambiguous, is_dst=True) +datetime.timedelta(0, 3600) +>>> tz.tzname(ambiguous, is_dst=True) +'NDT' + +>>> tz.utcoffset(normal, is_dst=False) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal, is_dst=False) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal, is_dst=False) +'NDT' + +>>> tz.utcoffset(ambiguous, is_dst=False) +datetime.timedelta(-1, 73800) +>>> tz.dst(ambiguous, is_dst=False) +datetime.timedelta(0) +>>> tz.tzname(ambiguous, is_dst=False) +'NST' + +If ``is_dst`` is not specified, ambiguous timestamps will raise +an ``pytz.exceptions.AmbiguousTimeError`` exception. + +>>> tz.utcoffset(normal) +datetime.timedelta(-1, 77400) +>>> tz.dst(normal) +datetime.timedelta(0, 3600) +>>> tz.tzname(normal) +'NDT' + +>>> import pytz.exceptions +>>> try: +... tz.utcoffset(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.dst(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 +>>> try: +... tz.tzname(ambiguous) +... except pytz.exceptions.AmbiguousTimeError: +... 
print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous) +pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00 + + +Problems with Localtime +~~~~~~~~~~~~~~~~~~~~~~~ + +The major problem we have to deal with is that certain datetimes +may occur twice in a year. For example, in the US/Eastern timezone +on the last Sunday morning in October, the following sequence +happens: + + - 01:00 EDT occurs + - 1 hour later, instead of 2:00am the clock is turned back 1 hour + and 01:00 happens again (this time 01:00 EST) + +In fact, every instant between 01:00 and 02:00 occurs twice. This means +that if you try and create a time in the 'US/Eastern' timezone +the standard datetime syntax, there is no way to specify if you meant +before of after the end-of-daylight-saving-time transition. Using the +pytz custom syntax, the best you can do is make an educated guess: + +>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00)) +>>> loc_dt.strftime(fmt) +'2002-10-27 01:30:00 EST-0500' + +As you can see, the system has chosen one for you and there is a 50% +chance of it being out by one hour. For some applications, this does +not matter. However, if you are trying to schedule meetings with people +in different timezones or analyze log files it is not acceptable. + +The best and simplest solution is to stick with using UTC. The pytz +package encourages using UTC for internal timezone representation by +including a special UTC implementation based on the standard Python +reference implementation in the Python documentation. + +The UTC timezone unpickles to be the same instance, and pickles to a +smaller size than other pytz tzinfo instances. The UTC implementation +can be obtained as pytz.utc, pytz.UTC, or pytz.timezone('UTC'). + +>>> import pickle, pytz +>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) +>>> naive = dt.replace(tzinfo=None) +>>> p = pickle.dumps(dt, 1) +>>> naive_p = pickle.dumps(naive, 1) +>>> len(p) - len(naive_p) +17 +>>> new = pickle.loads(p) +>>> new == dt +True +>>> new is dt +False +>>> new.tzinfo is dt.tzinfo +True +>>> pytz.utc is pytz.UTC is pytz.timezone('UTC') +True + +Note that some other timezones are commonly thought of as the same (GMT, +Greenwich, Universal, etc.). The definition of UTC is distinct from these +other timezones, and they are not equivalent. For this reason, they will +not compare the same in Python. + +>>> utc == pytz.timezone('GMT') +False + +See the section `What is UTC`_, below. + +If you insist on working with local times, this library provides a +facility for constructing them unambiguously: + +>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00) +>>> est_dt = eastern.localize(loc_dt, is_dst=True) +>>> edt_dt = eastern.localize(loc_dt, is_dst=False) +>>> print(est_dt.strftime(fmt) + ' / ' + edt_dt.strftime(fmt)) +2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500 + +If you pass None as the is_dst flag to localize(), pytz will refuse to +guess and raise exceptions if you try to build ambiguous or non-existent +times. + +For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern +timezone when the clocks where put back at the end of Daylight Saving +Time: + +>>> dt = datetime(2002, 10, 27, 1, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.AmbiguousTimeError: +... 
print('pytz.exceptions.AmbiguousTimeError: %s' % dt) +pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00 + +Similarly, 2:30am on 7th April 2002 never happened at all in the +US/Eastern timezone, as the clocks where put forward at 2:00am skipping +the entire hour: + +>>> dt = datetime(2002, 4, 7, 2, 30, 00) +>>> try: +... eastern.localize(dt, is_dst=None) +... except pytz.exceptions.NonExistentTimeError: +... print('pytz.exceptions.NonExistentTimeError: %s' % dt) +pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00 + +Both of these exceptions share a common base class to make error handling +easier: + +>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError) +True +>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError) +True + + +A special case is where countries change their timezone definitions +with no daylight savings time switch. For example, in 1915 Warsaw +switched from Warsaw time to Central European time with no daylight savings +transition. So at the stroke of midnight on August 5th 1915 the clocks +were wound back 24 minutes creating an ambiguous time period that cannot +be specified without referring to the timezone abbreviation or the +actual UTC offset. In this case midnight happened twice, neither time +during a daylight saving time period. pytz handles this transition by +treating the ambiguous period before the switch as daylight savings +time, and the ambiguous period after as standard time. + + +>>> warsaw = pytz.timezone('Europe/Warsaw') +>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True) +>>> amb_dt1.strftime(fmt) +'1915-08-04 23:59:59 WMT+0124' +>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False) +>>> amb_dt2.strftime(fmt) +'1915-08-04 23:59:59 CET+0100' +>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False) +>>> switch_dt.strftime(fmt) +'1915-08-05 00:00:00 CET+0100' +>>> str(switch_dt - amb_dt1) +'0:24:01' +>>> str(switch_dt - amb_dt2) +'0:00:01' + +The best way of creating a time during an ambiguous time period is +by converting from another timezone such as UTC: + +>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc) +>>> utc_dt.astimezone(warsaw).strftime(fmt) +'1915-08-04 23:36:00 CET+0100' + +The standard Python way of handling all these ambiguities is not to +handle them, such as demonstrated in this example using the US/Eastern +timezone definition from the Python documentation (Note that this +implementation only works for dates between 1987 and 2006 - it is +included for tests only!): + +>>> from pytz.reference import Eastern # pytz.reference only for tests +>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern) +>>> str(dt) +'2002-10-27 00:30:00-04:00' +>>> str(dt + timedelta(hours=1)) +'2002-10-27 01:30:00-05:00' +>>> str(dt + timedelta(hours=2)) +'2002-10-27 02:30:00-05:00' +>>> str(dt + timedelta(hours=3)) +'2002-10-27 03:30:00-05:00' + +Notice the first two results? At first glance you might think they are +correct, but taking the UTC offset into account you find that they are +actually two hours appart instead of the 1 hour we asked for. + +>>> from pytz.reference import UTC # pytz.reference only for tests +>>> str(dt.astimezone(UTC)) +'2002-10-27 04:30:00+00:00' +>>> str((dt + timedelta(hours=1)).astimezone(UTC)) +'2002-10-27 06:30:00+00:00' + + +Country Information +~~~~~~~~~~~~~~~~~~~ + +A mechanism is provided to access the timezones commonly in use +for a particular country, looked up using the ISO 3166 country code. 
+It returns a list of strings that can be used to retrieve the relevant +tzinfo instance using ``pytz.timezone()``: + +>>> print(' '.join(pytz.country_timezones['nz'])) +Pacific/Auckland Pacific/Chatham + +The Olson database comes with a ISO 3166 country code to English country +name mapping that pytz exposes as a dictionary: + +>>> print(pytz.country_names['nz']) +New Zealand + + +What is UTC +~~~~~~~~~~~ + +'UTC' is `Coordinated Universal Time`_. It is a successor to, but distinct +from, Greenwich Mean Time (GMT) and the various definitions of Universal +Time. UTC is now the worldwide standard for regulating clocks and time +measurement. + +All other timezones are defined relative to UTC, and include offsets like +UTC+0800 - hours to add or subtract from UTC to derive the local time. No +daylight saving time occurs in UTC, making it a useful timezone to perform +date arithmetic without worrying about the confusion and ambiguities caused +by daylight saving time transitions, your country changing its timezone, or +mobile computers that roam through multiple timezones. + +.. _Coordinated Universal Time: https://en.wikipedia.org/wiki/Coordinated_Universal_Time + + +Helpers +~~~~~~~ + +There are two lists of timezones provided. + +``all_timezones`` is the exhaustive list of the timezone names that can +be used. + +>>> from pytz import all_timezones +>>> len(all_timezones) >= 500 +True +>>> 'Etc/Greenwich' in all_timezones +True + +``common_timezones`` is a list of useful, current timezones. It doesn't +contain deprecated zones or historical zones, except for a few I've +deemed in common usage, such as US/Eastern (open a bug report if you +think other timezones are deserving of being included here). It is also +a sequence of strings. + +>>> from pytz import common_timezones +>>> len(common_timezones) < len(all_timezones) +True +>>> 'Etc/Greenwich' in common_timezones +False +>>> 'Australia/Melbourne' in common_timezones +True +>>> 'US/Eastern' in common_timezones +True +>>> 'Canada/Eastern' in common_timezones +True +>>> 'US/Pacific-New' in all_timezones +True +>>> 'US/Pacific-New' in common_timezones +False + +Both ``common_timezones`` and ``all_timezones`` are alphabetically +sorted: + +>>> common_timezones_dupe = common_timezones[:] +>>> common_timezones_dupe.sort() +>>> common_timezones == common_timezones_dupe +True +>>> all_timezones_dupe = all_timezones[:] +>>> all_timezones_dupe.sort() +>>> all_timezones == all_timezones_dupe +True + +``all_timezones`` and ``common_timezones`` are also available as sets. + +>>> from pytz import all_timezones_set, common_timezones_set +>>> 'US/Eastern' in all_timezones_set +True +>>> 'US/Eastern' in common_timezones_set +True +>>> 'Australia/Victoria' in common_timezones_set +False + +You can also retrieve lists of timezones used by particular countries +using the ``country_timezones()`` function. It requires an ISO-3166 +two letter country code. + +>>> from pytz import country_timezones +>>> print(' '.join(country_timezones('ch'))) +Europe/Zurich +>>> print(' '.join(country_timezones('CH'))) +Europe/Zurich + + +License +~~~~~~~ + +MIT license. + +This code is also available as part of Zope 3 under the Zope Public +License, Version 2.1 (ZPL). + +I'm happy to relicense this code if necessary for inclusion in other +open source projects. + + +Latest Versions +~~~~~~~~~~~~~~~ + +This package will be updated after releases of the Olson timezone +database. The latest version can be downloaded from the `Python Package +Index `_. 
The code that is used +to generate this distribution is hosted on launchpad.net and available +using the `Bazaar version control system `_ +using:: + + bzr branch lp:pytz + +Announcements of new releases are made on +`Launchpad `_, and the +`Atom feed `_ +hosted there. + + +Bugs, Feature Requests & Patches +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Bugs can be reported using `Launchpad `_. + + +Issues & Limitations +~~~~~~~~~~~~~~~~~~~~ + +- Offsets from UTC are rounded to the nearest whole minute, so timezones + such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This + is a limitation of the Python datetime library. + +- If you think a timezone definition is incorrect, I probably can't fix + it. pytz is a direct translation of the Olson timezone database, and + changes to the timezone definitions need to be made to this source. + If you find errors they should be reported to the time zone mailing + list, linked from http://www.iana.org/time-zones. + + +Further Reading +~~~~~~~~~~~~~~~ + +More info than you want to know about timezones: +http://www.twinsun.com/tz/tz-link.htm + + +Contact +~~~~~~~ + +Stuart Bishop + + diff --git a/lib/pytz/__init__.py b/lib/pytz/__init__.py new file mode 100644 index 00000000..e5cbe56d --- /dev/null +++ b/lib/pytz/__init__.py @@ -0,0 +1,1513 @@ +''' +datetime.tzinfo timezone definitions generated from the +Olson timezone database: + + ftp://elsie.nci.nih.gov/pub/tz*.tar.gz + +See the datetime section of the Python Library Reference for information +on how to use these modules. +''' + +# The Olson database is updated several times a year. +OLSON_VERSION = '2014j' +VERSION = '2014.10' # Switching to pip compatible version numbering. +__version__ = VERSION + +OLSEN_VERSION = OLSON_VERSION # Old releases had this misspelling + +__all__ = [ + 'timezone', 'utc', 'country_timezones', 'country_names', + 'AmbiguousTimeError', 'InvalidTimeError', + 'NonExistentTimeError', 'UnknownTimeZoneError', + 'all_timezones', 'all_timezones_set', + 'common_timezones', 'common_timezones_set', + ] + +import sys, datetime, os.path, gettext + +try: + from pkg_resources import resource_stream +except ImportError: + resource_stream = None + +from pytz.exceptions import AmbiguousTimeError +from pytz.exceptions import InvalidTimeError +from pytz.exceptions import NonExistentTimeError +from pytz.exceptions import UnknownTimeZoneError +from pytz.lazy import LazyDict, LazyList, LazySet +from pytz.tzinfo import unpickler +from pytz.tzfile import build_tzinfo, _byte_string + + +try: + unicode + +except NameError: # Python 3.x + + # Python 3.x doesn't have unicode(), making writing code + # for Python 2.3 and Python 3.x a pain. + unicode = str + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii('\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + s.encode('US-ASCII') # Raise an exception if not ASCII + return s # But return the original string - not a byte string. + +else: # Python 2.x + + def ascii(s): + r""" + >>> ascii('Hello') + 'Hello' + >>> ascii(u'Hello') + 'Hello' + >>> ascii(u'\N{TRADE MARK SIGN}') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + UnicodeEncodeError: ... + """ + return s.encode('US-ASCII') + + +def open_resource(name): + """Open a resource from the zoneinfo subdir for reading. + + Uses the pkg_resources module if available and no standard file + found at the calculated location. 
+ """ + name_parts = name.lstrip('/').split('/') + for part in name_parts: + if part == os.path.pardir or os.path.sep in part: + raise ValueError('Bad path segment: %r' % part) + filename = os.path.join(os.path.dirname(__file__), + 'zoneinfo', *name_parts) + if not os.path.exists(filename) and resource_stream is not None: + # http://bugs.launchpad.net/bugs/383171 - we avoid using this + # unless absolutely necessary to help when a broken version of + # pkg_resources is installed. + return resource_stream(__name__, 'zoneinfo/' + name) + return open(filename, 'rb') + + +def resource_exists(name): + """Return true if the given resource exists""" + try: + open_resource(name).close() + return True + except IOError: + return False + + +# Enable this when we get some translations? +# We want an i18n API that is useful to programs using Python's gettext +# module, as well as the Zope3 i18n package. Perhaps we should just provide +# the POT file and translations, and leave it up to callers to make use +# of them. +# +# t = gettext.translation( +# 'pytz', os.path.join(os.path.dirname(__file__), 'locales'), +# fallback=True +# ) +# def _(timezone_name): +# """Translate a timezone name using the current locale, returning Unicode""" +# return t.ugettext(timezone_name) + + +_tzinfo_cache = {} + +def timezone(zone): + r''' Return a datetime.tzinfo implementation for the given timezone + + >>> from datetime import datetime, timedelta + >>> utc = timezone('UTC') + >>> eastern = timezone('US/Eastern') + >>> eastern.zone + 'US/Eastern' + >>> timezone(unicode('US/Eastern')) is eastern + True + >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) + >>> loc_dt = utc_dt.astimezone(eastern) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> loc_dt.strftime(fmt) + '2002-10-27 01:00:00 EST (-0500)' + >>> (loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 00:50:00 EST (-0500)' + >>> eastern.normalize(loc_dt - timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:50:00 EDT (-0400)' + >>> (loc_dt + timedelta(minutes=10)).strftime(fmt) + '2002-10-27 01:10:00 EST (-0500)' + + Raises UnknownTimeZoneError if passed an unknown zone. + + >>> try: + ... timezone('Asia/Shangri-La') + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + >>> try: + ... timezone(unicode('\N{TRADE MARK SIGN}')) + ... except UnknownTimeZoneError: + ... print('Unknown') + Unknown + + ''' + if zone.upper() == 'UTC': + return utc + + try: + zone = ascii(zone) + except UnicodeEncodeError: + # All valid timezones are ASCII + raise UnknownTimeZoneError(zone) + + zone = _unmunge_zone(zone) + if zone not in _tzinfo_cache: + if zone in all_timezones_set: + fp = open_resource(zone) + try: + _tzinfo_cache[zone] = build_tzinfo(zone, fp) + finally: + fp.close() + else: + raise UnknownTimeZoneError(zone) + + return _tzinfo_cache[zone] + + +def _unmunge_zone(zone): + """Undo the time zone name munging done by older versions of pytz.""" + return zone.replace('_plus_', '+').replace('_minus_', '-') + + +ZERO = datetime.timedelta(0) +HOUR = datetime.timedelta(hours=1) + + +class UTC(datetime.tzinfo): + """UTC + + Optimized UTC implementation. It unpickles using the single module global + instance defined beneath this class declaration. 
+ """ + zone = "UTC" + + _utcoffset = ZERO + _dst = ZERO + _tzname = zone + + def fromutc(self, dt): + if dt.tzinfo is None: + return self.localize(dt) + return super(utc.__class__, self).fromutc(dt) + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + + def __reduce__(self): + return _UTC, () + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime''' + if dt.tzinfo is self: + return dt + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.astimezone(self) + + def __repr__(self): + return "" + + def __str__(self): + return "UTC" + + +UTC = utc = UTC() # UTC is a singleton + + +def _UTC(): + """Factory function for utc unpickling. + + Makes sure that unpickling a utc instance always returns the same + module global. + + These examples belong in the UTC class above, but it is obscured; or in + the README.txt, but we are not depending on Python 2.4 so integrating + the README.txt examples with the unit tests is not trivial. + + >>> import datetime, pickle + >>> dt = datetime.datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc) + >>> naive = dt.replace(tzinfo=None) + >>> p = pickle.dumps(dt, 1) + >>> naive_p = pickle.dumps(naive, 1) + >>> len(p) - len(naive_p) + 17 + >>> new = pickle.loads(p) + >>> new == dt + True + >>> new is dt + False + >>> new.tzinfo is dt.tzinfo + True + >>> utc is UTC is timezone('UTC') + True + >>> utc is timezone('GMT') + False + """ + return utc +_UTC.__safe_for_unpickling__ = True + + +def _p(*args): + """Factory function for unpickling pytz tzinfo instances. + + Just a wrapper around tzinfo.unpickler to save a few bytes in each pickle + by shortening the path. + """ + return unpickler(*args) +_p.__safe_for_unpickling__ = True + + + +class _CountryTimezoneDict(LazyDict): + """Map ISO 3166 country code to a list of timezone names commonly used + in that country. + + iso3166_code is the two letter code used to identify the country. + + >>> def print_list(list_of_strings): + ... 'We use a helper so doctests work under Python 2.3 -> 3.x' + ... for s in list_of_strings: + ... print(s) + + >>> print_list(country_timezones['nz']) + Pacific/Auckland + Pacific/Chatham + >>> print_list(country_timezones['ch']) + Europe/Zurich + >>> print_list(country_timezones['CH']) + Europe/Zurich + >>> print_list(country_timezones[unicode('ch')]) + Europe/Zurich + >>> print_list(country_timezones['XXX']) + Traceback (most recent call last): + ... + KeyError: 'XXX' + + Previously, this information was exposed as a function rather than a + dictionary. 
This is still supported:: + + >>> print_list(country_timezones('nz')) + Pacific/Auckland + Pacific/Chatham + """ + def __call__(self, iso3166_code): + """Backwards compatibility.""" + return self[iso3166_code] + + def _fill(self): + data = {} + zone_tab = open_resource('zone.tab') + try: + for line in zone_tab: + line = line.decode('US-ASCII') + if line.startswith('#'): + continue + code, coordinates, zone = line.split(None, 4)[:3] + if zone not in all_timezones_set: + continue + try: + data[code].append(zone) + except KeyError: + data[code] = [zone] + self.data = data + finally: + zone_tab.close() + +country_timezones = _CountryTimezoneDict() + + +class _CountryNameDict(LazyDict): + '''Dictionary proving ISO3166 code -> English name. + + >>> print(country_names['au']) + Australia + ''' + def _fill(self): + data = {} + zone_tab = open_resource('iso3166.tab') + try: + for line in zone_tab.readlines(): + line = line.decode('US-ASCII') + if line.startswith('#'): + continue + code, name = line.split(None, 1) + data[code] = name.strip() + self.data = data + finally: + zone_tab.close() + +country_names = _CountryNameDict() + + +# Time-zone info based solely on fixed offsets + +class _FixedOffset(datetime.tzinfo): + + zone = None # to match the standard pytz API + + def __init__(self, minutes): + if abs(minutes) >= 1440: + raise ValueError("absolute offset is too large", minutes) + self._minutes = minutes + self._offset = datetime.timedelta(minutes=minutes) + + def utcoffset(self, dt): + return self._offset + + def __reduce__(self): + return FixedOffset, (self._minutes, ) + + def dst(self, dt): + return ZERO + + def tzname(self, dt): + return None + + def __repr__(self): + return 'pytz.FixedOffset(%d)' % self._minutes + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + return dt.replace(tzinfo=self) + + def normalize(self, dt, is_dst=False): + '''Correct the timezone information on the given datetime''' + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + return dt.replace(tzinfo=self) + + +def FixedOffset(offset, _tzinfos = {}): + """return a fixed-offset timezone based off a number of minutes. + + >>> one = FixedOffset(-330) + >>> one + pytz.FixedOffset(-330) + >>> one.utcoffset(datetime.datetime.now()) + datetime.timedelta(-1, 66600) + >>> one.dst(datetime.datetime.now()) + datetime.timedelta(0) + + >>> two = FixedOffset(1380) + >>> two + pytz.FixedOffset(1380) + >>> two.utcoffset(datetime.datetime.now()) + datetime.timedelta(0, 82800) + >>> two.dst(datetime.datetime.now()) + datetime.timedelta(0) + + The datetime.timedelta must be between the range of -1 and 1 day, + non-inclusive. + + >>> FixedOffset(1440) + Traceback (most recent call last): + ... + ValueError: ('absolute offset is too large', 1440) + + >>> FixedOffset(-1440) + Traceback (most recent call last): + ... + ValueError: ('absolute offset is too large', -1440) + + An offset of 0 is special-cased to return UTC. + + >>> FixedOffset(0) is UTC + True + + There should always be only one instance of a FixedOffset per timedelta. + This should be true for multiple creation calls. + + >>> FixedOffset(-330) is one + True + >>> FixedOffset(1380) is two + True + + It should also be true for pickling. 
+ + >>> import pickle + >>> pickle.loads(pickle.dumps(one)) is one + True + >>> pickle.loads(pickle.dumps(two)) is two + True + """ + if offset == 0: + return UTC + + info = _tzinfos.get(offset) + if info is None: + # We haven't seen this one before. we need to save it. + + # Use setdefault to avoid a race condition and make sure we have + # only one + info = _tzinfos.setdefault(offset, _FixedOffset(offset)) + + return info + +FixedOffset.__safe_for_unpickling__ = True + + +def _test(): + import doctest, os, sys + sys.path.insert(0, os.pardir) + import pytz + return doctest.testmod(pytz) + +if __name__ == '__main__': + _test() + +all_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Asmera', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Timbuktu', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/ComodRivadavia', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 'America/Atka', + 'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Buenos_Aires', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Catamarca', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Coral_Harbour', + 'America/Cordoba', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Ensenada', + 'America/Fort_Wayne', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 
'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Indianapolis', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Jujuy', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Knox_IN', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Louisville', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Mendoza', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Acre', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Rosario', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Shiprock', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Virgin', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/South_Pole', + 'Antarctica/Syowa', + 'Antarctica/Troll', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Ashkhabad', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Calcutta', + 'Asia/Chita', + 'Asia/Choibalsan', + 'Asia/Chongqing', + 'Asia/Chungking', + 'Asia/Colombo', + 'Asia/Dacca', + 'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Harbin', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Istanbul', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kashgar', + 'Asia/Kathmandu', + 'Asia/Katmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macao', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', 
+ 'Asia/Pontianak', + 'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Saigon', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Srednekolymsk', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Tel_Aviv', + 'Asia/Thimbu', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ujung_Pandang', + 'Asia/Ulaanbaatar', + 'Asia/Ulan_Bator', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faeroe', + 'Atlantic/Faroe', + 'Atlantic/Jan_Mayen', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/ACT', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Canberra', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/LHI', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/NSW', + 'Australia/North', + 'Australia/Perth', + 'Australia/Queensland', + 'Australia/South', + 'Australia/Sydney', + 'Australia/Tasmania', + 'Australia/Victoria', + 'Australia/West', + 'Australia/Yancowinna', + 'Brazil/Acre', + 'Brazil/DeNoronha', + 'Brazil/East', + 'Brazil/West', + 'CET', + 'CST6CDT', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/East-Saskatchewan', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Canada/Saskatchewan', + 'Canada/Yukon', + 'Chile/Continental', + 'Chile/EasterIsland', + 'Cuba', + 'EET', + 'EST', + 'EST5EDT', + 'Egypt', + 'Eire', + 'Etc/GMT', + 'Etc/GMT+0', + 'Etc/GMT+1', + 'Etc/GMT+10', + 'Etc/GMT+11', + 'Etc/GMT+12', + 'Etc/GMT+2', + 'Etc/GMT+3', + 'Etc/GMT+4', + 'Etc/GMT+5', + 'Etc/GMT+6', + 'Etc/GMT+7', + 'Etc/GMT+8', + 'Etc/GMT+9', + 'Etc/GMT-0', + 'Etc/GMT-1', + 'Etc/GMT-10', + 'Etc/GMT-11', + 'Etc/GMT-12', + 'Etc/GMT-13', + 'Etc/GMT-14', + 'Etc/GMT-2', + 'Etc/GMT-3', + 'Etc/GMT-4', + 'Etc/GMT-5', + 'Etc/GMT-6', + 'Etc/GMT-7', + 'Etc/GMT-8', + 'Etc/GMT-9', + 'Etc/GMT0', + 'Etc/Greenwich', + 'Etc/UCT', + 'Etc/UTC', + 'Etc/Universal', + 'Etc/Zulu', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belfast', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Nicosia', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Tiraspol', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GB', + 'GB-Eire', + 'GMT', + 
'GMT+0', + 'GMT-0', + 'GMT0', + 'Greenwich', + 'HST', + 'Hongkong', + 'Iceland', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Iran', + 'Israel', + 'Jamaica', + 'Japan', + 'Kwajalein', + 'Libya', + 'MET', + 'MST', + 'MST7MDT', + 'Mexico/BajaNorte', + 'Mexico/BajaSur', + 'Mexico/General', + 'NZ', + 'NZ-CHAT', + 'Navajo', + 'PRC', + 'PST8PDT', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Bougainville', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Ponape', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Samoa', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Truk', + 'Pacific/Wake', + 'Pacific/Wallis', + 'Pacific/Yap', + 'Poland', + 'Portugal', + 'ROC', + 'ROK', + 'Singapore', + 'Turkey', + 'UCT', + 'US/Alaska', + 'US/Aleutian', + 'US/Arizona', + 'US/Central', + 'US/East-Indiana', + 'US/Eastern', + 'US/Hawaii', + 'US/Indiana-Starke', + 'US/Michigan', + 'US/Mountain', + 'US/Pacific', + 'US/Pacific-New', + 'US/Samoa', + 'UTC', + 'Universal', + 'W-SU', + 'WET', + 'Zulu'] +all_timezones = LazyList( + tz for tz in all_timezones if resource_exists(tz)) + +all_timezones_set = LazySet(all_timezones) +common_timezones = \ +['Africa/Abidjan', + 'Africa/Accra', + 'Africa/Addis_Ababa', + 'Africa/Algiers', + 'Africa/Asmara', + 'Africa/Bamako', + 'Africa/Bangui', + 'Africa/Banjul', + 'Africa/Bissau', + 'Africa/Blantyre', + 'Africa/Brazzaville', + 'Africa/Bujumbura', + 'Africa/Cairo', + 'Africa/Casablanca', + 'Africa/Ceuta', + 'Africa/Conakry', + 'Africa/Dakar', + 'Africa/Dar_es_Salaam', + 'Africa/Djibouti', + 'Africa/Douala', + 'Africa/El_Aaiun', + 'Africa/Freetown', + 'Africa/Gaborone', + 'Africa/Harare', + 'Africa/Johannesburg', + 'Africa/Juba', + 'Africa/Kampala', + 'Africa/Khartoum', + 'Africa/Kigali', + 'Africa/Kinshasa', + 'Africa/Lagos', + 'Africa/Libreville', + 'Africa/Lome', + 'Africa/Luanda', + 'Africa/Lubumbashi', + 'Africa/Lusaka', + 'Africa/Malabo', + 'Africa/Maputo', + 'Africa/Maseru', + 'Africa/Mbabane', + 'Africa/Mogadishu', + 'Africa/Monrovia', + 'Africa/Nairobi', + 'Africa/Ndjamena', + 'Africa/Niamey', + 'Africa/Nouakchott', + 'Africa/Ouagadougou', + 'Africa/Porto-Novo', + 'Africa/Sao_Tome', + 'Africa/Tripoli', + 'Africa/Tunis', + 'Africa/Windhoek', + 'America/Adak', + 'America/Anchorage', + 'America/Anguilla', + 'America/Antigua', + 'America/Araguaina', + 'America/Argentina/Buenos_Aires', + 'America/Argentina/Catamarca', + 'America/Argentina/Cordoba', + 'America/Argentina/Jujuy', + 'America/Argentina/La_Rioja', + 'America/Argentina/Mendoza', + 'America/Argentina/Rio_Gallegos', + 'America/Argentina/Salta', + 'America/Argentina/San_Juan', + 'America/Argentina/San_Luis', + 'America/Argentina/Tucuman', + 'America/Argentina/Ushuaia', + 'America/Aruba', + 'America/Asuncion', + 'America/Atikokan', + 
'America/Bahia', + 'America/Bahia_Banderas', + 'America/Barbados', + 'America/Belem', + 'America/Belize', + 'America/Blanc-Sablon', + 'America/Boa_Vista', + 'America/Bogota', + 'America/Boise', + 'America/Cambridge_Bay', + 'America/Campo_Grande', + 'America/Cancun', + 'America/Caracas', + 'America/Cayenne', + 'America/Cayman', + 'America/Chicago', + 'America/Chihuahua', + 'America/Costa_Rica', + 'America/Creston', + 'America/Cuiaba', + 'America/Curacao', + 'America/Danmarkshavn', + 'America/Dawson', + 'America/Dawson_Creek', + 'America/Denver', + 'America/Detroit', + 'America/Dominica', + 'America/Edmonton', + 'America/Eirunepe', + 'America/El_Salvador', + 'America/Fortaleza', + 'America/Glace_Bay', + 'America/Godthab', + 'America/Goose_Bay', + 'America/Grand_Turk', + 'America/Grenada', + 'America/Guadeloupe', + 'America/Guatemala', + 'America/Guayaquil', + 'America/Guyana', + 'America/Halifax', + 'America/Havana', + 'America/Hermosillo', + 'America/Indiana/Indianapolis', + 'America/Indiana/Knox', + 'America/Indiana/Marengo', + 'America/Indiana/Petersburg', + 'America/Indiana/Tell_City', + 'America/Indiana/Vevay', + 'America/Indiana/Vincennes', + 'America/Indiana/Winamac', + 'America/Inuvik', + 'America/Iqaluit', + 'America/Jamaica', + 'America/Juneau', + 'America/Kentucky/Louisville', + 'America/Kentucky/Monticello', + 'America/Kralendijk', + 'America/La_Paz', + 'America/Lima', + 'America/Los_Angeles', + 'America/Lower_Princes', + 'America/Maceio', + 'America/Managua', + 'America/Manaus', + 'America/Marigot', + 'America/Martinique', + 'America/Matamoros', + 'America/Mazatlan', + 'America/Menominee', + 'America/Merida', + 'America/Metlakatla', + 'America/Mexico_City', + 'America/Miquelon', + 'America/Moncton', + 'America/Monterrey', + 'America/Montevideo', + 'America/Montreal', + 'America/Montserrat', + 'America/Nassau', + 'America/New_York', + 'America/Nipigon', + 'America/Nome', + 'America/Noronha', + 'America/North_Dakota/Beulah', + 'America/North_Dakota/Center', + 'America/North_Dakota/New_Salem', + 'America/Ojinaga', + 'America/Panama', + 'America/Pangnirtung', + 'America/Paramaribo', + 'America/Phoenix', + 'America/Port-au-Prince', + 'America/Port_of_Spain', + 'America/Porto_Velho', + 'America/Puerto_Rico', + 'America/Rainy_River', + 'America/Rankin_Inlet', + 'America/Recife', + 'America/Regina', + 'America/Resolute', + 'America/Rio_Branco', + 'America/Santa_Isabel', + 'America/Santarem', + 'America/Santiago', + 'America/Santo_Domingo', + 'America/Sao_Paulo', + 'America/Scoresbysund', + 'America/Sitka', + 'America/St_Barthelemy', + 'America/St_Johns', + 'America/St_Kitts', + 'America/St_Lucia', + 'America/St_Thomas', + 'America/St_Vincent', + 'America/Swift_Current', + 'America/Tegucigalpa', + 'America/Thule', + 'America/Thunder_Bay', + 'America/Tijuana', + 'America/Toronto', + 'America/Tortola', + 'America/Vancouver', + 'America/Whitehorse', + 'America/Winnipeg', + 'America/Yakutat', + 'America/Yellowknife', + 'Antarctica/Casey', + 'Antarctica/Davis', + 'Antarctica/DumontDUrville', + 'Antarctica/Macquarie', + 'Antarctica/Mawson', + 'Antarctica/McMurdo', + 'Antarctica/Palmer', + 'Antarctica/Rothera', + 'Antarctica/Syowa', + 'Antarctica/Troll', + 'Antarctica/Vostok', + 'Arctic/Longyearbyen', + 'Asia/Aden', + 'Asia/Almaty', + 'Asia/Amman', + 'Asia/Anadyr', + 'Asia/Aqtau', + 'Asia/Aqtobe', + 'Asia/Ashgabat', + 'Asia/Baghdad', + 'Asia/Bahrain', + 'Asia/Baku', + 'Asia/Bangkok', + 'Asia/Beirut', + 'Asia/Bishkek', + 'Asia/Brunei', + 'Asia/Chita', + 'Asia/Choibalsan', + 'Asia/Colombo', + 
'Asia/Damascus', + 'Asia/Dhaka', + 'Asia/Dili', + 'Asia/Dubai', + 'Asia/Dushanbe', + 'Asia/Gaza', + 'Asia/Hebron', + 'Asia/Ho_Chi_Minh', + 'Asia/Hong_Kong', + 'Asia/Hovd', + 'Asia/Irkutsk', + 'Asia/Jakarta', + 'Asia/Jayapura', + 'Asia/Jerusalem', + 'Asia/Kabul', + 'Asia/Kamchatka', + 'Asia/Karachi', + 'Asia/Kathmandu', + 'Asia/Khandyga', + 'Asia/Kolkata', + 'Asia/Krasnoyarsk', + 'Asia/Kuala_Lumpur', + 'Asia/Kuching', + 'Asia/Kuwait', + 'Asia/Macau', + 'Asia/Magadan', + 'Asia/Makassar', + 'Asia/Manila', + 'Asia/Muscat', + 'Asia/Nicosia', + 'Asia/Novokuznetsk', + 'Asia/Novosibirsk', + 'Asia/Omsk', + 'Asia/Oral', + 'Asia/Phnom_Penh', + 'Asia/Pontianak', + 'Asia/Pyongyang', + 'Asia/Qatar', + 'Asia/Qyzylorda', + 'Asia/Rangoon', + 'Asia/Riyadh', + 'Asia/Sakhalin', + 'Asia/Samarkand', + 'Asia/Seoul', + 'Asia/Shanghai', + 'Asia/Singapore', + 'Asia/Srednekolymsk', + 'Asia/Taipei', + 'Asia/Tashkent', + 'Asia/Tbilisi', + 'Asia/Tehran', + 'Asia/Thimphu', + 'Asia/Tokyo', + 'Asia/Ulaanbaatar', + 'Asia/Urumqi', + 'Asia/Ust-Nera', + 'Asia/Vientiane', + 'Asia/Vladivostok', + 'Asia/Yakutsk', + 'Asia/Yekaterinburg', + 'Asia/Yerevan', + 'Atlantic/Azores', + 'Atlantic/Bermuda', + 'Atlantic/Canary', + 'Atlantic/Cape_Verde', + 'Atlantic/Faroe', + 'Atlantic/Madeira', + 'Atlantic/Reykjavik', + 'Atlantic/South_Georgia', + 'Atlantic/St_Helena', + 'Atlantic/Stanley', + 'Australia/Adelaide', + 'Australia/Brisbane', + 'Australia/Broken_Hill', + 'Australia/Currie', + 'Australia/Darwin', + 'Australia/Eucla', + 'Australia/Hobart', + 'Australia/Lindeman', + 'Australia/Lord_Howe', + 'Australia/Melbourne', + 'Australia/Perth', + 'Australia/Sydney', + 'Canada/Atlantic', + 'Canada/Central', + 'Canada/Eastern', + 'Canada/Mountain', + 'Canada/Newfoundland', + 'Canada/Pacific', + 'Europe/Amsterdam', + 'Europe/Andorra', + 'Europe/Athens', + 'Europe/Belgrade', + 'Europe/Berlin', + 'Europe/Bratislava', + 'Europe/Brussels', + 'Europe/Bucharest', + 'Europe/Budapest', + 'Europe/Busingen', + 'Europe/Chisinau', + 'Europe/Copenhagen', + 'Europe/Dublin', + 'Europe/Gibraltar', + 'Europe/Guernsey', + 'Europe/Helsinki', + 'Europe/Isle_of_Man', + 'Europe/Istanbul', + 'Europe/Jersey', + 'Europe/Kaliningrad', + 'Europe/Kiev', + 'Europe/Lisbon', + 'Europe/Ljubljana', + 'Europe/London', + 'Europe/Luxembourg', + 'Europe/Madrid', + 'Europe/Malta', + 'Europe/Mariehamn', + 'Europe/Minsk', + 'Europe/Monaco', + 'Europe/Moscow', + 'Europe/Oslo', + 'Europe/Paris', + 'Europe/Podgorica', + 'Europe/Prague', + 'Europe/Riga', + 'Europe/Rome', + 'Europe/Samara', + 'Europe/San_Marino', + 'Europe/Sarajevo', + 'Europe/Simferopol', + 'Europe/Skopje', + 'Europe/Sofia', + 'Europe/Stockholm', + 'Europe/Tallinn', + 'Europe/Tirane', + 'Europe/Uzhgorod', + 'Europe/Vaduz', + 'Europe/Vatican', + 'Europe/Vienna', + 'Europe/Vilnius', + 'Europe/Volgograd', + 'Europe/Warsaw', + 'Europe/Zagreb', + 'Europe/Zaporozhye', + 'Europe/Zurich', + 'GMT', + 'Indian/Antananarivo', + 'Indian/Chagos', + 'Indian/Christmas', + 'Indian/Cocos', + 'Indian/Comoro', + 'Indian/Kerguelen', + 'Indian/Mahe', + 'Indian/Maldives', + 'Indian/Mauritius', + 'Indian/Mayotte', + 'Indian/Reunion', + 'Pacific/Apia', + 'Pacific/Auckland', + 'Pacific/Bougainville', + 'Pacific/Chatham', + 'Pacific/Chuuk', + 'Pacific/Easter', + 'Pacific/Efate', + 'Pacific/Enderbury', + 'Pacific/Fakaofo', + 'Pacific/Fiji', + 'Pacific/Funafuti', + 'Pacific/Galapagos', + 'Pacific/Gambier', + 'Pacific/Guadalcanal', + 'Pacific/Guam', + 'Pacific/Honolulu', + 'Pacific/Johnston', + 'Pacific/Kiritimati', + 'Pacific/Kosrae', + 
'Pacific/Kwajalein', + 'Pacific/Majuro', + 'Pacific/Marquesas', + 'Pacific/Midway', + 'Pacific/Nauru', + 'Pacific/Niue', + 'Pacific/Norfolk', + 'Pacific/Noumea', + 'Pacific/Pago_Pago', + 'Pacific/Palau', + 'Pacific/Pitcairn', + 'Pacific/Pohnpei', + 'Pacific/Port_Moresby', + 'Pacific/Rarotonga', + 'Pacific/Saipan', + 'Pacific/Tahiti', + 'Pacific/Tarawa', + 'Pacific/Tongatapu', + 'Pacific/Wake', + 'Pacific/Wallis', + 'US/Alaska', + 'US/Arizona', + 'US/Central', + 'US/Eastern', + 'US/Hawaii', + 'US/Mountain', + 'US/Pacific', + 'UTC'] +common_timezones = LazyList( + tz for tz in common_timezones if tz in all_timezones) + +common_timezones_set = LazySet(common_timezones) diff --git a/lib/pytz/exceptions.py b/lib/pytz/exceptions.py new file mode 100644 index 00000000..0376108e --- /dev/null +++ b/lib/pytz/exceptions.py @@ -0,0 +1,48 @@ +''' +Custom exceptions raised by pytz. +''' + +__all__ = [ + 'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError', + 'NonExistentTimeError', + ] + + +class UnknownTimeZoneError(KeyError): + '''Exception raised when pytz is passed an unknown timezone. + + >>> isinstance(UnknownTimeZoneError(), LookupError) + True + + This class is actually a subclass of KeyError to provide backwards + compatibility with code relying on the undocumented behavior of earlier + pytz releases. + + >>> isinstance(UnknownTimeZoneError(), KeyError) + True + ''' + pass + + +class InvalidTimeError(Exception): + '''Base class for invalid time exceptions.''' + + +class AmbiguousTimeError(InvalidTimeError): + '''Exception raised when attempting to create an ambiguous wallclock time. + + At the end of a DST transition period, a particular wallclock time will + occur twice (once before the clocks are set back, once after). Both + possibilities may be correct, unless further information is supplied. + + See DstTzInfo.normalize() for more info + ''' + + +class NonExistentTimeError(InvalidTimeError): + '''Exception raised when attempting to create a wallclock time that + cannot exist. + + At the start of a DST transition period, the wallclock time jumps forward. + The instants jumped over never occur. + ''' diff --git a/lib/pytz/lazy.py b/lib/pytz/lazy.py new file mode 100644 index 00000000..f7fc597c --- /dev/null +++ b/lib/pytz/lazy.py @@ -0,0 +1,168 @@ +from threading import RLock +try: + from UserDict import DictMixin +except ImportError: + from collections import Mapping as DictMixin + + +# With lazy loading, we might end up with multiple threads triggering +# it at the same time. We need a lock. 
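The lock mentioned above guards a double-checked lazy fill: callers test self.data once without the lock, then again while holding it, so the expensive load runs at most once and concurrent readers never observe a half-built mapping. A minimal standalone sketch of the same pattern (the LazilyFilledDict name and its _fill payload are illustrative only, not part of pytz)::

    import threading

    _demo_fill_lock = threading.RLock()

    class LazilyFilledDict(object):
        """Illustrative only: populate self.data once, on first access."""
        data = None

        def _fill(self):
            # Stand-in for an expensive load, e.g. parsing zone.tab.
            self.data = {'NZ': ['Pacific/Auckland', 'Pacific/Chatham']}

        def __getitem__(self, key):
            if self.data is None:              # cheap unlocked check
                with _demo_fill_lock:
                    if self.data is None:      # re-check under the lock
                        self._fill()
            return self.data[key.upper()]

    country_zones = LazilyFilledDict()
    print(country_zones['nz'])   # ['Pacific/Auckland', 'Pacific/Chatham']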
+_fill_lock = RLock() + + +class LazyDict(DictMixin): + """Dictionary populated on first use.""" + data = None + def __getitem__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data[key.upper()] + + def __contains__(self, key): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return key in self.data + + def __iter__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return iter(self.data) + + def __len__(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return len(self.data) + + def keys(self): + if self.data is None: + _fill_lock.acquire() + try: + if self.data is None: + self._fill() + finally: + _fill_lock.release() + return self.data.keys() + + +class LazyList(list): + """List populated on first use.""" + + _props = [ + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + 'append', 'count', 'index', 'extend', 'insert', 'pop', 'remove', + 'reverse', 'sort', '__add__', '__radd__', '__iadd__', '__mul__', + '__rmul__', '__imul__', '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__reversed__', '__getslice__', '__setslice__', '__delslice__'] + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return list() + + # We need a new class as we will be dynamically messing with its + # methods. + class LazyList(list): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + list.extend(self, fill_iter.pop()) + for method_name in cls._props: + delattr(LazyList, method_name) + finally: + _fill_lock.release() + return getattr(list, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazyList, name, lazy(name)) + + new_list = LazyList() + return new_list + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. 
+LazyList._props = [prop for prop in LazyList._props if hasattr(list, prop)] + + +class LazySet(set): + """Set populated on first use.""" + + _props = ( + '__str__', '__repr__', '__unicode__', + '__hash__', '__sizeof__', '__cmp__', + '__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__', + '__contains__', '__len__', '__nonzero__', + '__getitem__', '__setitem__', '__delitem__', '__iter__', + '__sub__', '__and__', '__xor__', '__or__', + '__rsub__', '__rand__', '__rxor__', '__ror__', + '__isub__', '__iand__', '__ixor__', '__ior__', + 'add', 'clear', 'copy', 'difference', 'difference_update', + 'discard', 'intersection', 'intersection_update', 'isdisjoint', + 'issubset', 'issuperset', 'pop', 'remove', + 'symmetric_difference', 'symmetric_difference_update', + 'union', 'update') + + def __new__(cls, fill_iter=None): + + if fill_iter is None: + return set() + + class LazySet(set): + pass + + fill_iter = [fill_iter] + + def lazy(name): + def _lazy(self, *args, **kw): + _fill_lock.acquire() + try: + if len(fill_iter) > 0: + for i in fill_iter.pop(): + set.add(self, i) + for method_name in cls._props: + delattr(LazySet, method_name) + finally: + _fill_lock.release() + return getattr(set, name)(self, *args, **kw) + return _lazy + + for name in cls._props: + setattr(LazySet, name, lazy(name)) + + new_set = LazySet() + return new_set + +# Not all versions of Python declare the same magic methods. +# Filter out properties that don't exist in this version of Python +# from the list. +LazySet._props = [prop for prop in LazySet._props if hasattr(set, prop)] diff --git a/lib/pytz/reference.py b/lib/pytz/reference.py new file mode 100644 index 00000000..3dda13e7 --- /dev/null +++ b/lib/pytz/reference.py @@ -0,0 +1,127 @@ +''' +Reference tzinfo implementations from the Python docs. +Used for testing against as they are only correct for the years +1987 to 2006. Do not use these for real code. +''' + +from datetime import tzinfo, timedelta, datetime +from pytz import utc, UTC, HOUR, ZERO + +# A class building tzinfo objects for fixed-offset time zones. +# Note that FixedOffset(0, "UTC") is a different way to build a +# UTC tzinfo object. + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes = offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + +# A class capturing the platform's idea of local time. + +import time as _time + +STDOFFSET = timedelta(seconds = -_time.timezone) +if _time.daylight: + DSTOFFSET = timedelta(seconds = -_time.altzone) +else: + DSTOFFSET = STDOFFSET + +DSTDIFF = DSTOFFSET - STDOFFSET + +class LocalTimezone(tzinfo): + + def utcoffset(self, dt): + if self._isdst(dt): + return DSTOFFSET + else: + return STDOFFSET + + def dst(self, dt): + if self._isdst(dt): + return DSTDIFF + else: + return ZERO + + def tzname(self, dt): + return _time.tzname[self._isdst(dt)] + + def _isdst(self, dt): + tt = (dt.year, dt.month, dt.day, + dt.hour, dt.minute, dt.second, + dt.weekday(), 0, -1) + stamp = _time.mktime(tt) + tt = _time.localtime(stamp) + return tt.tm_isdst > 0 + +Local = LocalTimezone() + +# A complete implementation of current DST rules for major US time zones. + +def first_sunday_on_or_after(dt): + days_to_go = 6 - dt.weekday() + if days_to_go: + dt += timedelta(days_to_go) + return dt + +# In the US, DST starts at 2am (standard time) on the first Sunday in April. 
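Because weekday() counts Monday as 0, the first_sunday_on_or_after helper above is easy to misread. A quick standalone check against the 1987-2006 US rule these reference classes model may help; the helper is repeated here only so the snippet runs on its own::

    from datetime import datetime, timedelta

    def first_sunday_on_or_after(dt):
        # weekday() is 0 for Monday and 6 for Sunday.
        days_to_go = 6 - dt.weekday()
        if days_to_go:
            dt += timedelta(days_to_go)
        return dt

    # April 1, 1987 fell on a Wednesday, so under the old US rule DST
    # began at 2am on Sunday, April 5 that year.
    print(first_sunday_on_or_after(datetime(1987, 4, 1, 2)))  # 1987-04-05 02:00:00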
+DSTSTART = datetime(1, 4, 1, 2) +# and ends at 2am (DST time; 1am standard time) on the last Sunday of Oct. +# which is the first Sunday on or after Oct 25. +DSTEND = datetime(1, 10, 25, 1) + +class USTimeZone(tzinfo): + + def __init__(self, hours, reprname, stdname, dstname): + self.stdoffset = timedelta(hours=hours) + self.reprname = reprname + self.stdname = stdname + self.dstname = dstname + + def __repr__(self): + return self.reprname + + def tzname(self, dt): + if self.dst(dt): + return self.dstname + else: + return self.stdname + + def utcoffset(self, dt): + return self.stdoffset + self.dst(dt) + + def dst(self, dt): + if dt is None or dt.tzinfo is None: + # An exception may be sensible here, in one or both cases. + # It depends on how you want to treat them. The default + # fromutc() implementation (called by the default astimezone() + # implementation) passes a datetime with dt.tzinfo is self. + return ZERO + assert dt.tzinfo is self + + # Find first Sunday in April & the last in October. + start = first_sunday_on_or_after(DSTSTART.replace(year=dt.year)) + end = first_sunday_on_or_after(DSTEND.replace(year=dt.year)) + + # Can't compare naive to aware objects, so strip the timezone from + # dt first. + if start <= dt.replace(tzinfo=None) < end: + return HOUR + else: + return ZERO + +Eastern = USTimeZone(-5, "Eastern", "EST", "EDT") +Central = USTimeZone(-6, "Central", "CST", "CDT") +Mountain = USTimeZone(-7, "Mountain", "MST", "MDT") +Pacific = USTimeZone(-8, "Pacific", "PST", "PDT") + diff --git a/lib/pytz/tzfile.py b/lib/pytz/tzfile.py new file mode 100644 index 00000000..9c007c80 --- /dev/null +++ b/lib/pytz/tzfile.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +''' +$Id: tzfile.py,v 1.8 2004/06/03 00:15:24 zenzen Exp $ +''' + +try: + from cStringIO import StringIO +except ImportError: + from io import StringIO +from datetime import datetime, timedelta +from struct import unpack, calcsize + +from pytz.tzinfo import StaticTzInfo, DstTzInfo, memorized_ttinfo +from pytz.tzinfo import memorized_datetime, memorized_timedelta + +def _byte_string(s): + """Cast a string or byte string to an ASCII byte string.""" + return s.encode('US-ASCII') + +_NULL = _byte_string('\0') + +def _std_string(s): + """Cast a string or byte string to an ASCII string.""" + return str(s.decode('US-ASCII')) + +def build_tzinfo(zone, fp): + head_fmt = '>4s c 15x 6l' + head_size = calcsize(head_fmt) + (magic, format, ttisgmtcnt, ttisstdcnt,leapcnt, timecnt, + typecnt, charcnt) = unpack(head_fmt, fp.read(head_size)) + + # Make sure it is a tzfile(5) file + assert magic == _byte_string('TZif'), 'Got magic %s' % repr(magic) + + # Read out the transition times, localtime indices and ttinfo structures. + data_fmt = '>%(timecnt)dl %(timecnt)dB %(ttinfo)s %(charcnt)ds' % dict( + timecnt=timecnt, ttinfo='lBB'*typecnt, charcnt=charcnt) + data_size = calcsize(data_fmt) + data = unpack(data_fmt, fp.read(data_size)) + + # make sure we unpacked the right number of values + assert len(data) == 2 * timecnt + 3 * typecnt + 1 + transitions = [memorized_datetime(trans) + for trans in data[:timecnt]] + lindexes = list(data[timecnt:2 * timecnt]) + ttinfo_raw = data[2 * timecnt:-1] + tznames_raw = data[-1] + del data + + # Process ttinfo into separate structs + ttinfo = [] + tznames = {} + i = 0 + while i < len(ttinfo_raw): + # have we looked up this timezone name yet? 
+ tzname_offset = ttinfo_raw[i+2] + if tzname_offset not in tznames: + nul = tznames_raw.find(_NULL, tzname_offset) + if nul < 0: + nul = len(tznames_raw) + tznames[tzname_offset] = _std_string( + tznames_raw[tzname_offset:nul]) + ttinfo.append((ttinfo_raw[i], + bool(ttinfo_raw[i+1]), + tznames[tzname_offset])) + i += 3 + + # Now build the timezone object + if len(transitions) == 0: + ttinfo[0][0], ttinfo[0][2] + cls = type(zone, (StaticTzInfo,), dict( + zone=zone, + _utcoffset=memorized_timedelta(ttinfo[0][0]), + _tzname=ttinfo[0][2])) + else: + # Early dates use the first standard time ttinfo + i = 0 + while ttinfo[i][1]: + i += 1 + if ttinfo[i] == ttinfo[lindexes[0]]: + transitions[0] = datetime.min + else: + transitions.insert(0, datetime.min) + lindexes.insert(0, i) + + # calculate transition info + transition_info = [] + for i in range(len(transitions)): + inf = ttinfo[lindexes[i]] + utcoffset = inf[0] + if not inf[1]: + dst = 0 + else: + for j in range(i-1, -1, -1): + prev_inf = ttinfo[lindexes[j]] + if not prev_inf[1]: + break + dst = inf[0] - prev_inf[0] # dst offset + + # Bad dst? Look further. DST > 24 hours happens when + # a timzone has moved across the international dateline. + if dst <= 0 or dst > 3600*3: + for j in range(i+1, len(transitions)): + stdinf = ttinfo[lindexes[j]] + if not stdinf[1]: + dst = inf[0] - stdinf[0] + if dst > 0: + break # Found a useful std time. + + tzname = inf[2] + + # Round utcoffset and dst to the nearest minute or the + # datetime library will complain. Conversions to these timezones + # might be up to plus or minus 30 seconds out, but it is + # the best we can do. + utcoffset = int((utcoffset + 30) // 60) * 60 + dst = int((dst + 30) // 60) * 60 + transition_info.append(memorized_ttinfo(utcoffset, dst, tzname)) + + cls = type(zone, (DstTzInfo,), dict( + zone=zone, + _utc_transition_times=transitions, + _transition_info=transition_info)) + + return cls() + +if __name__ == '__main__': + import os.path + from pprint import pprint + base = os.path.join(os.path.dirname(__file__), 'zoneinfo') + tz = build_tzinfo('Australia/Melbourne', + open(os.path.join(base,'Australia','Melbourne'), 'rb')) + tz = build_tzinfo('US/Eastern', + open(os.path.join(base,'US','Eastern'), 'rb')) + pprint(tz._utc_transition_times) + #print tz.asPython(4) + #print tz.transitions_mapping diff --git a/lib/pytz/tzinfo.py b/lib/pytz/tzinfo.py new file mode 100644 index 00000000..1318872d --- /dev/null +++ b/lib/pytz/tzinfo.py @@ -0,0 +1,564 @@ +'''Base classes and helpers for building zone specific tzinfo classes''' + +from datetime import datetime, timedelta, tzinfo +from bisect import bisect_right +try: + set +except NameError: + from sets import Set as set + +import pytz +from pytz.exceptions import AmbiguousTimeError, NonExistentTimeError + +__all__ = [] + +_timedelta_cache = {} +def memorized_timedelta(seconds): + '''Create only one instance of each distinct timedelta''' + try: + return _timedelta_cache[seconds] + except KeyError: + delta = timedelta(seconds=seconds) + _timedelta_cache[seconds] = delta + return delta + +_epoch = datetime.utcfromtimestamp(0) +_datetime_cache = {0: _epoch} +def memorized_datetime(seconds): + '''Create only one instance of each distinct datetime''' + try: + return _datetime_cache[seconds] + except KeyError: + # NB. 
We can't just do datetime.utcfromtimestamp(seconds) as this
+        # fails with negative values under Windows (Bug #90096)
+        dt = _epoch + timedelta(seconds=seconds)
+        _datetime_cache[seconds] = dt
+        return dt
+
+_ttinfo_cache = {}
+def memorized_ttinfo(*args):
+    '''Create only one instance of each distinct tuple'''
+    try:
+        return _ttinfo_cache[args]
+    except KeyError:
+        ttinfo = (
+            memorized_timedelta(args[0]),
+            memorized_timedelta(args[1]),
+            args[2]
+            )
+        _ttinfo_cache[args] = ttinfo
+        return ttinfo
+
+_notime = memorized_timedelta(0)
+
+def _to_seconds(td):
+    '''Convert a timedelta to seconds'''
+    return td.seconds + td.days * 24 * 60 * 60
+
+
+class BaseTzInfo(tzinfo):
+    # Overridden in subclass
+    _utcoffset = None
+    _tzname = None
+    zone = None
+
+    def __str__(self):
+        return self.zone
+
+
+class StaticTzInfo(BaseTzInfo):
+    '''A timezone that has a constant offset from UTC
+
+    These timezones are rare, as most locations have changed their
+    offset at some point in their history
+    '''
+    def fromutc(self, dt):
+        '''See datetime.tzinfo.fromutc'''
+        if dt.tzinfo is not None and dt.tzinfo is not self:
+            raise ValueError('fromutc: dt.tzinfo is not self')
+        return (dt + self._utcoffset).replace(tzinfo=self)
+
+    def utcoffset(self, dt, is_dst=None):
+        '''See datetime.tzinfo.utcoffset
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return self._utcoffset
+
+    def dst(self, dt, is_dst=None):
+        '''See datetime.tzinfo.dst
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return _notime
+
+    def tzname(self, dt, is_dst=None):
+        '''See datetime.tzinfo.tzname
+
+        is_dst is ignored for StaticTzInfo, and exists only to
+        retain compatibility with DstTzInfo.
+        '''
+        return self._tzname
+
+    def localize(self, dt, is_dst=False):
+        '''Convert naive time to local time'''
+        if dt.tzinfo is not None:
+            raise ValueError('Not naive datetime (tzinfo is already set)')
+        return dt.replace(tzinfo=self)
+
+    def normalize(self, dt, is_dst=False):
+        '''Correct the timezone information on the given datetime.
+
+        This is normally a no-op, as StaticTzInfo timezones never have
+        ambiguous cases to correct:
+
+        >>> from pytz import timezone
+        >>> gmt = timezone('GMT')
+        >>> isinstance(gmt, StaticTzInfo)
+        True
+        >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt)
+        >>> gmt.normalize(dt) is dt
+        True
+
+        The supported method of converting between timezones is to use
+        datetime.astimezone(). Currently normalize() also works:
+
+        >>> la = timezone('America/Los_Angeles')
+        >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3))
+        >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
+        >>> gmt.normalize(dt).strftime(fmt)
+        '2011-05-07 08:02:03 GMT (+0000)'
+        '''
+        if dt.tzinfo is self:
+            return dt
+        if dt.tzinfo is None:
+            raise ValueError('Naive time - no tzinfo set')
+        return dt.astimezone(self)
+
+    def __repr__(self):
+        return '<StaticTzInfo %r>' % (self.zone,)
+
+    def __reduce__(self):
+        # Special pickle to zone remains a singleton and to cope with
+        # database changes.
+        return pytz._p, (self.zone,)
+
+
+class DstTzInfo(BaseTzInfo):
+    '''A timezone that has a variable offset from UTC
+
+    The offset might change if daylight saving time comes into effect,
+    or at a point in history when the region decides to change their
+    timezone definition.
+ ''' + # Overridden in subclass + _utc_transition_times = None # Sorted list of DST transition times in UTC + _transition_info = None # [(utcoffset, dstoffset, tzname)] corresponding + # to _utc_transition_times entries + zone = None + + # Set in __init__ + _tzinfos = None + _dst = None # DST offset + + def __init__(self, _inf=None, _tzinfos=None): + if _inf: + self._tzinfos = _tzinfos + self._utcoffset, self._dst, self._tzname = _inf + else: + _tzinfos = {} + self._tzinfos = _tzinfos + self._utcoffset, self._dst, self._tzname = self._transition_info[0] + _tzinfos[self._transition_info[0]] = self + for inf in self._transition_info[1:]: + if inf not in _tzinfos: + _tzinfos[inf] = self.__class__(inf, _tzinfos) + + def fromutc(self, dt): + '''See datetime.tzinfo.fromutc''' + if (dt.tzinfo is not None + and getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos): + raise ValueError('fromutc: dt.tzinfo is not self') + dt = dt.replace(tzinfo=None) + idx = max(0, bisect_right(self._utc_transition_times, dt) - 1) + inf = self._transition_info[idx] + return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf]) + + def normalize(self, dt): + '''Correct the timezone information on the given datetime + + If date arithmetic crosses DST boundaries, the tzinfo + is not magically adjusted. This method normalizes the + tzinfo to the correct one. + + To test, first we need to do some setup + + >>> from pytz import timezone + >>> utc = timezone('UTC') + >>> eastern = timezone('US/Eastern') + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + + We next create a datetime right on an end-of-DST transition point, + the instant when the wallclocks are wound back one hour. + + >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc) + >>> loc_dt = utc_dt.astimezone(eastern) + >>> loc_dt.strftime(fmt) + '2002-10-27 01:00:00 EST (-0500)' + + Now, if we subtract a few minutes from it, note that the timezone + information has not changed. + + >>> before = loc_dt - timedelta(minutes=10) + >>> before.strftime(fmt) + '2002-10-27 00:50:00 EST (-0500)' + + But we can fix that by calling the normalize method + + >>> before = eastern.normalize(before) + >>> before.strftime(fmt) + '2002-10-27 01:50:00 EDT (-0400)' + + The supported method of converting between timezones is to use + datetime.astimezone(). Currently, normalize() also works: + + >>> th = timezone('Asia/Bangkok') + >>> am = timezone('Europe/Amsterdam') + >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3)) + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> am.normalize(dt).strftime(fmt) + '2011-05-06 20:02:03 CEST (+0200)' + ''' + if dt.tzinfo is None: + raise ValueError('Naive time - no tzinfo set') + + # Convert dt in localtime to UTC + offset = dt.tzinfo._utcoffset + dt = dt.replace(tzinfo=None) + dt = dt - offset + # convert it back, and return it + return self.fromutc(dt) + + def localize(self, dt, is_dst=False): + '''Convert naive time to local time. + + This method should be used to construct localtimes, rather + than passing a tzinfo argument to a datetime constructor. + + is_dst is used to determine the correct timezone in the ambigous + period at the end of daylight saving time. 
+ + >>> from pytz import timezone + >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)' + >>> amdam = timezone('Europe/Amsterdam') + >>> dt = datetime(2004, 10, 31, 2, 0, 0) + >>> loc_dt1 = amdam.localize(dt, is_dst=True) + >>> loc_dt2 = amdam.localize(dt, is_dst=False) + >>> loc_dt1.strftime(fmt) + '2004-10-31 02:00:00 CEST (+0200)' + >>> loc_dt2.strftime(fmt) + '2004-10-31 02:00:00 CET (+0100)' + >>> str(loc_dt2 - loc_dt1) + '1:00:00' + + Use is_dst=None to raise an AmbiguousTimeError for ambiguous + times at the end of daylight saving time + + >>> try: + ... loc_dt1 = amdam.localize(dt, is_dst=None) + ... except AmbiguousTimeError: + ... print('Ambiguous') + Ambiguous + + is_dst defaults to False + + >>> amdam.localize(dt) == amdam.localize(dt, False) + True + + is_dst is also used to determine the correct timezone in the + wallclock times jumped over at the start of daylight saving time. + + >>> pacific = timezone('US/Pacific') + >>> dt = datetime(2008, 3, 9, 2, 0, 0) + >>> ploc_dt1 = pacific.localize(dt, is_dst=True) + >>> ploc_dt2 = pacific.localize(dt, is_dst=False) + >>> ploc_dt1.strftime(fmt) + '2008-03-09 02:00:00 PDT (-0700)' + >>> ploc_dt2.strftime(fmt) + '2008-03-09 02:00:00 PST (-0800)' + >>> str(ploc_dt2 - ploc_dt1) + '1:00:00' + + Use is_dst=None to raise a NonExistentTimeError for these skipped + times. + + >>> try: + ... loc_dt1 = pacific.localize(dt, is_dst=None) + ... except NonExistentTimeError: + ... print('Non-existent') + Non-existent + ''' + if dt.tzinfo is not None: + raise ValueError('Not naive datetime (tzinfo is already set)') + + # Find the two best possibilities. + possible_loc_dt = set() + for delta in [timedelta(days=-1), timedelta(days=1)]: + loc_dt = dt + delta + idx = max(0, bisect_right( + self._utc_transition_times, loc_dt) - 1) + inf = self._transition_info[idx] + tzinfo = self._tzinfos[inf] + loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo)) + if loc_dt.replace(tzinfo=None) == dt: + possible_loc_dt.add(loc_dt) + + if len(possible_loc_dt) == 1: + return possible_loc_dt.pop() + + # If there are no possibly correct timezones, we are attempting + # to convert a time that never happened - the time period jumped + # during the start-of-DST transition period. + if len(possible_loc_dt) == 0: + # If we refuse to guess, raise an exception. + if is_dst is None: + raise NonExistentTimeError(dt) + + # If we are forcing the pre-DST side of the DST transition, we + # obtain the correct timezone by winding the clock forward a few + # hours. + elif is_dst: + return self.localize( + dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6) + + # If we are forcing the post-DST side of the DST transition, we + # obtain the correct timezone by winding the clock back. + else: + return self.localize( + dt - timedelta(hours=6), is_dst=False) + timedelta(hours=6) + + + # If we get this far, we have multiple possible timezones - this + # is an ambiguous case occuring during the end-of-DST transition. + + # If told to be strict, raise an exception since we have an + # ambiguous case + if is_dst is None: + raise AmbiguousTimeError(dt) + + # Filter out the possiblilities that don't match the requested + # is_dst + filtered_possible_loc_dt = [ + p for p in possible_loc_dt + if bool(p.tzinfo._dst) == is_dst + ] + + # Hopefully we only have one possibility left. Return it. 
+ if len(filtered_possible_loc_dt) == 1: + return filtered_possible_loc_dt[0] + + if len(filtered_possible_loc_dt) == 0: + filtered_possible_loc_dt = list(possible_loc_dt) + + # If we get this far, we have in a wierd timezone transition + # where the clocks have been wound back but is_dst is the same + # in both (eg. Europe/Warsaw 1915 when they switched to CET). + # At this point, we just have to guess unless we allow more + # hints to be passed in (such as the UTC offset or abbreviation), + # but that is just getting silly. + # + # Choose the earliest (by UTC) applicable timezone if is_dst=True + # Choose the latest (by UTC) applicable timezone if is_dst=False + # i.e., behave like end-of-DST transition + dates = {} # utc -> local + for local_dt in filtered_possible_loc_dt: + utc_time = local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset + assert utc_time not in dates + dates[utc_time] = local_dt + return dates[[min, max][not is_dst](dates)] + + def utcoffset(self, dt, is_dst=None): + '''See datetime.tzinfo.utcoffset + + The is_dst parameter may be used to remove ambiguity during DST + transitions. + + >>> from pytz import timezone + >>> tz = timezone('America/St_Johns') + >>> ambiguous = datetime(2009, 10, 31, 23, 30) + + >>> tz.utcoffset(ambiguous, is_dst=False) + datetime.timedelta(-1, 73800) + + >>> tz.utcoffset(ambiguous, is_dst=True) + datetime.timedelta(-1, 77400) + + >>> try: + ... tz.utcoffset(ambiguous) + ... except AmbiguousTimeError: + ... print('Ambiguous') + Ambiguous + + ''' + if dt is None: + return None + elif dt.tzinfo is not self: + dt = self.localize(dt, is_dst) + return dt.tzinfo._utcoffset + else: + return self._utcoffset + + def dst(self, dt, is_dst=None): + '''See datetime.tzinfo.dst + + The is_dst parameter may be used to remove ambiguity during DST + transitions. + + >>> from pytz import timezone + >>> tz = timezone('America/St_Johns') + + >>> normal = datetime(2009, 9, 1) + + >>> tz.dst(normal) + datetime.timedelta(0, 3600) + >>> tz.dst(normal, is_dst=False) + datetime.timedelta(0, 3600) + >>> tz.dst(normal, is_dst=True) + datetime.timedelta(0, 3600) + + >>> ambiguous = datetime(2009, 10, 31, 23, 30) + + >>> tz.dst(ambiguous, is_dst=False) + datetime.timedelta(0) + >>> tz.dst(ambiguous, is_dst=True) + datetime.timedelta(0, 3600) + >>> try: + ... tz.dst(ambiguous) + ... except AmbiguousTimeError: + ... print('Ambiguous') + Ambiguous + + ''' + if dt is None: + return None + elif dt.tzinfo is not self: + dt = self.localize(dt, is_dst) + return dt.tzinfo._dst + else: + return self._dst + + def tzname(self, dt, is_dst=None): + '''See datetime.tzinfo.tzname + + The is_dst parameter may be used to remove ambiguity during DST + transitions. + + >>> from pytz import timezone + >>> tz = timezone('America/St_Johns') + + >>> normal = datetime(2009, 9, 1) + + >>> tz.tzname(normal) + 'NDT' + >>> tz.tzname(normal, is_dst=False) + 'NDT' + >>> tz.tzname(normal, is_dst=True) + 'NDT' + + >>> ambiguous = datetime(2009, 10, 31, 23, 30) + + >>> tz.tzname(ambiguous, is_dst=False) + 'NST' + >>> tz.tzname(ambiguous, is_dst=True) + 'NDT' + >>> try: + ... tz.tzname(ambiguous) + ... except AmbiguousTimeError: + ... 
print('Ambiguous') + Ambiguous + ''' + if dt is None: + return self.zone + elif dt.tzinfo is not self: + dt = self.localize(dt, is_dst) + return dt.tzinfo._tzname + else: + return self._tzname + + def __repr__(self): + if self._dst: + dst = 'DST' + else: + dst = 'STD' + if self._utcoffset > _notime: + return '' % ( + self.zone, self._tzname, self._utcoffset, dst + ) + else: + return '' % ( + self.zone, self._tzname, self._utcoffset, dst + ) + + def __reduce__(self): + # Special pickle to zone remains a singleton and to cope with + # database changes. + return pytz._p, ( + self.zone, + _to_seconds(self._utcoffset), + _to_seconds(self._dst), + self._tzname + ) + + + +def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None): + """Factory function for unpickling pytz tzinfo instances. + + This is shared for both StaticTzInfo and DstTzInfo instances, because + database changes could cause a zones implementation to switch between + these two base classes and we can't break pickles on a pytz version + upgrade. + """ + # Raises a KeyError if zone no longer exists, which should never happen + # and would be a bug. + tz = pytz.timezone(zone) + + # A StaticTzInfo - just return it + if utcoffset is None: + return tz + + # This pickle was created from a DstTzInfo. We need to + # determine which of the list of tzinfo instances for this zone + # to use in order to restore the state of any datetime instances using + # it correctly. + utcoffset = memorized_timedelta(utcoffset) + dstoffset = memorized_timedelta(dstoffset) + try: + return tz._tzinfos[(utcoffset, dstoffset, tzname)] + except KeyError: + # The particular state requested in this timezone no longer exists. + # This indicates a corrupt pickle, or the timezone database has been + # corrected violently enough to make this particular + # (utcoffset,dstoffset) no longer exist in the zone, or the + # abbreviation has been changed. + pass + + # See if we can find an entry differing only by tzname. Abbreviations + # get changed from the initial guess by the database maintainers to + # match reality when this information is discovered. + for localized_tz in tz._tzinfos.values(): + if (localized_tz._utcoffset == utcoffset + and localized_tz._dst == dstoffset): + return localized_tz + + # This (utcoffset, dstoffset) information has been removed from the + # zone. Add it back. This might occur when the database maintainers have + # corrected incorrect information. datetime instances using this + # incorrect information will continue to do so, exactly as they were + # before being pickled. This is purely an overly paranoid safety net - I + # doubt this will ever been needed in real life. + inf = (utcoffset, dstoffset, tzname) + tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos) + return tz._tzinfos[inf] diff --git a/lib/pytz/zoneinfo/Africa/Abidjan b/lib/pytz/zoneinfo/Africa/Abidjan new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Africa/Accra b/lib/pytz/zoneinfo/Africa/Accra new file mode 100644 index 0000000000000000000000000000000000000000..6ff8fb6b235d413a87fda2af8e7ea9c4bbcf78d9 GIT binary patch literal 840 zcmcK2J4jn$7>DuK%c3A%R1jUflfWK@mRe8Ac%+v(wUGjmZav)#jPE-@ zVbE+1)XR27((QC~*!^leax4_35Le(u;2QLz~@Qx-xtB-(`QO&>ftW$l=!! 
 [GIT binary patch payloads omitted: the remainder of this patch adds the vendored pytz timezone database as new binary files ("new file mode 100644") under lib/pytz/zoneinfo/Africa/ (Addis_Ababa, Banjul, Bissau, Blantyre, Bujumbura, Cairo, Casablanca, Ceuta, Conakry, Dakar, Dar_es_Salaam, El_Aaiun, Freetown, Gaborone, Kampala, Kigali, Lagos, Libreville, Lome, Luanda, Lubumbashi, Maputo, Nouakchott, Ouagadougou, Porto-Novo, Sao_Tome, Timbuktu, Tripoli, Tunis, ...) and lib/pytz/zoneinfo/America/ (Anguilla, Argentina/ComodRivadavia, Argentina/Cordoba, Argentina/Jujuy, Argentina/La_Rioja, Argentina/Mendoza, Argentina/Salta, Argentina/San_Juan, Argentina/San_Luis, Argentina/Tucuman, Argentina/Ushuaia, Aruba, Asuncion, Atikokan, Atka, Bahia_Banderas, Barbados, Belize, Bogota, Boise, Campo_Grande, Cancun, Caracas, Catamarca, Cayenne, Cayman, Chicago, Chihuahua, Coral_Harbour, Cordoba, Costa_Rica, Creston, Cuiaba, Denver, Detroit, Dominica, Eirunepe, El_Salvador, Godthab, Guyana, Havana, Hermosillo, Indiana/Indianapolis, Indiana/Tell_City, Indiana/Vevay, Indiana/Winamac, Indianapolis, Iqaluit, Jamaica, Jujuy, Juneau, Knox_IN, Los_Angeles, ...), each introduced with a base85-encoded "GIT binary patch" literal of the sizes listed in the diffstat.]
zJpVY|>^EG_XZG^mx6D4O-cOZx>%xq@7$ZC1ykWlG6d{d+udixcGE^P&73$-Dc59yMJf5rn`Z`tl4y0)R2QkXCBU%T% z+)H&?*Hd?0JDf{vy-plap$(OC z$EP)__wJ9idZSX^xyk50>xO2QNRVh9q9r_s{rT4GlZ0qhhL5 zl&?*qL&{Wi^Y;>SVW}EkzfVRqm70-NTO_u2u^CnRl*DbBV&d~(*9k>K%;=P2Jtnie zNsP+UV-s4J>jks+A=v`pFufD$I)3t14R5>ajibn!-b>D6CBvXY5kN z{$MFdYA_|u7iC>|wOLnxMAmndo2RR43U!RgtnHbwvt`R2C^MQd^|BNOh6& zBK1WI?6eg|O6;^XMv9D787VVTXQa?brIAu2wML4KR2wNbQg5W-NX3zoJ8jL8q9avD z%8t|>DLhhnr1VJbk>VrON6L@XA6WpWT>)eXoOTV6MR3|xK$Zbn2V^0Tl|YsPSqo$_ zkkvqz16dDbL7a9)kR@^2H9;1|X;%eV7GzzJg+W#ZSsG+*ki|h(2U#9ueUJr0RtQ-l zr(Gjtk(_pwkY#e(bwU;jSt(?xkhMY<3t25>xsdfj77ST2WXYU%&5%WN+EqiA&1u&S xSvX|nkflS`4p}^8^^oQBxUKu&O<#yz#3Z|nBhp95Cd9^#NRN+?jgO5B`5OyZo4^17 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Louisville b/lib/pytz/zoneinfo/America/Louisville new file mode 100644 index 0000000000000000000000000000000000000000..fdf2e88b48cecddf4eafa6d8a41ba7363e4874d6 GIT binary patch literal 2781 zcmd_re@sVwUxvTJ(v;m$ubN&=(eTmKJJj-<7(p4{=C5>X z+H_FXNJ~v?j?`>gjntSqTbVMQh2aAwm5DxPE6cpM=Y1;X)*t=XKb^aCUeC^8@XvjJ z{P~6RGM#^1(e@2D*B*OwP7K;d<@S`|tMC0Pn^$!RZrM~PTZ?{iwsnn{V{zky$If`= zljlOglYM`Zle1#X$+`+vyER5m<(+cs4i=fS^%cRdcCMHD>eAr3HH$@8P7gK|`lYdK zc<}4oLGn#O@8J351ZnbSJKqlWnD3H2&iCCMbHV=g3qLoSADSASi>GT$^N||o(uWOl zxk?9(XTAJ>W|+Bgw3iO;Pc-2-!rG|_nTXX5y491~w3d*HR5zr}M6IGmoRjGE!z%ju z?h?~!kHqZSXkyJO)lRRJ_FufF;|>O-!+{mLnGmA)gZ#@spP1$|eS?Ix*TkxmMIB+0RPI=OzE^t&)v_pe?pDYbqzpt4X> z_YP8NWn(3MLxRc(1We}Q-_$^FuDN^CS(TO4-wevER)eGC%#fa6>LE>+&Cu%~=wU}r zn0x9fb#~Py8UB7y-@EQ}x$ouKdc>kVGIGT-b^nBoGHT8&H9D9*IahZW?|_-w zTf5fe#*Ngudl#B9&3*OQSJue5(<78`=>i%5ZXY#ahEFDzN2&+2(`3>UO=@yty!i8u zD*yGXG9~LxHRXIqGu3lMPp!FPriFIs{H?WSdi`2GW5qu6aP>l6FsH)ItQ@bE?;V-7 zdV!ifpj;ja_|%-Z5}E5wQ*)d1C6E-a0-t6}VboPsxXUB+n@*_(<-^Rv<5%>f#fj#z zs#?7$FJxrhKK*!>Hj5Wk=%Ox%&5{YFdP(S<6b~s^#r21zB%wr=RKG5z*Yed(e^2<@ooj7TZMK^oh9XeDnMN>KqPNZE~D&_`ur{ zk>T)Lo1M1qxxM@B#M9{<4u5|TO zf7t!zU;Fd7?0*mr7qkv-MoNIx04V}e1*8l}9gsq}+Dag$Kx*M?i{Wakfs_NO2T~BE zB1lP)njl3%s)CdSsS8pVq%ufpklG-{L8^n42dR&%Ef7*6q(n%KkRl;fLdt~H2`Lo1 zmEu6DxNNO}Vj;Hhq|ivEky0bIMv9G88!0zZ zZ=~Qz#gURDHAjl>YO9Wv-PP6|DZHz#JW_h3_DJ!O>LcYx>W?e{vI58wAZvguf~#Ex zWEouTIv@+-YF7eT3S=#i#XwdASq@}9kOe_j1X&VfO^`)FRs~rWSGz9A!noR%L6*kV zt_`v{$m$@=gRBp-K*$OqON6WuvPj4(A_p;gBAU(1=u^MwI@M11OpUTiJimNtomsUkt@Algnby}A zR^>pAv3v}sgSvl$&lHQ>5%zc Q&4kE|_zzQh&8i{q7aIB{(EtDd literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Managua b/lib/pytz/zoneinfo/America/Managua new file mode 100644 index 0000000000000000000000000000000000000000..c543ffd475e374b39c6800fcc69c3c06b6eb5dd3 GIT binary patch literal 463 zcmWHE%1kq2zyNGO5fBCe0U!pkMH+y_UR{qDY|bSKoJ;Hi1OtCO5WJEfAZ&Q+f$)~V z0L{dS3R(xf1GJs*J}^4@KENsBzyoLd%m9~=yareK$OJ|vW+oOOWQNlJ|F4%}U;vU5 z3@rcuuU^2w@&EtM4GcUWl2HI6;^P~_;0wgg!66K;Kpg%!0$gdz8Z~gA-NvGxFsiX%W9 zEl#Vbd?OoP-84=+bTjNzANjlt>xmB%g1#96a)$c1tYElp`gTd zFccgL5Cw?>MZuzgQP3!G6uh_&kb)T3K~gX&pd24m3M>Ve0!%@sKvS^kzkz$se$x8_ Dipc^V literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Marigot b/lib/pytz/zoneinfo/America/Marigot new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eCW~TrDkL_V#0Fx~L{~tfV z!1@3G)eDTgKE5FgZf-!rF&KzlLO=$AK?q?BS%C^cmJ0rWXaO4k|9@t?$Po|?asWse T=Sv6kviB!Y)!gV2f81L0 z4i6VJ55KQd=Eqp{UEP+7uVQP<6YUN~gr_x5ru%|oSJ`Kok(wnkJ+I{M*d*cI{7HKk zSH+&6FLl=U7_~P#t+OXqRL-0)L%p!d2wSTdsW>ccXBXRkVxUn&o8 zXjHy!>C(4+O%%i=%7VEDQMm9!9=UT%70nh)e|M)Uj;6_ynrd})aHA~E&QWFU-(-1G ziV9TB%D{?4Rc1`e%8$t+nEX};UkGu0HL9ydKZxr2Azjn^T%4Hd(zW&X#L3|rUFV-x 
zr}{hP=^dl$OsHDcC-kWXUyf{8YEq%p6d8J5sG4IPvU%L8S{4`N*}(#FZpPB*BiqG= ziT65OnIPoAGkr1Ri?|dX)2)ebL|aL}Zd+Ya5$~vs%+IKfO?|Rs>Y?gfZjxQYH`V32 zLfPHZuC7cvWl!xfb#-XH6pl6jc z7t$Eg8PXck8`2!oovmq)qd!~IAV-Hti%5@1lSr3Hn@FEXqe!Pnt4Oc5rdgz0ThlH_ zzqY1fj*gL*IeJE#=I9z}o1<@}ainvkb)~I_t=Ryw16#8NWDm$DICg<- zgJU1aMmTnYY=vVl$YwZpV{5j9?8nw@2-%UX*%GoRWK+nlkZmFRLNIh;V}G;lj~TZAhX0IRZM0$AQ=|xcrbM}x z2uqXfl1q6dmk?UHq$JsrvZiH7$MZRq7hZYch41M(pVO&xUVT2^++u%}{IS9I4Tt5~ z!}ADjZ)e-OD_VNXUbp#I_}Y`7_&S1T`Z~Vv>AC;D()X?YYGDg_t>Qk0t(ckePe^gd{Y6F!PQz$o%q$ zCUN&Mos@CeB(Ja2DdC6Af|Pu{a9FiTosy_i`ufDya8JVOpVDmlrPUr=_S33B$!7;xk3#KS@z< zlG*sZQ;MHVF`Hhs%jPSCP08)2y0od=l$~qU<(2nL#hxR&GPg@@@z?6AxVx%);cC5g zY_r-HUL)JS9#A#Ia;4@)xvKpbFWawWsvVCe$j&pRrmjV#epiNRIQT{+V1RoK3<&%O zm*X6d7jc{uMgkqD`LqmmoJK9dJO`+s@6$bA@nm}?*`8(gcv8I9h2Qi3g+=|pK6C7_ z31Sq)Du`JSyIfksAeOb3-kq|2xWVLZfohWQZt83jNp zV3Yu=m(pAkqZC- literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Mendoza b/lib/pytz/zoneinfo/America/Mendoza new file mode 100644 index 0000000000000000000000000000000000000000..f9eb526c7bef450c9726e79ee8e34312648a0e3b GIT binary patch literal 1173 zcmdVYPe_wt9Ki9{+;oi&hR{DGgcNgKUd**1lxOTG9C zEH`XcpKQ);7vy#0O2)r?I;HMyd8VhgX4Lf3h(ELOteKgb)U);5YVPU_|NhIj=E143 z&d+XAW;mlCj*qKHdkTKxT**9ctLgcJ0X4rG@r&`8DOPUzPcLTGLh`y^_;ATACau4y z-5=FZZhE$$dK1aXhyzJEc2XvboOKC*Aq+WUgy-uj+bU(A!szm~OpSM=Gz?j%!2h zvDy>8J0H^3 ztP^8*l~ck(;&Mu0NMuN8NNh-ONOb7J;~??z^AaEu wArc}IBNC)jqC~=UN}NcbNTf)pNUTV(NVG_}NW4hEPKnt2Zz>K;CzK^IB4RIw&?gHJ59n{ zJ5}PIW^>bX&1zz6g}J%0LQSg5H%X=W>Xx}F=GM#pn7f<_)X60(;H$r1wrCcIzn*cv*kld9q`kwdCA)j0_VpLZ%(g1+@0u<-4I!1gAzEgYRI0m|o|n9gd1`j< zH!{bQs`97&X7bO4)!eIJnR#EHQTLpB$IL%4rVBoN-rUkw(3QP{Zd|2 ztrzc_C`;xS=%pUht0~~pH=n9Zd23# zxq4`DvsqQYUp?GiV;(8{RM&2AmDSn3x~`#G>f<`}nvw!pGrCqcWTZ>Ow*lSgiIT=X zuU>cV7kO+~se1fa#Hkh^^Zmd5L?V5s zm5M};9E=(tiM+i}MZ514+&huCnsn{w3(JCPL6Loy*=L1)76(Ak3?!WmewJ&LZ*ex3z--)Gh}K`J2zx<$n22mA@f5f$T34?icUL6WRgxh zOO9zG^F$_!%oLd_GFOhtBD3X~E;3(^2_rM+m@+bFj!7f4=9sqA&KsFHGIM0=$lQ_1 zBeUn2J~Drf1RxpUNCA=qjwCp37LYVJZ61(BAelf?f#d>729gaV9Y{Vn5`tueBPB>q zIFf>7g(EFan-`A6AerGv4U!uqIY@Sp^dR{`5`<(3NfDAGBuP%2C5|*XZJsz1<+PdN zNEMPRBw0wdkaQvWLK22#3`rT1GbCwFn>8eDPMbH5#5rx|I8uk?jw5+U_BhgqZG@)$JZg!Rj{}Mzl;t9b zB#5FHh1Md;Y#~7|A_)qqgd!*+YSAf>q^#+Dw_3Go5%dq|d@~pBYQE1Z@%s|QADd;K zaG2K|zRy?Yx-d9V9?>;kzQ~5;~-q&f9!D`LmUA=Z-TCE%I(doxusP%nPXVi_T%+6zSL;gACtgMq+v7IVAt5D{I z`IKu#qI6Alh}__CnftU#Y<%@u=G}4$_t--1ITbB7U7OVT&2vS;(I>jFB1#q24(Z~| zIm&YP>CKVv)s~o6y;Y1UZ}@5HeL0}EO}5CAkzTcZyj1SE(l2)2%#fwsCq&uF<+9x0 zAS&9@^{!k?>@JPemC-4pDt(6bEhtje%UP2J$nnU7i_eIrSif%i(k`0A@^te=g*fzKr9OPy zDULi0(=F%ARBQi7-PVz*+It>r5i}!c=0ErZ0#~PnF!xyz90&|u7a`n1^L{>Ydo1BB zwnVl$&MeFP$G>O&g&R}9UJ97u2tV-z;tIqUhBFXv816v)VK@Zwh~bh=;}gRvh*uD| zAbvp{gLnpU4dNTbIf!=<_aOd39E5lXanYvnk>R9G<0ZpQh@T8cA)Yc^h4{*F7UC_$ zU5LLBhanzAT!#1zaoVQw8sav@Z;0a%&l#>md}lZh@t)y6#D7KukPa9vKze{QVbgR0 zX#>&+q!CCbkX9hQK$?Mc18E1+52GPSM~s#rJu#ZHX}V&xWz+PumYc47bk literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Metlakatla b/lib/pytz/zoneinfo/America/Metlakatla new file mode 100644 index 0000000000000000000000000000000000000000..e66cc3417a0d82b9ab182e793ed3ec09f6d7b9be GIT binary patch literal 716 zcmbV}J1+!L7>3X6jU9`Fb=&F$Sym+yw@QRWBqYR$&}k&wxil0Kg@i4{(NRdI5(-vH zWqYxOLPF&SC=?2n@jas?3g5~5J!fXBd0uZZzuc>SOkCa&rcJ^z#vzOA5jVcpe?Xt0@PCq{5+QdlGujqNT75k%pQkQM=;?}rtSGMY< z=%=3Ub#>{`uemAe+Ve}Jj}LWS!J7Kbb={C(GwFpn-8kWyrlA4doa;9&ZL_vD*T1CSOVO+ea!Gy-V_(hQ^>NJEg8AWbQ$Di(?Vd6at0V(=f0eOD&i Il{yFAFY4k$rTmW${i zLKM9y)E4>377-*Eg<(Wx7z0I!EjkL4^tzqzQma-$oBqSO-(}!3o9}ZKcs)_#k4-ke za9Odryw6hRakXPuwvCnhIy|}V&XfUnS8$xW>-%y2=xw{Z_gqX?-{w(Y-^fr_zkjc< 
zzcV*$@It8XO?CaV$dYOiRTU_s9r+?AXGF#>P7rZXPvr6`^F;jIVIBYBhgk9Tu}&Bd zP%Aqg>Q!yu)asr_op}1KTGK3bQpE$6+;B>+&AhBqc2&sKWesXwYPMV-;!%!8(b6$d zFVX@+W!kIVV#B-7GQG)V&htye#@pjMvu2veI{8v(7cEvfW!-vH@>J!@Xx5t- zd{nt%bvjQxRr#S8WxlUXZ5gkX1rM6k))$3x+s#(7{ce&hJaIyoh;wdZ@U_72Qedxvi5(mNBXtoM=J*EFcguU(fF z`|m4x{Dj7`Kc{Cn zUBbD^B~rMl=H|JK9>3@M3#}%8ePo%T7eq0LW)RgNy4f_!L9~OY2hq=_Q4pe`O`{@2 zM~IUAEm|_vgy_jo6rw3ZRfw((Wo;U58R|mxg(wWs7@{&nXNb}ets!bd^oA%7(Hx>W zM0betHjVZS^=%sc83{l#V59)afsq6x3q~4{JRpfcGJ&K5$pw-OBpXONHcdW|gdiC~ zQi9~fND7h_BP~c?jKmJ;L9OX=*!cE-L+<3;y69;hM4)E2J51;HL~EvSQXwGk%hP8Riq*y za^izpG$Toh>mRGd$zLT}a!p0Qzm(FP!>Y9Jiz!RFs>>eqn5F%Pboqr&v#hsPSL|;y z%ezFcsA@JV+k<*l__VBUji@!gCaJ6rsH(&YSzDN-)_%&B>hWJy_0v>|#lKXs>v6LF z{av-;WQy7NtVh>0IHtCvQ`eQeHJgsK>CHJ0%$B-ly><8{vn{e*Z|lD=4WTC0(0frD zM^vcBu2$LcEmt+Q$E5jHs@mC7B)jg!sg@N!*?p={IRoMb{*#yMJ}Gk?*L|d&L9W|l zoFT6J?#+P21>vYOdvVl}u=mK1+R^+v(Z875x8MJ9?O}3!Wp>E)koh4KL}rLg5t$=0 zNo1CucACgMJ?%u1nIcm~=88-fnJqG1WWLCRkr^XXM&^u68ksdRZDih_cH+p)k*Ond zM<$QV9+^HeexB!gswq=V#xB!pyyq=e*z zB!y(iI}3Ma~?qnGw~>bW}u*EBv)?d`}8ul-u9w%_dK6x&+grQ_xmQ# z$$C4=_K)Lj-f(fwnTzMZ56$hv)&rubbdB6`=!z&CAFImt4{;psAEXX%=;kPIcdH|d zZ#pV2*Quj3e{}p%R-ukXf9*KF?Q>Px`xD2B+%|R6@2un0tF7u(^B%|Pz8UIt&6$cb z1=nTOkO#R{vRPH<)#wY3bLwJBsjeBcPyH0LRoB`+RhI%>`jXC5KX+`Amn&weU+PxM zE4!D7y7F{+wJ=raqBwaiD^^_350Ui=LE=V+muwj77LD-(_03*^>Q<;-|JwJ4y6s)B zZ$CVterv4MO*LPs<`c#G&XG!Sch7OzvZGkEuG=N=ttu4va|+~xc?-qw)3W8mxFqq% zh(y^oG)w*2GeSP{nW`SOP0{VQ!j-!=Og}!=Q+1T~(4OK}W!v1M?Q5%5mt|GjE4M{- zm3L(~Q6+o^mB=UjHwjn?2koIt6K(NyDIvhuanOlJt_w5tB?bC zejx_s-P0i-tr3G$F6bfIJJqu>2lR7OK32~Me5ON(FHoT!-^pRV@oHGzW;y)!F!e%t zx+|=DxHG&c&J|J8-8mvZ#5HpL9cN_b1b5UT<&2IW=pHrWTW3tD-Th+J2Ipw+dUtH_ z-lo{bO81!d^-W_>6uZY=UeYvv&vDm;vec%R*6nh|ZHsMk@zpM!;1{hDN;346<^VNy`%0Nu6D5*d>GHJ$eZ;i6aZ+r2 zD5j4Oku&mY#mxR*GCAdtNN#VCsWIC{YJ*+R9#E=gAFtPG9b46$FDv!ybuKk`b+Jw_ z&rlij3-!DrhmuLVBEjo8yv3f72N-xSOREq=l>m}0`swExg<W*){i0=ZWo?9T<)_IHO8rHC#yGiZ^F3kr>e9)lYqw5bcJ{^p+B=Wu z*d&|H<2kt3*45*=sO&x-&&di~ck9{Ty7x&;O14d&nQWUl$y_+gTq4nMq_Sk-0=B6PZn9I+6KACKQ=bWJ-}awKS88%qlXi$h;yGi_9!CwaDBelZ(tQ zGQG(BA`^_vFfzr+93zu#X=WLjW@Mg`iAHOtd0?tJX0CzBMrIqCZe+fZ2}foenQ~;# zEzP7OvyMzVGVjR5BQuXoJu>&mOg}RJNCJ=yASpm{fF!}vWC2M7k_RLaNG6a} zAh|%2fn)C|TB$G%gkz69lM6!vb6Uir% zP$Z*BN|Br*NwqXtMbc_%@`@zZ(qtA%Es|R#xkz@A^dk915{zURNimXRB*~U0%Sf6n zO`efNTbfKGsYY^*Bpb;#l5Qm5NWyLZH@wq^`Lb-5Xy29 zL=yC%7lqa$$t;i{7m)--R6-FH5w++ulBCz|o@v>tMfAITb7n5w+1ztHrG8(6{IMDK z9S-x_hwpQ={c(P1q9UqfQkK--G0vJ6NoWpLi7u}s6}?x<(P@&B@IWn}xkyqMzB8$B zf69t)_f6VVh+a8#*Q^@+u2+xrn)IVD^_qTVG8#s8X7>@bw&1MJs%lW#aou`dcA?4% z_i5MCMCF?9lH8C8mHVt(*1!Iu@@~2$e|(;CpNNqSSEo!t>ulM0_^BzZjMhc*(~GQ8*x&3Uk&OlQ*Ekrv`=q+Ql_?D9+2%fGE`a5F)2SDr7HYQ zQrVGicI0|wXIZ4Fib;{`^kCzgTcm3izcDqRlJu@0W5)kzzOH?7$<$q+*7ale)b73s zz30M3)zElbt3y4ivGln#`RY`2+KBAUajSg`PDx9g-?V(~l-BS((>hTp`#&x-2X1A_ z!H40d?OeHTADA>9U75PG_py^w@0#y?UU`X-zoF_ zg$L8WAF=<@vhabQIKl7&;s(PHh$9S7Ag(ZcfjGnPCP3RAhCdL8ARa+ng7^e+3gQ*S zEr?$b#~_|TT!Z)qaSq~LfVO)K{{plfWOxX1k>Ml6NrsmYHyM6H9EEraaTVe##94^9 z5O*Q|2537B@fhMV#Ak@p46h+>GyH})&hQ-KI>UE}^9=7H?nC?!&~5;50EAx zT|nA^^Z{uE(g~y$MlX~v$IRF Vvx{{nIh>C8Shq9Yk?e>M`vuR{Xd(ar literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Montevideo b/lib/pytz/zoneinfo/America/Montevideo new file mode 100644 index 0000000000000000000000000000000000000000..ab3d68076d1e4c3f3719884e1e27b3c94867aa8e GIT binary patch literal 2160 zcmd_qTToPW0LSrN0=cB(z%*qRtQny~+fk=xE3o5*KxFNd3vp2~FOjAeLYrFxIZ4iB z4~-hsjK$JzplIe_M6*sQ$FKk$>OY~451LjsI*z2N6Al=s|My>v4?XnKTj$LAf6gxK z^Vs-fickF0^tk)!Mmn6*9Mfxt(`r zfn3l&*Us9{<`H1zawRqBLGhj$7Fx+bv 
zP58trI`xbF=!IQY@quo;LJ-kasBaf{5=H(iyh2Mn_!YNA!qmt;Qi3J=>)uiYyv@BYl*nBOF;Zg}0Up1ws^k93&9gq1S*L!>yG2_Z8=ri9E1nG`ZBWLn6)kclBPL#Bqz z4VfG=J7jvu{E!K1)eMm-B6CD0sa3N?rm0o)L?)_LGexGVRdYoqi_BK5ri;uMnJ_YA z)Klh#IdiE=1G7e^tyS|zCXUP;nL09eWb(-Dk?AAzM-qT!07(Io1FcE|k_D|w1Cj@= zN(7P#Bo#<5kYpg)K+=KaL#q;kWJIe{g5(5A3X&BhEl6IF#2}eLQiJ3MNe+@7Bt1xe zv?@VJhO{b0NRE&sAz4DwgyacH6p|?g5K9LMn+BGM2Mwbha;DuN`I#u{8(Be-%)qJ(m2$OLUpEfKATDpi?jO^1n^ zY7{|GjV-A(#I-bxPAdep#-XIw5)M&f%kB9-Z~LO-U1$23=YH;(o43F3KQ=yTN@v?Y zo*L$VczJG^m+$o#n2*)_y!7g839igN%jKG%Zi(-fjILa}YN31G3jfOW6MWoRGu?Va zAGdqs$YXj_&@uOx(A#=T>6h-U&$i0%3#EJ8#V_O!nIqgedq+CArvG)2Hm0)ake)eWt~J^?O9NK5JX6&NWEa=j&vP^HrPVg*)luV(C(OY4bu+S~yK! zUYa0OrbAvydQ)6Y3zvUHw-wjM2g>UqexfWYRR3A8jdBME=o=01tD8Re^v#E-)UC1- zefvzFDlf{_clMWvyE!MNUXv^CE!!>c&tEAjk~8Fk33J6?F{$!l-?8FRc)YA^J4ZbZ zh?Gx!$EYWjqxDmFFZJwfPyPI8b5*scnf7e*DBH>^?X~2Jsy0XI>at3Bi|4Y2QwbmY zWm&W1LE+otfPATLmhiK!mwx&)QA;hBwa=|n{;ot>Cx4L&*!-!kyK0IGTsm3Tn>$#& zoD`+&k7=hGM2F~xJ!-2)A+>blmLcL5-*VlgS}oD!zMpJ*rCc;Saa%S&a9jlK_)WgL zc86$@V$-jEvRt%`yQ*7F%~7o*4(r#4e5Kkn*r%Pf6UD&r6gg<% z05PZ{TStXXP*El6I@-UVirzI{4=Hb@hOS;Jhn=wt$GmBBcwS2pGtMEM8Q$Wp$Z$Dg zc9|I2EKtVA9Tl-pE99t%En?L506jXWNR2*xPmig}R`FRSdTeRB8n-A{zf(9}jZavq zCu9y+!m(RUTry2e3eAx3COSldf2y1u6)q;1$IHavK#}-sq)hUu5J}sDK| zJ^lDS^{HyN$BESAm{nq0t@U_`Io+Bm&BxNAITjp*4Dy{DpZu<{966 za4Ir_mSzZ%F+>It8AW6mk#R%@5*bNkD3P&51``=gOEa9vcp?Lej3_ds$e1F7ii|2U ztjM?`1B;9-GPKCpB7=*JuB91XWPB~n03#!e3^6js$RH!5j0`g}&d5L`BaI9-GS1qloi86-58 zCN@ZLkmw-cK`TCdAwWDPLO_U+7$HGIqJ)GAi4zhiBvMGIkXTuoU?I^$!iB^O2^bPF zBxFd;kf0$^L&Ao{4GA0)IV5yQ?2zDDn&=_nL*j=75Q!iXLL`Pr5RoV%VMOAH1QLlP z5=u)GOC*?;< zcqH;j=q*j`k>FdJ=p*5`H1S6c0OSZj4gusCKn?=rC_oMaIV+1)!kfQ`SOpxOQIZ$jbc18S8FBCs>xI~x(rh}tPSm&^g9UPrHg>?$+ G==V3NSezID literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Montserrat b/lib/pytz/zoneinfo/America/Montserrat new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC9?8oq9TdcLHkFkm%@A^Ey{n=mrSD)MG9^c*ez5ia% zK(KjRvFjgan*D{#>9v>Vj^C}#G>NJNbxp2Yqt*1v%QBx&7s1YHrwN?s&RKW!HUc z=4~I)cb1$rcdd9<-<|%N$;sQPbD|$fuDed>j=U;)Ka}eEgI$t898d-Q&Eh$}P6@s3HgpvGtJUHgqH$(e5!77wb@87Zc0_E)-W^k=hZ>O)PNzv=Fx#+-BuSha_EG9 ztVEj~ZGC!Y`WdsUwq5UvUY2lKuL_S0NhBkpB7=veeIlry=szG&UaC-gIzsZ)$t=~e z$tQcCcPUqV+<)*Bi@hIqxni-8Psb(1VyE7AC9d)ZT-8Da035gJrjSwV*fD~ zV@1f4kToHTLRN(=3t5+=T^O=5WNFCS&{-T0td3)s2docSAhJSaiO3p}MIx(2mWiwr zS*W93DY8^#t;k}L)gsG9){86{SuwI?WX;H;kyRthM%Ili99g-eT{^OMWbw%Ak>w-n zM+$&c04V`d1EdH@6_7F@b#SzWKq}#AOM%n^DF#vvq#Q^+kb)o;K}v$u1Stwq6{IXk zU68^cm2tGCL282(2dNHH9;7}V16bY#kQYNHMjWdT@sW4Jvq{c{*@&8?Aw+(cO4RyYMfv4D0nC~wt@)UUr-G2cP_@SKu literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/New_York b/lib/pytz/zoneinfo/America/New_York new file mode 100644 index 0000000000000000000000000000000000000000..7553fee37a5d03e9163ee19b1ced730a02345cfb GIT binary patch literal 3545 zcmd_sSy0tw7{~FW;u1<|in&Fm6{0L|xKd(jgnGmUQpxbGKnAsVN+m4AN|bP>tkJ=? z!Q9a_a=|1E(F`4%HgPS*S5s0GdzBW_I;Z#h-geP=(N%xve?Dg%818=GCn+U!T5r!k zp2qfnczG_{m+x&}v>!$5LS@CrKdJW?d1U3=U#eB{j*B;NN9u6QjyHo{+MdLuyyRuVz=}cJ;}*W9HM6Z*=*-GP8ThR$Z~? 
z9kVBEnckcCg83{lTklJoYCeyiq$|DiWPk7=eIPPb4%AOn2ZQ3|;PHX#i&u;s>iUZu zQa5zfoO9-I+$nt|xzZf%yjvfODK^JFEA@$x#pZ-wpuh92m+vdm^~vf2Ikn+sRb4(q zP8XypUF4NBnGdS7xzX}NLN|3TwUwNo7^Q3CBh8QfTj~p8!RBJyYx+`?tLD;ghxJc2 zRp#>19lEx%)LhwJrG73sBxXgay1Hb$T${gK)nygRFH`5LUlViWw;_+H-=kBczT30< zkKkCj-fXhIUO&m)xG-4%d3=!h>%bk_x3iP+ulH-ua-V6Ce?~WaR+~oRQvvEPX*^b| zCUK{wY0tf?>8tJKmX>SOEt{8_K(k0S*9)b^iB&qNB13L1%hSOd7MPZAP1CIk(#>si zAJVNe<4v2%-E~MpxM@4Eg}yz!xoOuWT(xgjYdSP+t~y)`l#XX=Ri|$+%N={ZR-s$I zk~>#!QJu3r=B}5PsxHZAP1orq`tF#0=AMyn=zBxfnXvA&beQim2@g!x;ni!U`=$Q6 zM|r+PR3)j%qD+a})=x#}j*^~B+o@g|8K(C$*HxeR1k-o?Nfi^;!}RN2uKG6(G6On( zrw7#hYzE%=L=UR`)(rl>NXM33k^6SNsPA9$jSP9`aUGYnRfguxR}UmElVNF(so~Mt zGGh2JHKMNA#79om@l}gWLeNm1ux+LpS=&{QdbdDEAB|Jqc{60pjxH*3idV)K2B>kd z(K3EcjhfJ@l_Vt}P)RrHf!UjW>RRSp0w|(nd~dpDQl|CBh`!bl)O^&X!%T? znzr0bEgGYhce^~6KSMnpStw6rcvV_Zj->ozrL!U%(GouMi>H9_XT=}`?E+~mJT0XO*zH~R_6OYt*7Fr~ zUy+SPb{5%MWN(qpMRph2USxlf4R+ccMz+{#_ZZn^WS5a`M)n!mXk@36tw#15*=%ID zk?ltI8`*HD-Em~gop#TWO-FX!X}2BOcVy#{okzAF*?VO3k==LN?ML<>X#mmzqyMS(m14ZNb8W^Aedx|s_=_=Azq_0S0ka;NP%(sZZoI?{He??~g3 z&LgcydXF?8=|0kar2ohb;IwxDatk=^J%HQ1axWk^19CSY zw*zuNAU6bZMZQ``|338(#cM5W=AomJ#vmkd1a=UnL`V#q{9xs9Rrirn)O@y~k SRPU&s5#C1GyYGMZ zdWM^$TPnSOTvyv~I9z@9@H}&uy~R)V%c~#!s?JRY8$a7?k$H!Vun zv;1i$C*QAbP8~P1lhf4fKYTLhr*V~g;WxQu_`J$H_J!Q~+A%dZ=9$}e_pAJ-pH0Em zL4AAG8FR<7=k=Z0Z<@lQZMty!6Y=|+w14ysDf(rho;TDd#Uo)=5|2vh@dc`^XStO3 z=ctNE#8h_vrRE2M=B`ygsH(zwW+FP-1qWkUEA@lEa}~;?r(TOmbPqCb!9QBuUo3>Cytmv$wnO**>8dwOLXvf zyID3~pqIb0PgZd7&+IyXzgP3Kg2YC_gjy{*<< znqk)Foz-j4TrumWkLc#ZBj(}J{d#@x3G>KMyWY@p&}@va)GG9zZ0c)Mn@a}d(MU+O zWOPYupiH%nH%p{2OGQ4fk?1uODta_ewvK(J+6ESz_VZWtW3gQG_~3}{Xqq;1;Dmmn zN}JA(gL+%`$7Xv&x86Seqr|EQRBUuux^lWy*U%y9o@`c6#rMe5-_)obJrQ~4WS;8T z5R{!SdMY_7Iq9kt?*Hj0kvLlGd5OfQr;}0=iBs=*sqQ)5-7{B&!d`uyJ*(|`$ezZq z{Y!gZlDmcfeF}T+58V_ddBa`_dv#vkU5iWunFlfvWG2W|oOUj}%Vdz*Ak%T$`5+TQ zW`s-$nG-T8WLC(uka-~!LuQ6d4VfDE?F5k-B2z@>h)fchB{EH9p2$Rz znIcm~=88-fnXS`K7n!fqP8gXnGG%1W$fS{3BhyCajZ7SwIWl!*?#Sek*(1|O=I^u# zKr(=&0LcN81SAVc8jw67i9j-eqyotWk_;ppPMZ!SA5NPPBqL6n5+o-`Qjn}5X+iRW zBnHV0k{Tp8NOF+uIBj~6{5WlbkPJC(ijW*3NkX!OqzTCrk|-ooNUD%rA<06rg`^9~ zm(wN;$(Ylo49S_(CJo6Nk~So7NaB#pA*n-hha?Zl9+EyJe@>e~B!fCCOt z>E>`69p~6;%XX%lX)e0uT3eR8!>uHjrCT#yveqkQ`EfDGI3jLAY z1@4%&i$hUOwc)X~b3)Ojv%|i;@gd(!tHZI0qoLTtSKPSoJ`dgY)o?g|_!oad=w$fz z6T|*HUO(q1?mOwfbI056UF%-)k83^PCe?59Cs+2mdKie=;14-t!ABxJ z`AeCR^@hl}ab0J|y(}^>e4(?hwu|h+cXiI$7uC%EgF5%<8a1o;c`f(2Dz8P!{EmDz zyW%-Hr!GkqWHigd{GZia-@~#fb(|==e6K9N{*$=x!#H{W`Oid&PS+2dJRu(36{F|v z-!J@aS9R&uw^Uip$NHh>eX4xcn|glv1~vbOi?U*7pQt!{N>(PU7nMf`=d&{2WkvvsiGk_vwPh+LZHiKQQ8 z$@&`?MZ-Y6eC)z0v25#ze0*>~EN>3W75&>qWA(SXY5O7d#LP4L$(C)ZIYH}{6)kGz zwcWZUqf)he(y3d0*{bz$P_Me2pq}2_A)g6niPc?8+JbXs`_2)uws4Yswk<3= zlCR6onj@mqo1nXj&Zw>nzv}LZT6GVe*Xu@itM&cw=ndyO)yCdebRrA?Z|W;j z5s_meqQ>6riH`Bz!pHNPM;yoVR=Q4<=XEqj#Mp1V{Uy=flRR%ryED=Ac5aSHwf89t zs{_uWI`ckiUN_*BnR{7zz^ONuR+&p_mARP{!H4tLCGg)%i!=Yg%h_#S?&+iaWsYag zywJ!V?Acy3ge(wQq17xAStHs-a$%JmvrJ%}$U>2oB1=WqiYyjct<@|SS+CVB7+Ep0 zWMs|AqLEc2%SP6XEF4)mvUFta$l{UJBg;qDj}!o@fYp@1YHENK0jUB~2BZ#1A&^QS zr9f(d6a%RSQVygZNI|ToB1lQBrY1;Hkg6bMLF$4O2B{2E8l*NzaggdDV^~! 
zsT@)|q;^R0km@1jL+XbV&}u4(l+bEwh!oLks)&>ksUuQIq>@M}ky;|fM5>9D6R9Us zP^6+rNv)=)NKvh(sz_O_rmjd~k;)>aMQV!_7pX2%UZlQAfsqO$CAOLxBSp5FDkEjK znmQweMkY^2&qxgC@4sK~L=H+y`sF}Lt>oMWr$!$*Ti*t0EYF(824p z2-j0OlG+r}@{No>dQ|MPrY91Xvb>&Oy&UKE(6St-_Tp)GocoH^H9ccnld}ulZ1X2M z_8-DDz8^7P={N*JBDtDSNGv2663x|wL*lubfJj6nBoY$|ibO@iB5{$xNMu(N8j0;{ zf+Nw9@cew@BLg5KAVVNyAcG*IxSC;*aa_$n$VkXg$XKpsFl03R_u+hI!;tR_2W=Xp literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/North_Dakota/Beulah b/lib/pytz/zoneinfo/America/North_Dakota/Beulah new file mode 100644 index 0000000000000000000000000000000000000000..8174c8828851a7ac72aa65cbd5135664152e3182 GIT binary patch literal 2389 zcmd_qZA_L`9LMo7TBsu@`mIv{CO1w5Ck1LbiQ zcxXTd`Ce1keATOiFT5*4j_=m9j`fOb-|CU_rQPDXj%VcT=2~%meT|&6CQpRs-y`Ry z$BG+bZqR6c-Pkl=-t_cgwP3X? z7jFDm+?;w*-cs<6SQL6v#>8n6Gxep84caVXM^EUu%U-c~_;nqB)=>%F&+5b@C2Gme zI-Rt4nM$r}l_^{1s-=1B<+74-l^S!eT%Pi|x-B41rbYZN(!L9p=@S=3#s@#k+rJnR zEBb?U=Fom|$DW_`o%?o)tcFkYU9J1n-9`O6yJm;VS@OKj%~_{%e>)}f5?Vyw**9c< zNR`Mx(jg1Rvc!F_9MCJzRjR_4W_AD3B&VpdLKSz;b{<%fsY)8JIIAL~m3Q?y$2;j! zrKy8X={HfLJnA!F`6(gROda*D?Ykr@M!S5Kt?!EmhqwExY7UBZ-IczF3g1)J?aj`^ z34Q93x(cTzq)pZ4WjeKE8&q9Pv{QGwK-C9$occrYYU8(GJCAnei^tBmzQ>yuizg0W z^fi=*2xrep-=@@G#O8(rzAd4jimgTMzO7SZswu6{X&ODFwuQAh+lB{J^Y{j5d-q=T z)cFFZWoM(>F&OXctSVExx@I|k9`l*zZ{hVn{>+&EuRgBZ^MVkr`*x4V-*xwEG1I;W z+i&T*$ND{Uco$(S3%#N+$2_adQ)ZrGugEq(XPcinkNpPkKd)^B1^o zm49$QvOZ*i$O@4qB5OnzX*H`vmWiwrStznnv`giMwQ^>$z-l>`i>wz}FtTD~$;g_m zX3@y1t!CNCx{-w=E9Y1`vUZNeBdh0FKC*s}0w5LOC;?Ifjv^pcK+1sB!DeZ~BuGs-ih@+dYRZDt#cB$JREDE8NNtefAk{(2gVYBp5KREwirNWC};hExnG8B#N(Xh_wprff*vtfp{C z<*cT3NbNX^hg1(KA5uT0fJg3U!RgtnHb+wwpB9*n8(jv9Bn&KkWMas)jU!=es6-G+TQDdaY992fj z%u#2nDKt`Pt0^^7YpW?XQf;K%NWGDQBNazVj?`S(e*Z6W9%Kr?+>}1PaA{&nVp4oz La&lsFVp7nb$wqRA literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/North_Dakota/Center b/lib/pytz/zoneinfo/America/North_Dakota/Center new file mode 100644 index 0000000000000000000000000000000000000000..8035b24fafeff6fad833094f814905213b2f3a0b GIT binary patch literal 2389 zcmd_qUrd#C9LMqBL6kp%J((235lRa}0Rh8+q%0r?c?9*yA0Y~Z5X<=a2NX5>l?>*E z`D`x4{t!5FwK8OGhI1p#YOQoGY|hq3YRego^{l*Lf#>P>{&m?!>!Pc^XU};*IN7-Q zy!`7MpUV(`yAb^^JY1K4c=sy()pIn*e&=+E>h18_eQ*4%zTOvMk1o6^M{A?SACt}2 z)Qt);eYxDa^<|C-3}svHdtM>BV=Yf-Km_f$WzEf7s6GG8FDhY-%ukLA?Z{7RK zfLgH5wH9tVC+^Faq6y^6`=*DzC0d<}Ybi1^KIG!S5f7!j#QU z;l;zED6+&UI)&sA%hZ_2gac;#Pr zN&2Tfsw{Izmi-hbD&oF%Dn1vYGH}va-+xV1jdeTK?VpLKhIctNbw|b1J=M-LzE4$c zXRF+h(yyLvsFHP&9jd-CSJqE#R1Jyovf+HOY7Ftn#$(B9)Ab+a^Ib*ag$u5;xn;3< zap0=cR2C`ZfwRt*%wNUUrgxofQQwH|C7sUpz=Uea>X$8J7u3$^4!Lu9P_<5Ol)HKk zs@<21<({@?_0mwXY^y0(FL%$DK_2~?5p2Tizxd3W`Ja7Uw=Yx(*ZrW^6YRQg+alDw zN0@KRbx$4l%;jA~uk`taFJC`v^;52&QoqR4Kj-P6Igj}!{(oNEA^o4t+9v<{E&Ug` zZcT8+amxBA3#6=&vP8-nDT_4KtE4QGvQElEDJ!LBsl2dOPA`_QT8`yX)=OD1WyO>w zQ`T&%7fo5Usa`f^-IRq>R?e|>%Gx;=Pgy<3@+s@*5P+fphXfQ2I7FbRK#_r>gQ*sR zq69?>iWU?xC~8pTpy)vngrW$CBos|JM4_l+s%4?*VycCqD8nHQMH`AZ6m=-_Q1qb) z#Gw#HA`XoxB5|lhk%>bmQ!NySQWU8;w4#VbQHvrMMK6kA6vZf#Q8c57Mp4aF%f_Lb zsTPhyIa4hihjtY4DC$wtDN37asVQ2UYOyJ5Q{<-TO%a@;I7M=b=EC^>zu-Jv3%^WDpX^(jnvt58 N>`PBiO;1e={}b5BehUBq literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/North_Dakota/New_Salem b/lib/pytz/zoneinfo/America/North_Dakota/New_Salem new file mode 100644 index 0000000000000000000000000000000000000000..5b630ee66715d60ee99405709d86e0e6a0c8b29c GIT binary patch literal 2389 zcmd_qUrd#C9LMqBLG;K8?3allAe5E_0s@L5Nm)Pyc?9tYC`3^pVi}KrAW_3#NiZ** z&*p|$Nnqq^&4jrbj*T#@wUu*Wb6Ojvt$!BvtlVOOJpJCkF1zTW?)siR=k?%Z+Sf>5zfLC?3M%&#l{h&VH7itgBzitgzMTkGf8szx* 
zWn$t|sl4$~rkLzsCfzrph3JTqp0-I5^z02eWlN+Au9{;{Et*i%R*c);lo93KpKgc9 zAr*4GPx_pr>ZY%{?a<5bim;)*_Kefr;^sHIq&&P=+_LWlIkTxo+*(&HXRXT<;rS2A z*~{X^ZSi-@hS3^|v@N|3bnIBb5-!(h7`<%MHajv}M`4ej1 zT362B^oh7L{ffM+;BB!W{H%;iutnVDmv+2wtB4;tV<%h-h=qf%*@+h{mDKs7oqV!b zEo!c{Qw}asskJRKZQE?MIB$boQaq;8;~thv(>_=CcoSqs)bAqWyHL4o{IbaW@Mn4N z7XxB>ug}gJI3n&l@RNQ2%X`F%`XTi|%YN&@!lNp?y57oJ)S_~8R$IBhy(99HHamG2 z4vYM-LMQ*^E>SR=>OAtwA+_@2D$BpSNv%4SVii`DtD^Rq*6QV1s<`30wI(`N1=e1) z0uvrplHPBXd=n$eVm@=qJ`iHv zP1X}hJ?hEYa;rM5Rn_EWSv8{@Rc&0XReP>L)pD#ZZr`q$y)8}1hbK^qs z%!w;beMy+G4xDwir2ite*1zs-3;$GXFKlzRPmZd_j2^3T{Cr+8?Bw4 z2i2}i1=jB72K8KjqSahks`hluu!21LGbPx->%aa?oBAJoT(@gf2-kh9%MAI(TJ+pWh5i9)x;m^^}8vT^&rzjw@_0QS*XU=23A@I*@JEZ@!S=-oOzoq{I z*R2c=?M2pyED%{CvP5K!$Rdq;mB=!Ybs`HzR*Gh+ys%bIFBVuW$8wSNA`3=Vj4T;h zvr#V^S+!9w8(BB9aAf5iOGnnuv3O+l9Lq=6&rtxR0vshkYQRwhqzXtGkUAK3A&^QS zr9f(d6a%RSQVygZNI{T_aFhh82}e%W5mF?iN=TWIIvI7LkV2hZGN~9#TG}enm<)|o9QlzFxQIV=5Wku?0)P+SVYt*GhYHQTRMXHOGm!rN&fjKISl$fK&NRc_J zjFg$9&PH8mq|!!RYNXaiU2LSw#i73`6kleMH(3k&qU_eRcdSSiO3p`P}}bJi|p=?YI|RY*wOe{<#hJRoyEf{ zx28?zZN8#*qm?`5p>)5AH$3ZJ3(rK9ad>G$l--;&%4cqgiq0wH z$oM5uSu<)>4b+Qj=cGE?)g_N5jHu&YpR8HgqiXXDW$mYc^2Q}d?<0@$g+)u>olJG& z?Ppovd&oFB86{2yGK_|yInn5jG5pM zZI5gjf1u9J2jsbd8|wVDN4B52eQR@y--lEnXqP|6~KO_N029OjO zIY5$NWC2Nokq0CZNG6a}Ah|%2v8c0wq+?O%14+oD&IpncBqvBxjI1DOG4g^W#>fnk z8Y4GIau#)Vkn}9-{2&Qh)EPojgyaZG5|Sk(O-P>bKS(q}_nW4>jdv#7lkAD{PKU$p IuqQ_R0G#QIJOBUy literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Panama b/lib/pytz/zoneinfo/America/Panama new file mode 100644 index 0000000000000000000000000000000000000000..5c1c06372c6dc8610ffd14f74e923bdcb9b21d31 GIT binary patch literal 203 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZI3_m{*Mj7Vy^0M7!Y13+X;F7z1?pKlh^|YIFeoCwuA9r(K9}_p89CmNo z|D#&@P zaZ84pMet0a8|vF2N}tUVWr>6?-}{SL8;R;$8@>|Zf;IZK@=>`bzgtyxz9Fj*9};)hKPPKic8S_hm#nL;5q0yY zbfjdbicB2S(Tr*pJ=38#&gQFoUwB5|_gR&Q9c-7I&gO~w?J?;Nq>B41L$YE2jM$vB zRK^=W5b=xia!dGSvE{p~_14^TYU_J*dRy|8YI9j+Q~ab7+$i zu{Y(eqwQjM$$)$)5fd#L-Lf?j60NgMGEuNpBtEW`ZHwkb+pD>9&&;G~A6TzDKABUG zbmizrPfw`M#-x_VN7Q3srT2CYs>d_m)%)su)V}1j?5Y?LT~p(-JF8oCj}FV8g(lHE zbVMHbyiy$OOUNhQ$`yS(qVmuQM>r{gD^gNZuKdT(qQ#d#X;=O6^dD&c%gp8blU^Xz z-qU>l{V`{W?|-WT*Vs3iya_Dx{kLCpR`~uaFFGrE4y^j?``+GOr}}}oQ|+3w*__&1 zZgHoMV_wv7%(LSJ?63U&_UCi_Uu<`%>vO{Ex$R4d`lgKWuW}C=9k&Pld zMYf9U71=DZTV%V)evu6$J4UvQ>>1fKvTLi^HW&LwHjeBZ**da!Wb?@Gk?kYvY zzmSF@9Yb1%^bBbl(lx7T8`3wcX&llyt7#q5JEVC?_mK7>{X-gv|96nTg5#u{ScXkB Ui(_kp<-yY8SXo)HELfWU8>6UZH2?qr literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Paramaribo b/lib/pytz/zoneinfo/America/Paramaribo new file mode 100644 index 0000000000000000000000000000000000000000..2f05b2364443c95e1209aa9fa5f9bf3bfed208e6 GIT binary patch literal 308 zcmWHE%1kq2zyNGO5fBCe7+bIb$eGC6w_~4{_mN9UI$O8}BX&sdGu*+z$i&RT`v3nK z4+aJxX~Vz*Bz-{Sxf2W=V3PO$|Em`mczt|B7y^LU&(%GIAvh=mWIP;%kl>UbP+k9l d0OTYP4RRKU200BR4RRicrlJ$MfNnPC0stz%RCNFV literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Phoenix b/lib/pytz/zoneinfo/America/Phoenix new file mode 100644 index 0000000000000000000000000000000000000000..adf28236a2feb68f177f5b002ea068db59eea997 GIT binary patch literal 353 zcmWHE%1kq2zyK^j5fBCeZXgD+1sZ_Fyk%As=I>^2SkNXjVd1Qo4W~PKCY%?)FLS>C z>6#0TQZm1OlnVTQ5y8O32!zZ)$jJ2n|Fm}u4FCVHUckum|Nq;uKkB@H%gRct^ z2Lo|<2+(i{2qEkw9-vCFlYT(;{0D+K7M=|t8stO}4RR)k200Z(gPaSZK~4tIAZLSV apwmG#$oU`|{=PVA+i0TWi#u`ZvzhdM%$+%7#tJU#+DlMZ`rOghB^ccTNpXe1CAJ?lj!yO`X+$Xa7 zD}{4#oyhLoEOIU+2v@K|<+gkkc}1Jl+LE_oOL5rTT2tuuWCzSbPnx^9$}7#DD6zXB zTb3N26s5^=(&v9JeBY*JS^f=C_F<7#p7K#WL$%GmlkFRNMEk^u3@3y|c=)R9 
zm(zwn2?~5sF1LbxRAh*$dJ&G*pT3m=xlX(NPM)3EG8$w!$as(eAtOSDgp3Ip6f!Dg zSjf0+^}vvk+3KMoW3$zRLq>-T4;ddaKxBl-5Row&|Lq!M^gQ$ROf$WC&Rl0srq|_i Jx|}&NKLAT8kYWG; literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Port_of_Spain b/lib/pytz/zoneinfo/America/Port_of_Spain new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC)iGx$Y%|#HG)ms7@JMamXMP zH$ikLz1l@^I8eclqg7&_a})%@9GBm5a7W%pudJ_5h##xfzu{(O{LOQE$8US5`G;cf z@>B71!IYk7)mbesE7Rwy5_PKTz@x2})@0ZmQ?-Lg-i61iuCwxGKW$&zH{_e$RPR0G zCSIITars&{g1T*-beLw;r#=b=GkAHZGTZlN=xELkPlje>cT0_S-x|@9`sH%&qF)H- zZmv>o&Xuj`n$KyG%`LHMe`J>a!9T^`uu>>=gI`BtYOl5L7*zx)Vf$><7^x SH-KrNI~d4u2p7`Nytx-*8x)J3LP{?&|36G$-EvQ$HOk-}w2#F>`MA6+O3Uf{mTgZzd$Xp>fh} z;_Op;-Gz2D@tadN{%oU}bmoYi{K^qYINo5df1*LAG?&>M>dItlW!NSbh2_S@$@Zp< zWJ#K}NTM?UK7ub1yPn*oa&6?Hms0sGu>-?5VlifZ? zL$ytkvny5?6jsRXYc6YU`Vv`~^Mx)NpCaMuKilx7F~Zuule&^O+o%@E%@UtDa_ho3kQ!&QGBT_>e(yB z!y$X`bI+I+=T~aUzGk!XbhegOSD95ur|9aXd1g)H6W+Lm(Yh*NF%5r z776$(^6x4fbU*YejPQnA)NSW4ZMQ+xfan2H1fmH<6;G!N@1hJu8;CldP9KOu5RD)z zL3DyB#n1|(7DF$HVhqh7sxfqfD96waq8>v(Pp2S6Lx_qH9U)3Ww1lY1&=aC4LsN*V z3|%40GPLz{>O%DObP7W>hNukD8KN{qYlzwqy&;M-G>53p&>f;YLwkt&4E;S_0vH*9 zq=1nFND?4ffTRJE2S_3ynSi7Mk_$*OAldMA>0so;(EAjyGb$J3<;k{?f(AV!8fU5Xevf+UHNB}keWd4eR0kts;3Ai07h3z98J zx*+-TbP0oG%+sX|k~2@2G)UGUX=CIKk~l`@AgN>I4w5`Z_8{qFP35RySpmqJJm xJzWwZS%jn!l1E4)A(@1v5|T^#AM`TLWpts-X=cfSU`{YQvm_J>hJx8~e*up z2QIVKFdO+qojRu3TI7E>r*n+v+A5~C%(*eYR$D9?=lAu~2W~#d9`FSV8E)B_;nW=VcR;r{- zoT1}}4M=LjI8FT{#f-mxSJS%xF%wSxtm#L8G8y~6(`TY1<=L(KbmF2rGHK1X_PP8X zdA@9~ot%73GN){{nSg_aBZWY_9T^|{L&vMOf-(&)vVa;i+Ho3bdYp^~l zdFzMiw2~@$altLk&zvRG^DgO(p=lBtf76C;49d*A7wxR`*W{%i588sm1E%ns9rop& zz2=oq)xO$z(!AF8q0X-O!W7jtYB;CO6o=<)@t(({RwCRNzd&K4=_3)(rK zeQf4lou}oSo6WrAxmrEGL` zb0XD)$L*4iU!bDytTbqmuGjF74ZE;9{7ecEuyZ`R!SRo~uSD(_s0+qKcDvhMJ8+gO<-y7P>EH~+T0*Z8Gf zpWH1QDz@7VgMB6%>d zf-m8T$Nd@d*#G&j-{{WDEgrvc(l;a?Kk&H>kH^10;!EOHM%{lW7K=$yI2@6ph3;`I zc6@k||8U6t4?Y-oO0FePA z1Vjpk7!WxafDAV8wP2m>Szj6gsl z0SN^p7LZ^-q5%nqM;8xBKs>sLKtkft#RL)*NK_bMfy9Lo7)WFop@GDP5gbT#7~%2g z;sXhgM;9STh&;L&L4pK{5+qEJI6(phi4^?bhbqw(Y`QC$zkFICFOcgm4+aClKyKna DVcF|n literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Recife b/lib/pytz/zoneinfo/America/Recife new file mode 100644 index 0000000000000000000000000000000000000000..f0ad7b9897b44440a9120b60f7f4dba436417cbe GIT binary patch literal 728 zcmb`^J1j#{0Eh8guSCUTF&NaLkr)goO4^1&Oe74r0|t{26NAAbVK7OIBrInXgWRz- zS!hi}h(z>3Ct)E}(~{$S$7msOntOjuliRf4-<_JD?Q(vca`Oonr^8&luU$2_t^K)J ztbQmP`|MZo=WcnJUJ^Hv19|gUl($WH-fetRq)Lr4y_*)9)Q-%$A@RDcy|>C~kw2P| z?{xuH810mW#IY#)FTCPjL6x4W} zRudYDZ8gD>=ty{eKJk$OkP(m}kTH-!kWs8=7-Sr)83-8(844N8Y6e3_!+#&nZ#E41 FzW|kqAr}Au literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Regina b/lib/pytz/zoneinfo/America/Regina new file mode 100644 index 0000000000000000000000000000000000000000..5fe8d6b618e34c4c87a7eac43f7a27af41161d02 GIT binary patch literal 994 zcmc)I%S%*Y9Eb7Wl%-5VyqJMViy|}(1r3t+8kQv%jsz`aOcZA2p+6uFEegTK3$7jA zL`0A)FT<2XUdjsyW0`kP-ZCvYM2iUO_&%@gbmPi1yytT`%rJ}R8@(TIz9RdsljaSF ztIQmpb6s zu9t}GFYyQN%A;F)^=5^;R$r{w3k%$h$}06WyIeLe6{*di`Ley?tMAiO@?#{ep>QT XtO!|>)vO6w6tXHj`elX9)XKuUTnF}q literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Resolute b/lib/pytz/zoneinfo/America/Resolute new file mode 100644 index 0000000000000000000000000000000000000000..5307941313d008aa988ee85b6aeac1cf9aae8869 GIT binary patch literal 1930 zcmdUvUrg0?7>D13E{%;^e<(zPvXaolfrFt(lm2lSgG5q~1VUztB9`%%dW56V%f|Fd 
zxp0}SM%k#FYUWsGYm)unTFo)awN*@Ox#q_F_GgO)nwZ?MG}ql&lq)&& zmgt<>laiY;M{|G8vAO3bG;jD%yXk}PHUIc`R&exvy*YWc+|qMY=dJxkZrygu-Bvy# zx7WVp=4Zbxg$sII;nc7dWo~jsql32KVwGDs^sI$PHfnMIZi@`YwWPbrN)OG~Xj@Xs zcFfjA)s1rJ>Wf-lxJ(w8eW6Qc`qtOvk^UC9q3%O_bbps_EFQ2;ZOz)6wa+$JSLo)k zt=3jpplx5)TKlY!w!ay+Z5Mvj#}3!X<7d)tdvc*XaqPV7Xvh-n`^Y_6eo3C{c-8I5 z9+sW;``yl|F-yh)1RFV%}l3Xe_5_fr{8@ml*OxD^Ya z@sDSf=kz81!y%9V^TV_s2MsxD$YJwuj+=4dkRxXtI^@_H2M;-V#^DF_#}5$zA^}7M zhzt-RAW|^IfXKlR1R@DT6o@PgVFG$-AmRk{@<0TFNCXiHA`?U?h*S`(39|1-PATeMB0TKm97$9+A z1OgHXNGKq&fCK{)4M;cw`glMB63|Bk5|V&ECXk>&qQVFZBrc4=KqA8k4J0;<;6S3o z2v0yCA4q@#`UpWn6wt>A5+q2JAYp>U2@)tsq~QNPRGGeDi+#bub&Dcpk)iGx$Y%|#HG)mv(B- ze6W>R!px_R+gevx)kf;}OUo%$?`+le_ammUK4lxFiuzg`)Qgqf=36mgzZd${k2}43 zX(*xG^<8?^*dY^cJDuO28#isOxqN%_vuXcyF5j_jt?DQr$;SexV`gtYUahDdxB8>W z`g5~qI&OLfSDT(Um!k(3N6o>)v1rqZ=70RPgqD9RvDu%UE0%-6KSR!yY!G&Cq1?2_ zxwloG3uWHO`*q9b!|61K3+)($;c}yAvqyQAz2}5A$g%sjDuvx-%DypZb))Sc1U_&$qz};D;Xjw gdL>6BNhC`oO(ahwQLki*q>AJk{D-|pB;uWspN(PoumAu6 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Santa_Isabel b/lib/pytz/zoneinfo/America/Santa_Isabel new file mode 100644 index 0000000000000000000000000000000000000000..e1c4d161cf4bc7db648d23d1ba40f7b6c1e4b2a0 GIT binary patch literal 2356 zcmdtieN5F=9LMn=gi@sP4M7P=kBTvrcoGybDHUZ91i}>o6`u&ie7K4-@PV+J&6Rq{ zYMRSVYqqg6q6b1NXKS&Qb1qtmT8-3%D3dWYvhsG`zy9p6{%ig2&UxM4?e6aW@p*g7 zsw_9&Cmfy^?BRWQqrDEd-DqsC&-bx|U-Hn|Su6a8T z9P^%uEOok?-tE88P--q_7wSvpGt3XMDf)6&iRuZNFTFA8s&`GCc0 zbL+2irK4N@(sWkt>~1l`&YqItCz{NNqb(BL{HBrvO>);RtwQQ+WMuV56ST z)bwdGI(DiGA77;Jz9U>k_^0ZKffN&YVYH6w9A)nL_%}VKb6-L2YSX-Gy*c8js!iA_wKor{y5Ln(cW%3?znUXkjxAMN zPsho&SGSt&EhD7ii6!Q-J^kYN`TP0b<{vP`UjLn!&-Yw``v_C_xJ7j7YiG7zrwHjd@K!F8?rcLb;$CN^&tyH zR_JP%h^!G=B(h3mnaDbkg(53OmWr&^)h-rUt*c!wvR-7t$cm9A^LJP?vS?)0$g+`j zBMV1Xjw~HnJF<9W^~my(^&Z@53sM)PFi2&P(jc`#ii1=KDGyQ~q(Df8kP^At8X-kOs)UpY zsS{Etq*6$!kXj+dLaK$73#k`UFr;Eg$y{yCkfOQTsv%`V>V^~!sT@)|q;^R0km@1j zL+WR7C?HZnq=ZNfks`X>WCB)sU%WLq?SlAk!m94MCyqY6saguQlzFxQC)3S zk+LFnMGA{l7AY-KTco&1b&>KS^+gJdR2V5SQe&jZuC~fZnO$w2kwPPtMoNv;8YwnX fZT$b2JIH1|&1O9=FC{)HJ~1vYDKS1ZJ~8MoSood` literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Santarem b/lib/pytz/zoneinfo/America/Santarem new file mode 100644 index 0000000000000000000000000000000000000000..bb469d398cecf03ab1a026a9273ed52cfeb69ce8 GIT binary patch literal 626 zcmcK1JxIeq7=YocR?$cm!No!A&+2g4MVzj6>0kvlAUHWGf{QMSle%^nK?ld`bYm(G z*^1yGhz_NxUEBgH_)}Wtyf?ZDf`ebky+8ZJz1VOSLLunRr(+NN^wO7%~4f74CP%=P&GF#pAQoLOWT@!^*7X8 z_n5BFPO5t8S~i@T-#BU4&9G0s=kt2t;z$i{-RtD>ls}XT^ziPc8tJ;#*`;+QT4KLk zt;Sq>LKt&%6^k2F@*opm<{9 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Santiago b/lib/pytz/zoneinfo/America/Santiago new file mode 100644 index 0000000000000000000000000000000000000000..92cf5597689b43f56c7ff41d27cf1d1fbdcef27d GIT binary patch literal 2531 zcmd_reN0wW0LSsmJA$Ny8X^`Fk_Ey69tjNa$ctivKE;EQA)zt?G~VK+lGI!!<(B2F z*2K_erB-&p3NWd&mXL}Iwivmh{t0_qF)K?;eLCOUW`FcY|F(6X-TS$FfQ^5C-{V?U zS)OG5LH}An)%wtDXW#8`NF=u<7&!T-+omM5yeI3o|vU8y|c%-OSzZL0l zzMZ;XzTb ztNwH4(dg8ka*6UPOw`^HG0G=nx%9PylwaU6>G$Qh^1l)yC%iwT0!Givzym*uiM=B- zsC7_Wv!_Q+s^}DxA0O7ixx2)*9p~ z1v;!FMa}rQScX3nre?mGA+N8!q;A*|AtN?>iCHzCGBWiKabwOw9W{Ma%uca%wC6%4v{ELe zj|hicD(3}ei{zkgd8_ZRO0g!%lvDm9_1hRd|Il~hwi7`*?a2X=eqdZ@R2>!zT8FeO zIH?v^9MPFcudA%wXLWYSQz|E+Q|A0!s}=|El8eVZMeeyKx#U=lxP7QdF738dUdMcy z-{&WmIk)QNZHJXJ^NhZu-ybQMocaS*<+PCW|hGs=Hp> zBv*~i6L)VfmBq)e6RR6qbjkjU;vQFtE`9VfvBn|w+Ol4;Zc3ys%WfBCzj*8Nx$mm- z(OI%0XpgGs^^%o8Y*tlmV{-lJHEKgkzr62_ELB~6P}V$GDr(cV%etmaQ9t`tegBFm z(ctrhZcO$SjbrO|^VDxd^YMKB!19V1&t 
z_Ka*A*)_6lTeELu&+q!G5J z6G$tNULegtx`DI<=?Bsfq$5a6ke(n-LArvp1?daY7+cdBq&2ptH%N1k?jY?!`hzqG z=@8N)q(?}TkS-x@Li&U>3h9)sX%*5dq*+L}kai*cLK=p23~3qCGo)!q*O0a$eM1_D zbPj19(mSMiwx)YX`;h)24MaMKv=Heb(nO?-NE?wpB8@~kiL}zz^b%>Nt?4GxPNbhm zLy?XmEk$~YG!^M8(pIFeNMn)CBCSPwYipW|bl29j7wIq3V5GxHi;*59O-8zmv>E9$ z(rBd9NUM=v+nQ!0-L^IDM*3}Q8jf@vX*tq!r0Gc4k+vg!M;ecG9%((&d!+eD_iau4 zk^bA7GXOaUkh1_e50EnfITw(#0XZL#Gs6169ufiO0C8}D#O9~QCB!AiCpu#D9dQo( GE#Oc1o$gKm literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Santo_Domingo b/lib/pytz/zoneinfo/America/Santo_Domingo new file mode 100644 index 0000000000000000000000000000000000000000..77eab315b001966ddb1ad77e4cf3d6413cb0c260 GIT binary patch literal 489 zcmWHE%1kq2zyNGO5fBCeK_CXPr5b?5uKP|2zc{=v{91n4;s5fF7Z{wYPcXJ$zQFja z-h;W|Ljdz#77x~dO98C=J3ZJ9wgj***mHtYU|N9X?>!H!G#@4~GBYu=z#%I$>;M1z zDi|1mWC8=r|NqAiFmnF?zjFfv@BjY?b}$NpL>R<;d_x$5U4VqE3lIkbu?LXh2xK6G z5W-#&0;&glmVBBeJ~9S0uT)f1~3f_3J?tn4iF6r5)cgv76uxI3l}h;Ot=6&*Nzha literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Sao_Paulo b/lib/pytz/zoneinfo/America/Sao_Paulo new file mode 100644 index 0000000000000000000000000000000000000000..552ce7c29228ef36e2dce0ece5e782ba67d2b607 GIT binary patch literal 2015 zcmc)KUrg0y9LMnkl^ZxUxBgkFUt^9DC;Y-8M6)s(0>QD6pN3?nA_!8cME)!Ckm=gk zbY$g5t37k8x!m$`(}^w{IZd5&q@%SMBFRlfu56~KBXxS-kC$E4oj-QY>x=>8;`{yt zHI4Oop?^KMx$p4eX>%_=kBPar4?b%?&^4}XuFqjc!*^z~dDKdz$4qH!)Q3i&G8xr(+KfwUWa{+a?bP0rk~w9ZW*!|fS)nVM z^*eXbw{3ZBr>QtnZ=*|JGxIu2?K5TX%ClSQ zv{Gi6`L!il6?(t~MOhmBR45BGM(e`U_e*u^Wv$*bB8x8dXw5qV^8A@$ySSlSUf6rq zE-BqDFShTqOLM<8%bGgu^6|#hmcMFi6Gx>kdx5Pxx5BKvt65j>pKBVf2HMzJE30}7 zv}seZyma6`eYqx6UfG$@)icv%P3t{&?Sx@zUL3d0f1Z_=S$($U`+c(R!7uE(V`t3z zk&o?$y+6vv{+QkL)(-RfiI4P+oN_crrg@EWU{v+6iOzC22*ZN zCcphAlvWW9LUZO<@fNrXX7n~`j7&V3Xu|#8j&KADv>h1u1=&-ud5U(6{!^|7O55~ z7pWI17^xU38L1g58mSs7+w1B^3irCoksA9<4rD!$1wmE>SrTMTkVQdO1z8qkU66(Gx|Kne#_QGw zSsbrh9b|cs^+6U0Ss`SJkTpUU30Wm%nUHlt7Ru{Z3RxmlIHzGd@kO~pa3+@mI2>f*&H|@?;P9*`+Kt#JJ$QbzRY&qu~ zb8k1MbLPUum~(jc2kIX{<*#PTSX*Pw)ahcSrE@lA;2gU?&u@S9M}PHCe{}44eEBv0 zc|Tu)HJcjptbe>>&3Ab7YBg`3lXjU;M|SyA68udIrsdE0DlzkMS5m@tO^*IflOKMe zDI=%!xgWap`3na$bzo4^dc%_b$)_^CLnUMTNAg0jMP96~lb3t}$t?BA3`e14rDe;^ zXHq0P#;w_-`8w;DES)`^q%U8J)j56R>gfJWU+Mfwb9R5LxmyO*S$|SpHCH9CrBCLT z^-KPWgHn)wR0{Ljq;Mi6MX8-ybYr~~KWWvHLAShitx`*SYPGDtMCa{UsPm7c>4Jt# zUAVhg7L~`SyFOK3&$_K1_k@(ke52mXo8rCyt1ON_CyR$J%aW1L=)B7(%{$4^6uU;ZR)9z_Zl@7a?6?&iTv>(YVzOD))aGUR#g07pMCy-<@WNf2v{ET;Niji+`RY=^I!%3#T}2^ zf5iWeM7U?Z@skR3p_0NDd%6Odg%wgK4(WFwHBK(+$e3uH5p-LN&=f$Rse zA;^v(TY~HfvMI=}AlriM3$iiD&LCTZ>>Qj*e!>DZcagtGuP?xyPWouEACHy%)u=H literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Shiprock b/lib/pytz/zoneinfo/America/Shiprock new file mode 100644 index 0000000000000000000000000000000000000000..7fc669171f88e8e1fb0c1483bb83e746e5f1c779 GIT binary patch literal 2453 zcmdtjeN5F=9LMnkqQH%ZQ;8vb8FWLuL3o9$5k`URY6U)Y(?UE3#rVT< zR*YFDf|Ayni4SWwHq3b&mt~`eO%k}b^FAy8>5u+w>&t$;e%!&IpEvGR z-Zfd`A2->2!ozi$hxe(9ANJ?zJ^i7o`=tck^V$z;Z>?YNYndW?3oq&3_WkO^wg^2m z=l6!8>R53#-KR(Ab%;NrJ^EUhPh1;)Mvi^&5##48Hesdb*xmIy=m9w`H%Z)*G*8CPE>zRQ9WpLBQN{f_SI2)D zt`dgA^o&zKs+or`>sx!ys9C-l^0w`V)a(@jIcM!h;`Zz>FGv zB*zAkG<-@YUv`T-2lnZda}6rB>qVV*v`nQp)#;2^7O2d+7MZninwsxiBNvp7s_euE ze9|x>fuGjy37}>mM5fY_lmETdpuf~XP;K(-=s*-%&&xJFiNiU4~kX2Bl3~q z1ER8JNIp8yCaP+V$<*7-ulRIbVydb;yRY*i1vPNW)$SRR#BI`sJimcRXmWr$uS*+Ep7FjN`USz?@imhhJ$eNKwBdbQc zY+hJ5XBG~uoMY+8+L6U0t4EfPtlw%1fKBc$bvVj{)Q6)$NQJDXL`aRS zrbtMYILd_72`Lm(DWp_Lt&n0N)k4aJ)C(yXQZb}tNX@LKXh_vK%7)a9qi{&&I7)}q zj-z-;^^o!*^+O7XRM2Wlh}6((iilLvYRZVzk)x1EC6Q7hwM2@ER1+yDQct9yNJXut 
zq)1Jzrl?3&t){FVrON6L@X jUtDkg|1SRy^Iu`1`R|b8nxB@HmXYGh%uLHn%W(V&DI1f# literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Sitka b/lib/pytz/zoneinfo/America/Sitka new file mode 100644 index 0000000000000000000000000000000000000000..48fc6affdfaf7271c2af42ef24ec852d10e90632 GIT binary patch literal 2350 zcmciCeN2^A0LSrr5x8)LdP+z{q$4U*)e|GxhE zmM3#v|2R?RKU_|qxqPQ=H?PyX6V>>C-H|RdiI3{ zDrtPap7Vi6B_D~@x9|T>iOym2C?&)TB=H zz9iBnzSQZL+C;|437vWN1(h|lUuPd(uNHMbr{&8vDyLb=-1c0RSH45uQ5&c7eND0; z_ZL;@xla})%@9Qw7s}$vpT(UYdF5T_J`*K6Rp0&YadA&DN-y5GPu$yjNtf1AbG)UqGX%kr!NQGWK6tccw#Dvl1z)> z>V{pqW^ajFS<`Yd*Nfp{3a4G+989I=EK?T0KU#?CLiPayb%etxaqJB6=K0I+s zH0-=89~l`IYnsO7qeH#ov8v0uvG;&_JZn@x(Y#AF#cI8_yjiWivPU=jDpd2C4&CC( zP%Q@odfmlX_0-;W`Se)2c&2leT(4t9YoJiJ^<5Pk3TDZTtz)7+eo}T+9}yj)nYy!R zRCP}Ls=E@j>KZwxH(lGKHV?h0w|w5Awsya!w;c?#`cP^*jT)tDU@s~Lva~7W*ufuJ8a$T*RKTFpq2p<2yYk-;LPMTU!v7a1@zVr0n3n2|vvqeg~}j2js^ zGIC_-$k>s=Tg~XLX86eXkpLhOKth1T00{yT1tbhe9FRaDkw8L$#KLNVfkeY1qloi86-4FY>?m}(XpEFAn`#0ghU7l5fURLNJx~BFd=b5 z0)<2h2^A76s|gkoEvpF^5-+O>7!olgWJt`ApdnF1!iK~R2^pZtR{F!^sFX) zNc^lOfJg+95F#-|f`~*B2_q6mB#=lXkx(MBM1qM#(`v$r#M5d5ibT|ELW;x`2`Um* zB&sExZnn&<%E`&j I$(G(f0AVLoegFUf literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/St_Barthelemy b/lib/pytz/zoneinfo/America/St_Barthelemy new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eC#89ZrJ2Gyfx8HM9?nNnve zyG#aDR*(`ArK`qfH+xddQPYm&Mz@U+Ei@N1B)9WD_Qsl~rkUQg%=&p))Ny?A|Ne5X zEu5X^`qL{$f5MwrtA6v`{{j8W%I*2xo?q3}QRXXotbD_sMHOow4!qzFdtO}8-}};% z4$mt4xc6nN(euj8GXt+?Z1Pl&{UuN}_(jiaXYUKF?y|(Q`q%ovn&50t^%v!V*S9Th zsHravyisj6)V@;YeRJu=hPnqI@V+%Ssp0L}`QEkJu?_1cW_W$W4mPY$NbqhrBgV6_ z$N0d;wgaAbS_cQ}cW?9f%d&6tZ;z~e_wAavO}9wn_g9I1y`tp4ogX=YMq7SSHCHtH^W>M0`j9}&F1Lw=XkCYrM1)c3u6In4>}s`>B{rzJ9=T6XMq4mA1I!P<4s51T90kMH_L zYfZg8R9+ztFE5ur-M?5IDJqqr8!XY5Qy`CCK2iKUB2ykq%6ES0kto}vGo1Eg6V&nM z1gAp{QNKFT&IwyfFd_ri3iPhpzsJENb| zZ&F`%hWlsd%%r0#?)<)@zy5EDk2osg54XtyUn_Cej?d-U?`;t0)UKE3zOYgZoVPu6 zUTMB{{`A_=pj(PR8$9ZX(2&Wqf(iZRg%U3qA51*)xIHv>aByg&ZC}t58NAS+XAj#o z$QoYewv+3+S|dv0?UcvAw^HX^6iU1E6DvI{E;Mr5YHL(Nd+4IHrPjrffslJZRnXn! 
z4~;(lXmHHtiqIwd<_0gVskg^&%n4q$yxhM0xzymeqEh>cB~z^NIR*9vE5*tfk!fdU z^tLjOEC^){PY!17nh~1Ry=QPz-6f&P2Y(7)xl-DmJw2@KMPuwKYrBgauUk%C(JHRG zG+s`dw?kY#phMwuu#6S4z()pCH$gZb+d+HDW z{dZCS$=`bFzb732^huX191g6C=whCu^|Lb1j_c%eH}gr)aJZ#8qVLI{da`p}Q!@1X zYW<$7-`DWH{=mE*aWq!&mtkZu@tJCJ@DbwiMjAT2?9f;0u`3epy&FGypM z&LFKpdV@3v>5fsi2kDPdHwftv(jufsNRyB*A#FnXgft526w)fBS4gvvZXxYL`h_&i zs5^$V4CxutG&Eh~fwu9|eFGYYbPj19(mSMiNcWKTA^k%dh;-1XTZr@!X(G}^q>V@) zkwzk&L|TdT5@{yVO{ASjKaqwa9YtDd)ICL-igXoeE7Dh_u}Ei;)*`({nu~N7X)n@W zq`^krVWh=I-D9N5NSBc|BYj31jdU7mHPUOO*+{pMb|d{p8jf@vX}MAN9BDezb)@Y` z-;u^6okv=a^d4zG(tV`;NdJ)yVAMMR*#btr2aru*)Vl!L2FN}@HUhE}kgb611!OZI zy8+n_$bLXJgi-GZWJ?(Jo<(mmAo~N^Ajl3u zwun*h5oD7X^)5lSiBazpWTPNE1=%XdUO_esvRjbtg6tP$!yr2b*)m4GXOK-})Vl`R zHb%W~kd1@v9AxVtdk5J($nHV5kLw>ypNLL>SSA0DpT8bIv3ek-k4aC_TWMH!dU9HF P%CPLz)a2CUl-S<@u$syt literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/St_Kitts b/lib/pytz/zoneinfo/America/St_Kitts new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eCbxU^AOkY6*VZ&)VM<<vyPpTq@kQO9 zT&Amyy{t{<-nWu+B+Z?`+!i*awf<<@v-fhhcx^gzCEcl0(|xE(KR0ilrYGdNH#9Hp zro7%HX0YFoxB9>ix6dUk&sa$XsgY3Q=QHZuVPC|#vsw^3cVuPU-&voAl*!e6Ecq4P zf^JcFEtX}ynczF)KXm=@-|$!G=nUx%=?>`+ap0*QATA(2AWk4&AZ{RjAdWoM6T}t7 Z7sMIF8^j&NAH*TVBl90FGb(o}^95{L?5h9( literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Tegucigalpa b/lib/pytz/zoneinfo/America/Tegucigalpa new file mode 100644 index 0000000000000000000000000000000000000000..7aea8f9989fbdedf0527a172b41871e70d439060 GIT binary patch literal 278 zcmWHE%1kq2zyQoZ5fBCe4j=}xc^ZJk5+83Dg;{qVD8|eVP^#MZK-s1?z$Lclfy=cM z7Z{ir!SMh8bv6tP|NrmYz{v9d|LO${96r7w49+e<92^2P0|Y`yu=fW<3(&;>|3UVH hxF9!xXplQVG{`L=8sr`j4RRBhCedA7Kqs1U0RT7DJ_!H- literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Thule b/lib/pytz/zoneinfo/America/Thule new file mode 100644 index 0000000000000000000000000000000000000000..deefcc8df5a1111fb1ccacd1d67f44165d9a736c GIT binary patch literal 1528 zcmd7RO-PhM0Eh84LqakwQZJxJdrw&20%>H&4b}#$<{iU_1J&r$4u=#|` zk>>JxV~=?n?rHrQ^{qok2P)Lg`EtE$CROd8ZkNuv!y;z1Ou8ncMQnG9jJpvk+%+ZI z)AUKiXC>)`{MW)8wO#M=+!K34ESsQ3N2tM+B@O0Zh2PJ-*oD;Z0 zs(J0LY)R}BEsK+~ZA+bKn;w+yt2v@$v{iO4xI|ZXiM%`&F1jm|CX$a{EX$k2G zX$t8IX$$EKX$B6Z6W(YHiqmB*&4Dp zWOK;wknJJ+vo#w;c4%w1i0skUY!cZevQ1>4$VQQ!B3s4(Wv`)Tx(Q~&PG7vsU7zP?{nsD;+xp%3Rb81q1{ZSLwb6&?kq|FqGss#0? 
zOs&-_arUrWo_bjMyhGwMkEcP}#wV$nzxia=`Eiwb_7}Ns_>9Ur`ng>H;!!ob z-7|9@J*cwlel$5-2ld>*DRaZJr}T~KubAAtZ8|slk@$Ue+CTcT=b$iXzQqizURTi~NRb_>$nmA&D z1sip6WWT9SS*oj#wU}k&IePi?`((veOH^oQv#fkKTh*)&No`+(y0g4URy{bTR%fP3 zxb71bo}7?1f!EZUpQf9&S*P{dQy0y;=n-9ic*NX2x?ivFK5p(AYS9}S`pw3Hm0E?~ zkxf0#YI8xK+#3n0hLjFz3>K-z@p_5mrm4th<F=2x!yR+OM~ze`vPXbn5NVAEdpkPqmK@OGieB>KHmCotNs>!vlL| z$Jga*XIDfXc`r+KZK#%A&v+^>eo8|8)c?dQ7JDP?d9m2*FL(*D*on8i#8~X(lX1!J zPIdQ;6``x!5(?aHjObnSBGBu~28!|a$cF6RQ`5_ZTW{6A?nIkeuWR}P@k$EB$ zMP`aj6`8BkP8OLhGF@c8$b^v@BU47^j7%DtH8O2v-pIs}nLF*&k-0nVIc@Tg>^W`vko-Aq0+9?NDMWIJBoWCX zl13ztNFtF;BB?}j>9om2vgx$xMDpph2}LrBq!h_1l2jzCNLrD+djG?|`fPjyHoEzt S`Guv0#rdI#9KL%xx5K0lDV{5HB z9&}Q!q9On+5H{kw_6aVjP>^V!s)w&B&k z)DItfzy6!Vpkt)-?U@U$LH&KfHg&N)PhU#OQscfdIk9A`m~`dJ$xx=4igwDW;W%-5 zd`|w*_nVkLGbVrRJSP^8_3B0EhD7XOr@rRBUa`36O(~9biff-$a!GT8SX#4JE(>lK zaRmXne8YONA|*%0uPRa3Er^#1k=ZICl%*3dtWeJ3#S$+Mh=ap<)BX2nH zky=$6D_7TkE0VK+k~ePoP}~&vrF5ls3)kGVPKjv~DWjk3)T!NK%|M?{JKL((b|2C1 zle^Tq*2i`Fkxj}|eOP8RB&$qcqs%Iqud-dGGACnFt&jFfuXCR8P9(?;e@=+pGgoBZ z`A@~hx8n89AG|JZ>G(t6`r;vRTitj1_VymNsc1moQSqG0U#In*`Fqq|iw|mFa*^`Q zz9I{vJgQ*ifZTF%sk-~6le+NhTIFv!s_*G9R7E@Y>EbT8+PX2Qw>1e>;>^>5?I9Jo zlB7$szfz@_a%I`dpGDaxP8pm#D|VcillP6jBX+ir$@>SM7v&YFhHpkVxk62`H zVee&y{(!?@5^xlA^3A!|oZ^7liFqRaz61YaVYBut{AxJN(vY9vOr{o zRSt+tqWUW@SSY)+Uvs`4o$byj-BTMG*ux4b@$f}WLBkM*M zj;tJ6I7qDS_4004V}e1*8l}9gspGl|V{?)B-66QVpaWNIj5( zSWQKcl2}bmkfI<}LCS*E1t|~B8&WrMM`QlHARYQHC08* zYBhC53X4=0DJ@c4q_{|Rk@6z-MGA~m7%8#U)EFtU)l?ZNv(?lYDKt`Pq|`{Qkzym& e#{Yl0V@%e)ChKYbOm~JmJRgxKd&+gnGpVOmRCJdIvQ*xgZo`3u-tuOrt@i zgSl@RqGFJSpr$rWiQ=B((UcTH5AL~eJKyJRFB;!9)8jn%bH};7{eAy2aS7AA+y3#? zH~+)SbHluRZ}EZo$SGJZSO0KRe6uw0=$fzdBG#^O)$3xnM66#jQE%vyEH+N>r#HC| zSDOd7*PE+?)mHDH<sWb@>Y$MI~_yBuGCPu zJJMg|Pw^}*;Qbtrza{wZjzI&4psM?CtdqXCoTFP~_6qT@YzLOfAr^RMcY5f{~| z*sZ0f+pQ};6ZUE8S=-{`b6&Gc&xa;OoUh#^FEpPgE|h1;i(5Vt&aT-BA&_mBb0EBxFj0{9}o@O zmB<&Gt`}~$b<#~Q5slPh>3(jlYFzA)O$yVM$Ci(E)32s0&x~ohS<0KLc|xRaF|Mof zit^Jfd%LR_{Tk_39sI;g4XbtQx{XBZJ8rVgb+$rJ68Y`Xo2 z%SDISE4t&1T=h!WA>C=n=c=>UM(x{muJWxtDE-`Gm0$S=*|oB#>Q=N=`j^FsfSd#w zxT}WX!RqIS*FL#jKfp{p|Fu+tI3o;p(w-_t=vPq51oSuSE^c#s@5ze0@m_LMQP zM@7t&yK+p}Rx#$9haT&5SdBeYqsP^5R&ndg^!V~+YC?Lxo>(+XO^VOflXHeEVc#vM zES)K)24u;%9d;4lI9X1M3=-3-f_&pKA4-bwP- zvvbQ-Vr;FJnfdD7Fs0`tW~;eg2lTw?6g98*l1%EAC6dZZWOA!ykzBM+raX!h@8v9( z@1G49sc8zO;$Db)M&6J(uVC^?&NOPG|lKo6YGwQe4Ny=`7q~YiNCU zw?3N=v&Yy54K(j)^S))?5iw@GY_>YqN6f#EUZwe=HF}Tu3-dV5Gv`)v6*7Xz5F%rU z3?ed$$S@+~hzuk$lE_dZV`*sy6B$iQGn~kHA_IzyC^DqTm?DFUj4Cp$$haZ{i;OHX zw8+>ZgNux=r5Rpid@aoYBO{CqF*3%;AS0uU3^Ov$$Uq|_jSMw1*2rKZqm2wVGTxSE zz>yJ0h8!7lWYCdOM}{34cVyr#&B!A|Z)wIJ8GK~)k>N+i9|-^w0VD)S43HooQ9#0g z!~qEe5(!Ha3M3XtFpy{<;XvYn1O$l)5)vdPNKlZdAYnn`f&>PM3=$ek6B{HrNOX|! 
zpcNm!5Fj2CAs|FZjF2E9Q9{Cm#0d!$5-B89NUSVPu#jjW;X>kt1PqB75;7!aNYId| zAz?$}h6E0Y91=Ptc1Z9nP4tlPA@M^3h(r(xAreC*h)5KXFd}h80*OQt38kfpB@#?a z6HO$XNIa2%A`wMGio_HNDiT#BtVmpuz#@@FLW{%}39hAyE)rfOzDR(P2qPgzVvGbC zi82yqB+f{nkw_zH`aJQ8^%^p+;} zNboI9^pWsen)o9J0CEH%hX8U6AO`_*6d;EIavUHB0&*lEhXQgeAP0k`IU10|!O|QL z$N^z#jtJzCK#mFIpg@iaf*c~q xF@hW<$WekECdhGu94NNuyCVLl7mAxXT*Ax&<8KcQ>>e2GZx0Cx3<(T&`x_HwlD7Z= literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Tortola b/lib/pytz/zoneinfo/America/Tortola new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eCmnJ5w3NlgS50YOnT({xr$L}ev0!n+jJFzEWqhQ&ivqqmnnp1o21F!!oHpl@V+xnDOm?@_1x$K-UKQD=NL^3(2*jIVZ^{JiBg(_Fb$ekodM&X(rNuNia9 zZ+Yo*E-uBKchA!ode2rZvEy`0`zX~K5~EuiBGtw7etl`*4b^tES^r+qq%K!?%9T|I z)zxyJv~N0Wt`*nG^@Z=7KeDRi&xy~Q8zal4V`!DS*{eWq^(a%fI-b+FTMAU?$$b6S zm(!I0fLjMXh*z$iaXRS5a233+r|z<9sOh>mR=Uj&Gucas{MqemyVm@y$~IxVJ~O1luP$KJl6#vSd{>8HO^PELRydJM{Fn znd;%UYh=cWWhQgOHktX!43o8Bt<2hG%I{Qmb>T#2EZk(Ps z;HsMUMU2k%H>&x&{Q6Pfezl;iSwB|0L**?!q8C=KSBo-zviOBM0`Hihu7N=LW>*jAJ=}S!8JQl}tSR#9-e=%!{_#gP8~Y<3wr|g_0TCFFVv4?3kXyYDIrorq=-lr zkuoB6L<)&i5-FvlttC=Sq?$-Mk$NHpMJkGv6sajvRHUj%S&_OTg+(fhloqM2qb)8{ zU8KB7eUSnq6-G*o)EFr;Qe~vfNS%>FBb9cvrABJ)Xp4^%tOK$T$Vxccr9jrg z(JltE8jf~3ko7effh^!#8gvc5qi-@cuvW&<&y8gfO7-m;9&93M0 U%uxx+35mlqladmW5)#Ay2A}k~%K!iX literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Virgin b/lib/pytz/zoneinfo/America/Virgin new file mode 100644 index 0000000000000000000000000000000000000000..447efbe2c967cc5642b58f51aff86b67073134fb GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eCs%|1G7Zf>_9Zu4~Z%O>Y+=TT|-i_D#or}HywB>z^mx%Znn;vbo9 z3O>9o^Uh6}`%YaKomi~r@Bcs+M00du=T~ZB%}@INHSenjazE2WB`H-jIc|!xld5>^ zQ&aN8R<(HeoGBgcld{wi6Bybo<$W)iiX$tfvf~Y1)w58RgnD&#%QUGeit5^`?`3IL zPzU`}Rq$rMUiQaLwfy5dy6*ZXYDGHFJb3XP_0YiY=Ha&vsYklLHjnn7k(Euu=CRh- zrM}#lRrT9t)i0wuRCZ8>M$hYpoNcOM=(t`pzE(YcV#qXp(JA3QN6p&z8l`FDPSboW zAWy7_nI{uUTKsh;vhJ2d?i844&F2#RcDY_Z?`O6CBfpMKj;al(C-u`~=hepk8~T~y z<7!juW&Lbwx7r*YGh1FeDqG7gnddroN^8!bX$!@qZM@%f6xB(`)pirlDvevASKAXI{leKv)fFw)-2*q&i?w&_9bK1IPwu$xZ91)by%{DEyd;USAI+}3 zLD@BY)$E?=mzPs#%`0EF%bvc&X77d7(zmI{?0d6BGBfPwt|^&QGpA+!2OrPd-|F*u z-as;AiamGxrn~b@_f($uwq(rW?3?S(1&!f|FII2w7JENs?`FQ+7tQuXvz05uJ^wQD z?>k7^KX*Hr`1}7nd+mMo;0a_G$TpCDAR9q;f^5ZU_kwH&*$uKCexCgx8$x!3Yzf&D zvMFR&$hMqzU&zLgogrI8_J(W@*&VVyWPiv8ksTsiMD~bm(rI^zY}0A?iEI?vDY8{$ zugGSR-6GpX_KR#7*)g(ZWY5T^kzFI(cG`U-8%K7IY#rGy6+;mL#qdH|>KG(~dPEd5H4L$g#~X?l{56@ES8A4* zQKXJqt`$0$;$>uN&aIq7lq8K1m7GdGsavST>HqoH3%A^S)62eR|L61Y49+=k_dv4M>sp);n{V{&OoR@O9HfWpF)^Bt#n76xdGTXgaw?%*6wZW;YEYx4t z&3Eccv-DD)*J&t9)XT|3oGa79^=f3W)0i|ue(gTky%rHB*KYT8ubXdky{g&0(byn2 zYmU23CyvW+8xOm8^2_vY>`eDw*A%_)Um*9JbLGLcOldivEPtLHCao3y#rJur$kssd ztZ9`ti*HHWc_G?vMkj5b-l82wHmP4so%;7atsT1^(E!<_0q#!iRKH0(*M1{`M;2+9 z13RSauJ@(ewpH@Px`oocaF#rommxi-BuP+mg7h33DNjX)NN}Gq`m}$GgtUp!klVr9 ztD&QYR^8WUzP+JgdoF7Dj#JvZpsPIlUb*&}^t<$(^MgDWUoFq4d?hageJv6FHb_Kk zl|}~4m&m%k+V8qo`d5CaQPqwPC|#`4dnf3?qA41)X|TrT7D(KJo;oNgQwC3H*7%5F zG9>ONebFye5__c=wo?leVx8svQkFoAJJEeOZ3&W zojNLKg^t>vUq5n2riljOiOH+kna*q}^&vurUZqjAvQ=H{Ri*!Xrlv6Zqvaa0P#cAWQueR;} zx%vCYM_t=@_-|j2{lG_kiHD#0d}TL9e7*y_J?(tHFSd)nz3*(V-5!2EHq|SmMw>Iy zoXO^-d(FSNh{xlx5b-`<4~NaKT0J!LH)cMwoGZIOfat=~C38E;7 zrXZ@aG`fN)%hG5IqArNOAPR$M45BiK&LB#IXwA~74Wc)Q;vkxXR(0Hlm5 z4E;eA2+<%!g%BM=lnBuxM2(h4j}S#dG|5mUM3)R@GPKE1CqthMg)%hCP$@&F45c!( z3Q;RVua-u!5Y0kV3(+k^xe)C_)XUH>M8OOVGgQpbF+<4=En6BjL-cHE6b;cdL)8#n zLzE5CHbmVJeM1z^&^Scp44p%i&d@qU?F_wJ8pSg-&rm%>_YCDjw9imKME{HgKr#SH 
z0VD^OCJB%%K+<4o^1w)hrO5;%6_zF!jATHv0Z9iWACQDVG6G2nBqxxhK(Yc!i>1j6 zBr%pIGmO+&n%ppw1IZ2}J&^n`5(LQ*BSnxLL6QW?5+qHKJV6p=X)*;#m8HoQBw3aw zTaa`?^2JCPBx8(}L2|}O8YF9sv_bO5NSvj~93*v?CU=nJS(@xY(g(>OB!Q3&LQ)9H kAtZ_Ne-n*Bjr&Y4hnQ?er4EXYi;js(jg5_tjgATU3t;rI(f|Me literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/America/Yakutat b/lib/pytz/zoneinfo/America/Yakutat new file mode 100644 index 0000000000000000000000000000000000000000..f3d739901c67e857352693dffa07b52acfae431e GIT binary patch literal 2314 zcmciCZA{fw0LSq^2wWg2N{9#PcJcw_(E|!xiZT?UD-sabn-Ezjc88TDDZluJtsF;g^Ht4P&GX;+e=hfKy!(B9 z4J}XSJO4T^^9`3XU@o8Cz2yk+U-Tr!jTQx1&1d%PA3iYFOWTVpQDr)(0}~ zz_7S|?~uG>YoD0g(ktWZH;4r9dU#LCe=`R9-;I{Pujcv|^WBRu``d(wk*r z{_m>D^^o+W&J~_Zi)Hc5Z{prhqUC*4pNkTmrtg3Mgm~c9NWJ{vLGfVgWnH@aJ+-3x zGrh9;fGS(^jxI0TuF8M8AS-ePMa8+((i^)?c#n<9RaY8Ckjgv%f^umIU;poo-c^>eW8KqkMm5X@9{*<^S$sxz-&NYd_AE^|Kd5!$^#LeEPI# z+TDUU5ZuD>~ELPPGpMbI@id}I!3hmi)7otRk5WoSw7!7A=(pWWJmQ;(Gg74ot|;k zIsJ$3O46!pY)Wsvwoh#v8r9ps=ukU)hIRLGzv@}nr*{r|R7mKYkgyv*5#cxS2?o1E z948p;YjeVa!GYe8Nc)bl?>PI8x4$N@uJSqcb>=>1ZjHHRKBu(OzRb7gnE!Jf-+zxO z()`Zlyk;(+-PicboS-?&Po3Y1-Tg%;k&z)ovzoD?9ULDR9mfn07#}h~WQ52Nkuf5J zL`G>f!$ihuH3LOPiVPJQD>7JQw8(Ig@gf68MvM#@88b3yWYoy8k#QpfM@DWnL${i- zBZEgqj|?9fKN0{W0!Rpu7$8AFqJV?}i31V{tBC{>3ag0)5)337NH~yqAOS%lf`kN# z2@(_}Do9w6xFCT+B7=m+YGQ)~2Z;_69wa_UfRG3wAwpt=1PO@}5+)>0NT94HQb?$* zCRRwWtR`AWxR7`u0Yf5&gbaxp5;P=gNZ63LA%U}+$RVM#n%E)1vzq83;X~qw1Q3ZJ z5<(<~NDz@IB4I@0hy)UeBoa!ii6s(DtBEEOPOFJ05>OGB0)u>ii8!3D-u{F zvPfvHCbmd$ttPric&#SBNPv+DBOyj&j072pG7@ITlsPOk{DzM!;wCU(`+8+Tx(`+9*$O9DhHc$ zexu3Qm0wbZV2&ikizt^$6v9Gc-jpFni2l%||6m#G4>+*)>v=!7U3C@om2+Mq9K3nn zpG0eFTZQ?@-Rk~@54YES`1?F@U!%qO=A*$}%`Ew&JF_+4+$e4Bo?Lcd;MNb4~9&en}Rk7udUA zJ}rwk26oBzkImhcU)y`uy=|7}eQ3kQYQj@jwJ5aH6pdfd;%foSF8*Tg z|MI+9H4@SX&L1TA)HL)wF zZ2hcuQ-5~QuDia*G`x0FAN#aT;=Kd<_`9p6u`Q+RhZe~bs~WT^oh=&*BbwOwoh1I8 zqs^5UrTP03+p_Rm(=s;OZk`%5ttTezljEmMTYttrH9Bb8JI>iH!#m8=@yoh(U_hRU zoY7~KDe0JXSUY14(s@0t$#6uHms&J6eU7Bws?qH~{3<(!HreMc&o*ixk2#$NS5{glGoGI zW8#|l1Enf-u!z}-Ez7zlAZBP1N@!tjkZyb5&$XoZi_l~@I4>8xq$jUy3hB!0+Cuu` zXbkBLX$|QOX%6WQX%FcSX%OiUX%XoWX%gwu>)J&6l%rLoSB_?pZaLaT`b8Q> zI!0PXdiJ`ek*>Y2ZKQ9nYaHpEqjjWrqZflUeK{f~3 z9ggim_J?DGkR9UKB4m#^HVN4!uiGYMpS*6Pke%|ntwQz+*(_wYknKYD3)wLIzwB7Z ZE!;{sZYA;M(TZqUNxZx~S{^M6{S7_J-;w|T literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Antarctica/Casey b/lib/pytz/zoneinfo/Antarctica/Casey new file mode 100644 index 0000000000000000000000000000000000000000..c2a990564dc93ee0b0410a2c5c66bec405c57cb0 GIT binary patch literal 272 zcmWHE%1kq2zyK^j5fBCeHXsJE`5J)4Ke;mvUbom3y!%gm@LM@Y!Qc7p1O`SXMkZ#U zBp4JlfK<=gz`)7C0Ag2FRWUe*2Zt~?I|2zHU4+J1bfM}3I eKs3lPAOLa@hz2HJp4%`rFfcMO0foSzX8}kZkmRVU rs$vN64PkI`2@QcthLB*%52&91Kmf7~M1w2@(Ii>Q1#*C{p#c{Ftvo8a literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Antarctica/Macquarie b/lib/pytz/zoneinfo/Antarctica/Macquarie new file mode 100644 index 0000000000000000000000000000000000000000..fc7b96fe873af37c6b89262a2ddc04c4a0700262 GIT binary patch literal 1530 zcmdUuNk~<37)QUWzC?4V6f=jH+Tf5w&&o9QndU4}kVu+YP?;`*78UX+`5^0Ua#KQ* zfo6!S(8Pv?U}oUTXi?ds0c#Nv4U4eAssFjX=DOQNh&jp{$5(bGv9<1NwHu|Rbm zPt>(nha|4HRpa%sCKPqay5^6vKH;2fsF;+EK8GYRc|a1s-Ib(I)0+IWQ&J|!bRbjRz#&_&5LfINtNuEZ#Ad? zgYFo4q`9Lhn%6fi`Q1V4zA!EYt)F%0u|6qmc%nrmUfJay(&F@+TH@@`Qb(1P`QMhZ z@4Zs~{)$vgwMpf~X{ox`D3*`qYxz0;!Ec%Q7wui@5kHY@gI2)OlV9hh=Mv)ac%-mo zU$wavSIh1Sb276mYw3gPzuQx9-nH%d@%y7iv$Ku}f!G8w3St$+EQnnY!yuMH+cXZ? 
z#*A?Q>lo%S?1LD{u+XM45n`iFV#Bzw~5ZfWfL#$_*53wIo0HXp%35*&TMKG$cY06;K0V#x0 z38WN8Es$au)j-N&)B`C9QW2vhNKK5QAXV8kWkKp<6b7jbQW~T-NO6qnAmuUYgA~Z9 n5Ka=MJWmL*2l~F6B*#9KiKr&8qxdMLzs*O-w literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Antarctica/Mawson b/lib/pytz/zoneinfo/Antarctica/Mawson new file mode 100644 index 0000000000000000000000000000000000000000..6c5b0fa1309c4ab0c7cc2e80854ed3f0adc1f88a GIT binary patch literal 204 zcmWHE%1kq2zyQoZ5fBCe7@M;J$a$n-)ZleX$$^293CIJ3kOT%61_rkP29~O-Dh6N2 d@DP{;31iG`@AUi-@V(sAq*{N&F1ptp785jTn literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Antarctica/McMurdo b/lib/pytz/zoneinfo/Antarctica/McMurdo new file mode 100644 index 0000000000000000000000000000000000000000..a5f5b6d5e60f15ebdbb747228006e8fe06dd4a01 GIT binary patch literal 2460 zcmd_rdrXye9LMqJpwhVKWemws!@O`gTvUW2LIninl6fQ~qi>S%pdpkbbb(AGa|~%Z3evp;&y&hz^1>^y&*KR)l{ z$}cI3HU4(pV12@yYnS!rJbt5fYi)0HA6U92udV95d2r!*Y0v93-wExLL-8$U$EX(R z45~9bf9%!6!=>htkDBz|!5s6wb8bEQ_7(TB6CY~Vwn2Ay<32sU?uff*^EN$^z03W6 zVY&7?YuqO@>!r_M-2D+{(towuJoVKp$zDN!sMo$d>V0XC_%!^kzJtfrujY*UcQ$H3 zLAT!ca;=U{+O6ZNigkR*GddwJLnn+BX<&S~242`KK~vKusBg6dJHjNmX_4IYov%#V zGD~jmz9=EZJ~H|B0hy9EBvUuG$t}@d8d~y+P7C-!Z*@9ky2k>Y;df4EoSmjKhk9jJ zkEe!x+$7<9LnY#^trA%`Mx6C)C2GwDi7w5On8hDRY;K9p4%;hp;+E)K?|O*~jMTWF zTXf!)0FD2=P7}VkthXI3)!V!LC2>cN-qG-rB(0CpJ8Mo!^2%(PU(hZK5~F2d(o2#u zX`G~nR7vW!UnFfLUlv`h)$}uol5x6N7at3eB^?=>*V3}_OTly7 z_5QL95wk)c$O)HK>A6}MK$EbgLd5#|QpS}#EanPEBvA^Fj+A!Ye zG`tL>xtsH$0f#?l!#Z=%%yJpo`OCQ3rxX{@84ibyb#wju1YfnjW>0YVuZON%*Zhvm zW@M7cERks<^F$_!%oLd_GFN1>w$^Ns=_2z*CXCD&nKCkGWYWm2k!d6IMkbET9GN;Y zcVzO&>}{>-BlG9yk^oyP14s&x93V+RvVf!k$pexIBojy~kX#_iu(h&*q{G(A2a*t5 zD<%kQ^aNLb8OU3CRk=!H6N3xHk-}s-j-`5~7Z80uFpOj9s}nr$Q88TDx)-)%rSG#=H_7inRAs9BzpS2e^gI;)RP``?#_9g-Mf2t zADs8cUs_$6<^AJGvhQ$mG})8;8#dcZ@2-PhZ%?c48xb{q9j8?PrF!}OR779?B-;$M zm8vTTO5|!wwhEpJOX8lnIw`gGl~Jlqrz9*hDV>Y-=$@Ho%;6k;<5y)e_JuL}rgvt@ z&2{7Va~tqtB{|Ii8sm+pj16+-D}Ac~#x^+o$ICkK6Sf zr{6GXueIv*_qUlTTOQY;mugJLs?}O;-YS{-g?egvwPZ~{t->--vWFF@Nb(W6Goef8 zc-KhI@g$Y|eVUoJ{|9x~7s)2?=`Ypvw=S9)HHXy9ZJkC+kLg)e@0t9p13Efqhbb8G ztS-!aR|@O|rB*Tiw6CT*{7)Qp+|p zn&o@@)e3*PDSzS{wK6PbRYkj6J#vDnC}>p`m*P$3ln-^~+3`}9yjxeb$4T{tW?j?L zEo)A!)N30%GZ)b1=-b$Q$6!A<#U{lquTLyJE->pdH%KH#m4;gijRv=9Fp*#zU)J1 zk09tUUVIRAos3Beg7fFNcAfn5on-gvb%WQ2h6Zm|iEqzR--{Nxi$C}yF)@GZ|BL6o zZ}408KLi2Kztc8=bl|itAUz;WAYCABAbp@~gd5ulKTj)2FGw>;H%{9Q(ht%Q(h<@U z(i74Y(iPGc(ihSg(izek(i_s8({_inhxCUuh;)dwi1dgwiFApyiS&syigb#!iu8&! 
zi*$>$i}dTX4I>>REh9Z6O(R_+Z6kdnjU$~Sts}i7%_H4AZTm?7PP+lf4j@~A>;bX~ z$Sxq;fb0XZ5y(y;TY>BavKh#3Alu=z`+;nT)9wheCCHv2n}X~LvMtEIARB}146-%I z-XNQU><+R$PP;$I2085xAzS3MdxUHfvP;M|A^U`E6tYvuRv~+ZY!0D9 z?ijLVPP=EwrXjnAY#Xv~$i^W%hio1GU-xckw8#&$hUvw*q0CSuV`@0PI2;PQyP*NIEHLH*0H1~vzW@LL literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Antarctica/South_Pole b/lib/pytz/zoneinfo/Antarctica/South_Pole new file mode 100644 index 0000000000000000000000000000000000000000..a5f5b6d5e60f15ebdbb747228006e8fe06dd4a01 GIT binary patch literal 2460 zcmd_rdrXye9LMqJpwhVKWemws!@O`gTvUW2LIninl6fQ~qi>S%pdpkbbb(AGa|~%Z3evp;&y&hz^1>^y&*KR)l{ z$}cI3HU4(pV12@yYnS!rJbt5fYi)0HA6U92udV95d2r!*Y0v93-wExLL-8$U$EX(R z45~9bf9%!6!=>htkDBz|!5s6wb8bEQ_7(TB6CY~Vwn2Ay<32sU?uff*^EN$^z03W6 zVY&7?YuqO@>!r_M-2D+{(towuJoVKp$zDN!sMo$d>V0XC_%!^kzJtfrujY*UcQ$H3 zLAT!ca;=U{+O6ZNigkR*GddwJLnn+BX<&S~242`KK~vKusBg6dJHjNmX_4IYov%#V zGD~jmz9=EZJ~H|B0hy9EBvUuG$t}@d8d~y+P7C-!Z*@9ky2k>Y;df4EoSmjKhk9jJ zkEe!x+$7<9LnY#^trA%`Mx6C)C2GwDi7w5On8hDRY;K9p4%;hp;+E)K?|O*~jMTWF zTXf!)0FD2=P7}VkthXI3)!V!LC2>cN-qG-rB(0CpJ8Mo!^2%(PU(hZK5~F2d(o2#u zX`G~nR7vW!UnFfLUlv`h)$}uol5x6N7at3eB^?=>*V3}_OTly7 z_5QL95wk)c$O)HK>A6}MK$EbgLd5#|QpS}#EanPEBvA^Fj+A!Ye zG`tL>xtsH$0f#?l!#Z=%%yJpo`OCQ3rxX{@84ibyb#wju1YfnjW>0YVuZON%*Zhvm zW@M7cERks<^F$_!%oLd_GFN1>w$^Ns=_2z*CXCD&nKCkGWYWm2k!d6IMkbET9GN;Y zcVzO&>}{>-BlG9yk^oyP14s&x93V+RvVf!k$pexIBojy~kX#_iu(h&*q{G(A2a*t5 zD<%kQ^aNLb8OU3CRk=!H6N3xHk-}s-j-=r^9do;`;hp3mED>x)-9 zf1K6k2^Xi%T-+C4FkiKW5vMksufeYMQnzJML%|gZ=fBWM*&B&0y_fpDCsLoAmEG@d z$e!$NX_$Mg(WyykymMXmCX=#n@}f2!xTO0N-5T4GP`4qX%_WW6;@hSz%YJPw4omB+ zb$amIR%v_iRomyia_Cx-blm$ShcjQK^Kwp(bk57sv5$Ie*F!npF{fRn)6!j&)%cqG z+Otu$=ff54T^ZEA*_57GaP{P^c1=7VkW&*inw*Kr>E2T9AFq~yrkyevFOf5WO)?bv zA;U$Ej8qis$mh2*x^`JdUp|x6(rZoMosqM-M|y7hsg7l@>iN`7%}iy~$@8rG*N=In z_H5Yf`SrdF?9YO9I_(5QG1qB!gO20z*vtKkvA+NQV_-C^84ei_84wu}84?-OY6e9{ zwVGj(agl+Mk&&U1v5~=%(UIYi@sR+K2#^qv7_24;Bnqnu1Bn9(1c?L*1&IX-28jj< z2Z;v>2#E*@35f{_3W>^U!b0Lg0z)E0LPKIhfsw##s|KJda013wZ0PA32 Q_zwgGjO7AZplikj01<5#G5`Po literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Arctic/Longyearbyen b/lib/pytz/zoneinfo/Arctic/Longyearbyen new file mode 100644 index 0000000000000000000000000000000000000000..239c0174d361ff520c0c39431f2158837b82c6e0 GIT binary patch literal 2251 zcmdtie@xVM9LMqRfk?4(Z-2m9fRIS1cJdqi5tq^l%)GOd(?~^75wR}H%NV3anRBi& zeCt@|$f@O+HN*a(`~$6(+GyeBFXpUVTdb@!xaOz)E$t_w2gJS9I# zM29j*%24E-jtTFMjP=izak;-}eA-n_82h<8qfe`I;9VWxcSP?vzhCb>u~QR|9haoT z9g_UYKAF(0lCtS}NezbNuH~y`qAwt6g~c+-T_EX6F1h=*@#2c{s%tP$Cx4Z$Q+gA0 z>Zw@0r}L(|4}PoDT0hl{tsiUVhGUvl{ibGDT#}qnr{sFNByZ76lApX+3UV5xV7N(U zB(~~|%PVE(uk||XxL5A|tXvD*E7j9AOYhrOq_f+SbWTm07Hyp=_m{+|w>nYgreD!w z@354_e59pmUr1^H*D^2qeVG^TmIwM?lldKQh_B~8^|v(3g2M;&!MZwmsQCq5`0$Im zD7Z$;rUy0PE7ir$1-isNMVAa^X?c8!lwTa9j|@(hrSII(Wxa8-eE(>v=)5K?ng*n@ zH7r$?y|Qxice-l!QCVHlqtz*UWR0goYi@a4*Cwm3{bsk;4u^DIccVUfIiQanTBgAd z*URJEJzCdZCQsC+=#$&>W&OfJ3Dr2|sq6`|q4;NcdbB0=nekd5`BEB24Qa!flhW9K zNuPPET{echbkm*>baTgEeYWwSHnlWqlq1R!J>nnEsF;!e{b^Zo(qE=^^t&CWy=snIbYrWRl1%k!d3Hv^5ju=P*-bs>ocC z$s)5wrfX~Fi%b}qF*0Rj&d8*ZStHX%=8a4onK?3bWbVl1k=Y~DN9J#95`bg?Ndb}r zBne0skTf89KoWsu0!amu3nUpxHjs26`LHz!K{8@%Qi9|JNeYq`BrQl@ki;OFK~jU{ z21yQ*9V9(Sevkwq8L~AgLULqll7wUlNfVMMBvDAFkW?YLLXw4K3rQD}FC<||#%xW> zkeu0?q#;>D(uU*>NgR?nBy~vckmMoRL(+%j4@n@BK_rDp4sA^mkt`x_MKX(|7RfD=TqL{J|FFApCdJdT UiL%?Dn~|T9<@RT1VPi_@% literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Aden b/lib/pytz/zoneinfo/Asia/Aden new file mode 100644 index 0000000000000000000000000000000000000000..505e1d2225d6f52087a55dff7ffc8ee6125e548e GIT binary patch literal 171 
zcmWHE%1kq2zyM4@5fBCe7@MO3$hnnh5yrsCz@ViClF~9@VDa${VQ>r%0dW~ZNHFaO TSO){ce;~kbDi_drU1Kf)CYBQM literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Almaty b/lib/pytz/zoneinfo/Asia/Almaty new file mode 100644 index 0000000000000000000000000000000000000000..75a007deeb1c343c06ac4bc95ef953a1582e8aae GIT binary patch literal 936 zcmc)IJ!n%=7=YoEHWrPDLD2lQnrbxI*fvRRNE2gE97G}hpcfHQC`DTe6{LhNtte;* z!9hwVaR{Vj5Eo0klnmeC=n^PhV!^=ZuH5@^bOcEg9^(Cy97W5{;xJ12=VY z!!JYis7{r)C0#wHFO_^cv-(S4o~!HO_v>0e+0fZnKjq3`C8!+jZOzMhpye`Ktv_4e z+m*T&GAE#}-d0YfMxQwjDP@iD-h8r?yJ4y9zsFsRua(1Lqpbfp>EF%2s9m!-apct5 z=H!vnM+(@Q3P=g022up6f|Nn(Acc@hNGYTiQVgkvltbzv1(AxjrX*4mDaxWMQWmL; z6hFTfqVAU6BD7Rndg=qMqLZc7s|U6_e6G5$b__&3f1{o|OYAn6drRBeD|Wm6 z;nF^I(C&8VrgY!c+U`;NMS8B&KxF8lM9ppp^vb;;y(d1fql-^Vp8@CXzR4BRFJg}! z6R}uh9#07Le|lD88%yi~r{<`)e!M+!M~3>UkK1wUizPmPpC%Nii$AqN291l6#HhWJ z6z!J5u8oqsY>Onn+@nJtuh5~_&&jZcLLGi|x1`k7%7`sBGP0&pMs3fL(M5TZTK-7dMgazW?q z+br`h*6D)H2eiCygI1Jp(uFmpx@c0lhKe$Eu`i&NStYWhtzWCW9=0jFD4zH0O+*G;-F+X(Q*2oH%mk$f+aej+{Jl_Q>fY=Z_?SWPqfA9LUKZqLb5{Aax{4%i6NOGsUf)`$sySx=^^Q-I6?G6~2mAk%=%gQJ-UWF{QVR3LMKOa?L=$aEm{ zflLT8BL<5pLFNRR6l7MAX+h@2(M$|7Gmd6zkhwu72bmpYdXV`+CJ6tt8FHId8t7-A YyxD_%NxsB5f1)=#A>MpUic4^R11fmc_5c6? literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Anadyr b/lib/pytz/zoneinfo/Asia/Anadyr new file mode 100644 index 0000000000000000000000000000000000000000..766594bc979702a965c2052048d269085e6f8d31 GIT binary patch literal 1197 zcmdVYPe_wt9KiACb?<7egHV{xwenB8%x2p%u}s}k6Wd%!J1N4CvY@&|bO<6aDhM(t z=ujplR5B!DhY|_{4>s#VC3ojp{b&vKfIyj;ApWYl`WpX5ia?BJ?5LuWlN`vV*ber z+4yL^1V(JxboHBTKKDt=PQI1$zE@I_cqWxi})QR`kn>Z6)2E~ z$OmayoRh}VdENN_rR@IkPVbq_NYm^Kz4u{GH$T6pTP~03*5L~}cJzvlU%V^(I!@@e zBZJajmDC+=hh@LJPIp$eOXrt_bh)af>vc#DeDp}-aiJuqqxtS@OQ)3ODsb7Bx!kK( zT&q29+gf?>_r3X;Qey+=e^i+%Wh<4_)*7WIX4p^6uwOp^z2{^&%UjNq-Dwq1#hu5Y zG{2&h#aiC-O#j0?zyJE$l3C0ZnJvf67nw0KXGb$@WZuZkk+~zYN9K<-fOLSgfb@Vg zfpmeif%Jhif^>qk;%It7nnAj8H0>b$APpfMAuS<2Ax$A&A#EXjA&nuOA*~_3Ihy8> z?i@{fNPkF!NQX#^NRLR9NS8>PNS{cfNT*1vNUunbEz~?YHdqvUSG1eqJxu7UwcM6Q45irg=ZU{NrijYpvS4l=hP!uUAuVD|*+f zYr5^pl-_-RLbu--(f*YIz2|I~-dj%TeS^(v;b99`;FgR?m`^d_c`-HEH?k87%UVOb4VTFi;ux1#X2gc=${ z_S(8_!xaiQg(J~Ow0ZmHD=b8xZ~q2SD2fIlO2OK0&!rM>_gdWf`m{%?3#a z$p=XY$p}da$q7lyBFG9!3&{&f49N^h&1rK(l5^VZko1uJkOYwokra^}ktC5Uku>o? 
M%`;+CmOZ)1Zw_Yfe*gdg literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Aqtobe b/lib/pytz/zoneinfo/Asia/Aqtobe new file mode 100644 index 0000000000000000000000000000000000000000..ff3b96b3e9d49adf945c2f9e40cf9cbafc94b19e GIT binary patch literal 1052 zcmdUtKS-2e9Dv{N^z!}$w)pQZ?bFOJ|J8Nf$yuJ>IY^+vt9}rKf`f`dQzQ)uMjQ{-rw`x%Xc^2^SHO~%*R#5 z3Y!}aD`5`Lkw<2HXnL`*u~y2|InN69jXQQjplCPz`mxgZ?Xb|a=h%_Y4+_z(oSgl* zD9!KYrRDi8Ik$8}?7RKaI@Kj@V^KLja9Y~COL8H4ARXa-ar|NJEI%!Dj^<;XyX$t> zr`Kn?*LQT!`=aiBwW=@WUh2#Db2>J=sIN?~Nc`SIxte|~iHT`RHqT05d`S9ABa(_v z>C~5t(*JuLtIo`Q*V&ws;pBcyS-z_3DxcZ@Q-5t= zpe7Iu{d)(LO1?3FgGyyowNfLR>CRX|X621l>%CLwy>t9LcGFeaQP&%jW{$h%>GHEm zSyuU!Kbh&Cz|ha*Z~ZX~QxIERV+>*qV$P$n2Qdh-2r&t<2{8(>3NZ_@3o#6_3^5I{ z4KWU}4lxh0@6i+hslcNt0a62`2uKx>G9Yz83V~DtDFspsq!>swka8gPcr*n;D)MMb ig46^l3Q`rMEJ$6D!XTAFN`uq}|DWPQrb)Lq89D+TW7=B) literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Ashgabat b/lib/pytz/zoneinfo/Asia/Ashgabat new file mode 100644 index 0000000000000000000000000000000000000000..f79fe0460d1d0fc208b3c2fa1062ada8cc3d6d24 GIT binary patch literal 671 zcmc(cy-UMD7>6&~YMTljMB5ra;%BTPh@dDqL=fpvAa(%NpBYu^w6E@}O&D^=K+gagw z`J&eFyS&4wJAdWoQkT=Yx98LR!&6ka_1vDDoye&lsNVB!)pt@={YPtRV6UXytpzo> zF`6&~YMTljMB5ra;%BTPh@dDqL=fpvAa(%NpBYu^w6E@}O&D^=K+gagw z`J&eFyS&4wJAdWoQkT=Yx98LR!&6ka_1vDDoye&lsNVB!)pt@={YPtRV6UXytpzo> zF`h5IQ4n?-ShJD-tspnXh&Y&6RFf%}=+xa?v#=Q>Po3uV~MgDNBF+rKY!Cws$J9=4Y3b zxqn9c#-CWr*j)=Qd{XOBt+ho%Y7ZmrZ@p~?>aSbJ<{4#6CY4?KZn=fL4$fV(Lo*lc z@Z?M7pFUFOy#+gRbI!W1rWB40Soh$BdJ4DH+xfxzGOz7uT~fz3EURBROO%wZTxH2M zTvGb6{~$^~RGHKis9eg0iS>Wq7mgRz<4^YqfBK5Nu0$e+imQM5tba59^50heqThbQ zH(q>(nBXxsVtOMGD-bgfI}k$}=;t%Lg#?$}+ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Bahrain b/lib/pytz/zoneinfo/Asia/Bahrain new file mode 100644 index 0000000000000000000000000000000000000000..cda04a151090d4314f21331c8f95e537ac0abec9 GIT binary patch literal 209 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$XWPlo(N0Vqyr3$ObiVAIUsop2L=`f1}y^y4jU1(+c-wj5Ql~N7*vgFdY>hOX)Y_b_aP|NEsjcR#HsAGqcAn33o&)dx zpHHZIT~m%cvWeymAJ$<$JSVBSeU{w6_|`WQ$`Ug6hM!Iv^d`o95lH;abt37Ggp+Uj zyeZe-52v2{(0S(QyH48vEzYyM*E&->tDNUrOPpz|{m%5#Sx)-=6z6$QoHHYJ%y}X1 zo|6%kT%U12wtnXMionbfcl)fN>w(!9PkFOzZp7zo8i>!i(jLw&I3{@&z2Zyjll+|Z zlK-$n{HZrt;;6G)lK!QXjD9UkB2LMY!85Y-`+iw=e7}@lJE_4v-Lm}XN4lb|UdncF z)$)~{TG6^vE9cZ|sC2Hrni$Y3PpVcun69f5{Icq+NPX>oid3KeL0=yllhp?zq-Nkd zsogmubsvYNzV4E|(eJX1C`xn*;9ylg4DD_i4#k!@Mgy6xv7**@t< z-G2SF>=?bOZ=Lv5-X1)roqGqg>v*4b@AyP_?&(l*MLaQnMMnMOdNL+D#vL2yp77W& zk<4@E+{kRNM2Zxbx!fZD0MFz1zu)lQ{LNFmcTKUsnrHLtzn)8jAz4_s%-$+PQdrH$ zJTR9Tcl%}hX1;{}!vX*GuYdi+jDrq2?7Zf{Lk>OU;9Hu*4-o(&0Yn6d3=kn8Qb5Fj z$N>=qA_+tkh%69cAksj@fyiTN1cFFpX+(m^1Q7}%6+|pcBNs$4hGY!U7_wO!;TY0c z8u1wNK?Gz-2oVt?BSc7uln^l?axw&kNXifuA}d2!h_seQT!y@sMqq}-43QZ!Lxg5X z4H26mH$-rV&qQVFZBrc4=KqA8k4I?&;;4q@Y2oEDZmL@=q2w9pC tF=7M>5+h2GFhSx32^9QaMk>~X?4>+P<_YFz`ZB#)d7fal*FO1T{{%TiqEi3> literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Bangkok b/lib/pytz/zoneinfo/Asia/Bangkok new file mode 100644 index 0000000000000000000000000000000000000000..e8e76276a657ffea33afc25ea56864eddc7f43eb GIT binary patch literal 204 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZJkqO9~Ij6jh%8z2WnvM?|t6)B` zXK;1x#$4xLM~r=iC&w%H$#ud`dwMriZ4S5Yk|RrZs-wldI+C7Y-pgpyo%dWdotJxL zD07b)8u!Sh$)BoA14>^W9yY^$VYzZ{vl;2QLq@xgs|?z<1w8$?lm*dzonA~)~nL|Dz2IY&k(erTl84NkYvnrAsW+MZ zlXO;Zzsf#xTs+}!<=y?d%-y0-Q$;7EW#Nl6RPmcX$^)T4)PuVny~uw-l~jMPA4)%^ z{3T!Mhhq<^($t799XV+h$L!UM```#Q7qSgkB;C^52ci7a2G z)MJ}GvZ7#-3YJZil{3;*dDbsd{@X-Vaa)3}_-;r&K0c~f_nlQwe0yH6IrNjM?EOfu zZ98qM!Uwdf`oOH)(Ie}N_nGR-uxyysU_wRBvN7&uRg+vHHAD64$?Nl__VhAUH<%<( 
zb>^r|Ul;27y|=1{NTPna=2!L1oq1s&2>iMrxW7{*@G`3b_A?=kkQ0i3N}*B_ z9K=x^0@V)UqTnKG-yoR5MQ}JMfeuP>u_7HBj#}UMQj6&1=zHAnd%RpS(e=(J!Bh#+(spdS58UUR{&T zPse4;Ls#y&J1BQv@0RA3gxqzmS?(U%BKP$Ckb4u0a$js-w$^=+@!(1(zVtP-|IM^K zu+U{4eEZl;yfdsrk8Yc7GrrY+t6+9al`ZRH&OAIeYbEpJ#Us6UyrUy$N}Vm2y;L$? zO0DF*bZpd4mpi;;ewW?-y58%VGwkCt?QZW&-##%FaZf%d+nGz>-BXj5QeXC?+vndm zMPTEG>JD!DZEp_Mtnb>;A3H2W_M7@IME|e|3UT3TpjHUyJm>2CbyK|$Y}Icz@F{2Y zPQy^%5&a{hdYpDdZa@!*9~A+$^kIEC{~&33?OUsAAqygmYotNMLF8#Ffe?uhkr0^> zp%AGMu@Jcs!4Sz1(Gb}X;SlK%@euit05nwukPsj-K!Sio0SN;V2P6 zL<0#25)ULGO%)L&Buy0)Bq&H!kgy~wB1_=!k8zeYLbejK5c;VcD)0PVV1W~Qu A*Z=?k literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Brunei b/lib/pytz/zoneinfo/Asia/Brunei new file mode 100644 index 0000000000000000000000000000000000000000..1ac3115acd19ba3a36d85ee937df5409d2d6a51d GIT binary patch literal 201 zcmWHE%1kq2zyQoZ5fBCe7@MO3$XVOPy@P>~iGd+|4@f?G{`j|8ss7{4RjTV2DyxZGN*9?U8-wpWo*C&03$>`@&Et; literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Chita b/lib/pytz/zoneinfo/Asia/Chita new file mode 100644 index 0000000000000000000000000000000000000000..c09065470ef9cd5a257e56252dd05e416e67bcbc GIT binary patch literal 1236 zcmdVZO-R#W9Ki8sZoZ^Yzk{W<%$2TXzErDZX3KP$%uNa%gh?UvLRrL!qM`_)*Tab4 zp)S$Es0c#3WF5?#3KbH)c(M)yJJiJ_IwTSGeE&-jqC>}?KmY&d;n{ze{k}`=K724} z{#XU}gqzi5Z}x?~_Hp~&OyA=>qh?{CBe|&fO1dcT@^sP6)9&I=gUQ7cQ|TpR4N31% zT$lETwd7WfE=!ea={Zr~QI{_7`l2ghlUi0kuH}`Z>i0g;l?C@T;CU;7uOqVR!?3U7 z^budh_}TR8k-e`f2d+xh-7`{sy+>*;cFCF(aap@REbI0h(%|ukt`E2BhIYShEUD7k zV6N8wn6DvkxrE+NYu)!8*);S<>nA?S=6jzdJorMkTzV}HH~VGlfk)DK;h8oy-;l_@ z2fEFFN}3}n-JabkEoH~FW$L6x-8(h&g%0Ba)J>h)z9X_@D1)2BbZ3)xa7;_#S z2@`F%Tb3)coPFkz_=gF9`~AJ2b}?yW+AJoHOdXlLqn$od08#-`0#XA~1X2Z322ux7 z2vP}B3Q`MF3{nkJ4pNV!EeNRyDap~+gcOBTg_MQVg%pNVhLncXh7^ZXhm?oZhZN{& zD@00kv^63{B2^+~B6T8#B9$VgBDEsLBGn@0BK0B#BNZbhJKCC&q8)A3NZCl;Na0B3 O`2Urj&%dBLl>ZBWJ_9KL literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Choibalsan b/lib/pytz/zoneinfo/Asia/Choibalsan new file mode 100644 index 0000000000000000000000000000000000000000..f099092610d152c5af06ce9aed4e5a639688664e GIT binary patch literal 904 zcmc)IJ1k^T7{Kv^A@RssqVO7)9cDbnjAz6cuNkq)&QQ1#A(BHxBBMBkPQocff+=pH zL=ae~gp z_ag1w)~Yt$xn%&s%r8=R)0NL zpF>B|)9_u@lqF?t?xym(4lVD)H{17P!>YSpx9iUqtcH_>-MBw&`M2VB(`wKPEC%f0 zY@yXW6;mzofNTvnsWyLsY+Zk|o$TuF{T(yBrFGBhirKq3qx)v3&3+@Q z2ci)((pIaZo*Hw|9T7u+k8F{a?#g)mCsT;Txbv6Aj1n#(l;*x{ifkd0dubn1PqJfi zLyX1x4H5Y=lDdr=yz>7TZA1?2VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pXt<8 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Chungking b/lib/pytz/zoneinfo/Asia/Chungking new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pXt<8 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Colombo b/lib/pytz/zoneinfo/Asia/Colombo new file mode 100644 index 0000000000000000000000000000000000000000..d10439af138a65d4aca32354641a757241a75f10 GIT binary patch literal 389 zcmWHE%1kq2zyKUT5fBCe7+bUf$Z4OuSLSq-jK-O%+6yj~Hdq)gW6&@vWKeLif2G05 z#LU9P%ErXN;B^P2(d!8V3j>311jvMt1V$bPhUg4NAUisPK@cPYB0;h~z99_0KqL%JONS;@(73q 
Xc?Lv-Jj6g#A8`Twt7~dyY`_HokS0oq literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Dacca b/lib/pytz/zoneinfo/Asia/Dacca new file mode 100644 index 0000000000000000000000000000000000000000..b6b326b20eb4cad1587ac068dfa868e276019a1c GIT binary patch literal 390 zcmWHE%1kq2zyRz(5fBCe4j=}xMQ|-~q%= zp+O-Gp1~muE{@JXo(l*Ad8i1i-0oD2+2r}E+_JC-R(?K-I`CuC81rQDL b21q%`D#0B)Lt{E2qh`3W4 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Damascus b/lib/pytz/zoneinfo/Asia/Damascus new file mode 100644 index 0000000000000000000000000000000000000000..ac457646bb0205eabde9526bff8ae41d33df550b GIT binary patch literal 2320 zcmdVaeN0t#9LMn=AVP>}q$?i=q$0+{1+IVv2o@4)7yAVl$Aij`C<^-QiAQCDV9JJKko`<^%M8aX9q4b&U6`&WuNeM3f6$$SyL zd(em}*dp%UHOrot-6ZDM9kdsCKQd!u^Nocwmz#0HaYo$L3Tx3%FUor^&QbAaXUc@5 z2`aIxQYUp5Se`v?GP$i)rPRNtQ!9n?$~3(==b}nWeNv`JeX24dJN1&lSFI((=jFYF z(bj$6_+{qlVwH7bhtBTZZY|w^Qs(UZP~~pEte4g0s{7Y8>IZzAth}YYa(PmZ%8$Gu z9}Lg13gUBQ!Dz8sF=eA(ach;biqh=2vok@Fi6<`?#zbJgaJhzRI@PYrK>YcWSq!Ak&z-pMR%+`Fj&qREyr+?@gf68MvM#@88gSAkx{!k!$!vK z>I@tiImghEu_J>=M$a*PWc(ZfKq9~q0we|;K|rFw5e6g<9DzV0frJ8y1riKbCmKjN zka!>gK_Y^L1c?a}6eKE0Sdh3Nfk7gJga(NX5*#Etu1p(MIwuY7KtqqTqL?kc#-%b0d{pFjD*`%s5D a%}(-C@5I8zo^(%oa!O)hYRavru)hIHh^b=$ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Dhaka b/lib/pytz/zoneinfo/Asia/Dhaka new file mode 100644 index 0000000000000000000000000000000000000000..b6b326b20eb4cad1587ac068dfa868e276019a1c GIT binary patch literal 390 zcmWHE%1kq2zyRz(5fBCe4j=}xMQ|-~q%= zp+O-Gp1~muE{@JXo(l*Ad8i1i-0oD2+2r}E+_JC-R(?K-I`CuC81rQDL b21q%`D#0B)Lt{E2qh`3W4 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Dili b/lib/pytz/zoneinfo/Asia/Dili new file mode 100644 index 0000000000000000000000000000000000000000..8124fb70b2d7522214a8cae502b094653b6ec192 GIT binary patch literal 309 zcmWHE%1kq2zyNGO5fBCe7+bIb$eHv^;>hWlMjtMPy({2;Wl&&wcqRh_BNH!p3YYSom>1ZzyGD^s)SA0lxLg1y<&0V?Nu(0 zt79GhZq%7MPo)!Q`Sk0Rks5RUOLS>2S-9`tDocKR*Q{R|h3=>g#IDZTPVon?wTMmzk2dB5PYtvq-o~ zU(4?ab34;koDXGsaUl-Na4zJwI973E5&q%tFJG5FtZ)GFz!(=09}p)o%?rd0#1F&~ q#1q67#23UF#2dsN#2>^V#3RHd#3#fl#A{4*3;)}1(xi`0BtHQaFL;3f literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Gaza b/lib/pytz/zoneinfo/Asia/Gaza new file mode 100644 index 0000000000000000000000000000000000000000..bd683e831b68d1a2888c3b3dfca4b0735acc8c57 GIT binary patch literal 2313 zcmdVaZ%kEX0LSrj`5%@WmCZFnOh7ca{J9sBG!49<80ceMEKuMffieTHDHbV#uDO{e zqou7S;R|!4X;F=_a`eY#E2Vfc&0KRmZK(_~uL>eGr008dz6sZhUUkmS`P`k|^XBgU zzTTphl`iWaN2qzi#nEIgK2K;cw{HtS)?H`565sE?ZvF60zv^zR)T*^s{rIZX$6ou+ z_fth%!|_#G^`xzkJ^cxycigA4_k>jE`-c5}?N!!=?t1^9Z(X)7wteFB*TytlYC7w` zTvTXXS$n^)f7bSffr1v_)yRm3!L(!k!O<*hXkv(O==USm@KCz{uW6YgY~wZ^Hqxf< zxY{ej_qo)h&#&nRwq8<^U88ccd|O0q{zy*AX;%-HysM{16^rPiBpGAP6|v(k=-8jn zs%huDb=;SMV*2quIiq#5dgx$%uuo>)JtDHlpVHYqt9|qPCh7$p5h~~CuwJ-lwtW16 zb9Zi8r^qq?dR*R z>U6y>{5nn)b(ZO6d&9-@=6SmK^;lI>7o$t7!jvoy)n&Q8>gnugxgx$xmB)t2iaS40 zmBBr-a^R?V=FAcK?1=+n<>CGEx%Nh}>b)KE`JHP-)sFXdbxpolUAw)mMHY;#*w!o=S+lKK zG_q=B+58-=+tw@`Svj(FWbMe}k<}y1N7j!N0I2{{0;C2=5s)g_nld1DKnj6W0x1Pj z3#1rGHIQ;3^*{<@Ybt`21gQy96r?IhS&+Ken!+HJK}ut5YJ(I9sSZ*eq&`T2kP0Cs zLTZE*38@lNCZtYCp^!@1no=RPLW+e{3n>>;FQi~d#gLLAHA9MqR1GN`Qa7YLTSu>WdT@sW4Jvq{c{*kt!o)wr+*lF?LRG n8|I+k|3R9=O}Z0OXm@;`%bDtQC#JaL^U|FzXIf&i+a3NlSy=7y literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Harbin b/lib/pytz/zoneinfo/Asia/Harbin new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT 
zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pXt<8 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Hebron b/lib/pytz/zoneinfo/Asia/Hebron new file mode 100644 index 0000000000000000000000000000000000000000..0bc7674bc2bb175ec32de8d57ea7031a837d24f8 GIT binary patch literal 2341 zcmdVaZ%kEX0LSrj`5%;mnuY?30Y1jX0{QchK$(Hp6pNHV=iE$_ zxvbWT@P)ZiR#aoI82xeCN-3U9GncNXErlWGRY8P?^n8!bH{p8GtIpXupS!br-rU{a z*HgHn(q;YQ2sTf+I9@RqpC>e!+qVTD>(0|(iSPGawSM@fUv)KBYSmJ!e%vhek(a*n z{#4Q0aCD_sJt@m&Pk)@~4f|B~9+T>9->|Q*t;#yrRqy-r^^4Z|)=#{?+Ng#LJJ0wo z78Y2S*4*RmpS8VVAivprIedJ>V9F8S;An<5G%?6K^!p)ecqrBP*R**eWWzQcGSaGU zx!fy5_qx=i&#&lvw_Z@;ouhKHd_zQR`bbX6YE$=>ysf836pP5h1Q}(`713embo9?> z)U>l*I_67%G5u(toY68_-QQlJW4Ag~+?xxuv$RFTub-|H79Lay-N8DsJX0p69gwc{ z80ns}MInX>JYe&DV$IWs6-%p9%LspqA7@V8Put81Q`{Y9F7=)*`gr+KQL z+Zd!Ct`F2{MH}_Jl5RaeZND!)Zm-O^eMDrAJE1dsR(TinP1Fn9gGAQhVZCVgZ28E0 zCv|pNhsxPC^j2>20g+o2rgJx+6pPpXD#fy$dWok)KAOH;zjLEp8c;5l zjx_1V{>T)MpQ_gdU&n~TjxxP$PpBwrny-srj#edgQM$A$M9Jb{U6$Rep2&=p%VRrL zd32Dhxb*{78Q3E$2M&uTPal#`9eZD_IJi$f-PR~pzOzF<^Xh6*wc}k~U6UtPRd3hc z{A95@w^~0tXNg+l6nbsAOVx}^(lu8nsC9#j<#VS7)$_*_Wo^d^^}_x;&;pez<`umM>1_s>p^z#qm9P6$B^vtp%y3Ie_nvviLu&hZDmbshH zA@=gkeAV97mFw?}nxn2`(cQ?-kgXwm zLpF!(4%r^EKV*Z*4v{S)d$cv1M0ROwwu$T$*(kD8WUI(tkZlO+dPUv;paZt!V_( z38WQBFOX&+-9Xxb^aE)K(h;O3wx%aYQ;@D8Z9)2iGzRI6t!WL?8>Bh5raMS`kp3VI zLOO)B2vYzmSF@9Yb1%^bBd5t?8PrX&cfv zq;W{+kk%o+Lz;(l4{4vR=^xTSq=QHckscyVM7oHy5$Pk+NTicUE0JCz%|yD1v=iy4 zt!XIIQKY3vPm!h~T}9f8^c86=(pjXnNNe7xGTH`z)iG`KeIFs-DolG*Dd7oHQeCrbR#|gS` zcsQ%w!@V@=UI*h%*{-2R+nrt}H>3NK_v*H^-QRxIJUCt}534Vko>SN4(TXaYS~yoy z`3jq!5|Z@e8TQHW8+mFc?6X_l(tCEg>ASE&Ki@xYUYzLCFB{_K)t>YE^`fZhZ|;!H z)N(VhvRwvpYwh67!!np!V227j<*hBT@A?{K_~eLv-`T21_TIH0WS#!lc*TA?v{-+x zykkbUmFclrmrb^2RDYS5Fu7@)bw1c)@?)#?*Zykrt+!dmZ*Q^_*Q@0F#Y$T^Ge^|X z*|sPV6yL6Z^=}-IKz+srYXUm7)NhLCXLRv_dm@{J?9^mK(O z+ubV4SLE}%=ifd`?TENPSS5}X1(a$_sxWs|^7CF-A5&|h>v_dgt@HPZ{l$d2Uq36j z!<5LJ$fU@u$h643$i&FZ$kfQ($mE`Gc4T@_H$Rd9k^zzek^_#B!*;$q=w{%B!^^&q=)3^=@LXTL{da@ zM3O`=OD?3zad`rXBAFtoBDo^TBH1G8db)g(ggsrxNXkggNYY5w_#dPlakZD5$PYD~ B5Gw!x literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Hovd b/lib/pytz/zoneinfo/Asia/Hovd new file mode 100644 index 0000000000000000000000000000000000000000..71c3cad48c11bce41e68eccc25ad5ed78efd1d13 GIT binary patch literal 848 zcmcJNJ1j#{0ESQLLLJM&&&j&U7gZm+lMQXRsAa=?nJaa9&4>P`=af9PisHg5WdX1){%}0e`-qWT=t8ug;f=ZP0H@@ zgbMn6vd7h~ddr-$KYwk8&M%FDqYE>f$r_RLp*fh^F@~16&0&4f7>Op#(cp*?Z5TGk zTnUe`6&L+s2r>I+omS|J96c#*B|7cn1$Gd7=~Den18Jgn1|Sh|9=BYxfQ;?(r-lqsGtA< literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Irkutsk b/lib/pytz/zoneinfo/Asia/Irkutsk new file mode 100644 index 0000000000000000000000000000000000000000..1e94a47987ac6c60308d661c70692d2726e66c5a GIT binary patch literal 1259 zcmdVZUr19?9Ki9jw$-visZ48Xr_9!})tV*CKWQ!->$->R5-~EcvJiY25n&)PGKnCf zhwMRm_>uJ%J!OOlA^1=rK^YYiWPe_Ii3JfwS)K1)f)UYk=W@^Y?(A$2JD=I9&Z80a z$5?1qxEW36=6OlROxG6Y-y0g7C@2iHXOLTw2E4bn;?QZ0lKpYmUdAyS#H} zUGsciw`cak-=61tm1-X~KPIzBsY7YyQ|k03*CM6T8J=_V%nIu>dw$>9lC)KVyVY{0 zy*A&flq)xKFLJZ}?}Lt+oz6jj{`J8hX6(Cjrh#(sl*8w308kK6AUHJ`C?F^(C@?5E zC_pGkC{QR^C}1dPC~zovD1a!4D3B*sGIM6n`&4VyAGP|o@7?8fc8C7KcHbY* ztc7JM;x8AfKjF>o(r=z`_v#<&sqVV^un4oh{h+;V{yg9IRcnpLDPHrX>^*igE@s;wU*2J!J6UX;KW&?xyEob2T-j%S zTe{!wF7IJ}ACqp2hW$Rjiru!q_c>p{e?>Dxa}}Gj@1`uP+lJkxdOD z=f?U%onSjmwwYlaLyTUH8D{UM&BiV3qD;AdiP3jKUCXV7Y2voLl`Y|eJH7qlid!NA 
z4te_rk86qCP-{eZ?Bt@sNeR>J8YtzkzlS;P07w-OsautwA#v_`Ib%^J0|!AdIKY$cbx z?W8Q;;*8FE!5LFj?W7LyI%z4howOTso%E;`R(j_IC*%4oYwSBiox9IvTA58bR@Q-m z*0}YtR(8Fx#+L;%HoDTvSiQK^1+(#4f1_#b}#XE*iD*xPy*DF9LdS6u?621pT*Dj;P*>VOmisf4R81yT#77)Ui-bvclF zAO%4xf|LZQ2~rfKDo9z7x*&x?Dua{;sg0{H4pJSYJV zgwzQs6jCXqR7kCmV!7&Sag@td*9$2aQZZLuGNfim(U7VkWkc$Q6po{ENa>K;A;m+g zhm_A%*AFQmQbDAI95v)9B1aWD%E(bijzV%&lB1LywRF|RM5>9D6R9UsP^6+rNs*c& zMMbKLlohEfQdp$2NNJJUy6WO0)pgb7Me2(b7^yH)Vx-1Mk&!ARWk%|Z6dI{CQfj2u zuDaMrwUKhW>Utvuchwa~N{-YVDLPVhr0huDk-{UDM@o;>9w|OjeWd)Zy8g%lxat)^ zmH=4;WD$^6K$Zbn2V^0Tl|YsPSqo$_kkvqz16dDbL0t8UAWMR*39=~2svygPtP8R* z$jTr~gRBj*ILPWC%Y&>BvOunSg^(q3)oX+-60%CjG9l}PEEKX*$Wn>_r;0ztpQkER ckJ*S6W-YOB^vKkaNux$57A7aTPh&!V1}iQw{r~^~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Jakarta b/lib/pytz/zoneinfo/Asia/Jakarta new file mode 100644 index 0000000000000000000000000000000000000000..3130bff56a04046f0a21c6a183facde3dc8bfb03 GIT binary patch literal 370 zcmWHE%1kq2zyRz(5fBCe4j=}xMH_&`%8J$pyTtA#oZjEuaVhLw!F8Ks9Jl_fF?`v& zuY-|^nT45^nT>%VMFFT3M6xh2q$_|-$gE)CWnieAz#z!LP|yHkClxU8`uK)0I03Pj zV^|17xTh0?S8xbOClUxD>_ASSaKs3lB3^efx7tn*c=3D^atX-}E literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Jayapura b/lib/pytz/zoneinfo/Asia/Jayapura new file mode 100644 index 0000000000000000000000000000000000000000..a9d12177d57cb8cf43c94b0406536e57c9ffda70 GIT binary patch literal 241 zcmWHE%1kq2zyK^j5fBCe7@M~N$k`=!aK?p*ogcnzU6jGV$i&FN(EI|VsBQvCUF!@6 w4hY-FH-sVFGlap>IXDC)4+bG5SoQ;|=RXjDECtaZi$OG5mU97}qie|p00|~BRR910 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Jerusalem b/lib/pytz/zoneinfo/Asia/Jerusalem new file mode 100644 index 0000000000000000000000000000000000000000..df5119935c5b01676e63d1fb1efd7273b8b4b452 GIT binary patch literal 2265 zcmdtie@sLcU*$tru4i|t=jsdKim4U!|Uu0{$YIH zfs*A_DdHa&W5x9x>J#0c#_99=4e@Wf;>!&oI{qxRiqG#enySL_)z8HB@1*eUOV6e~*uWQiZH(Kn`gTGi2 zO(}MC+mN2L`HCIWK4QgM@A~659w)@mlwYIsU0*`t{Urd+lkL zI`zFrNBQHs`mE{AvG$B3l{#T_h&^-10_`bp_9w27*WR3Cc2dsUI(c@rKPBmq<-7By zof^5_O1;wLpA{OeW_5|bKdes8bL*n7}=@THA%PTd|ocTJjn z$iL6Z%x@I)ath`Agw-M|X3)tVTO_jkDx3vbUF5axbRPb2SmbXR?G$W} zQVTcjkcBnZl)o&~c_eT~J-RYi7Ws~-K>iH**o0lGIN?@Ve076b6cz6*>RPH64~%h2 zTIZ=HUH#7EyDU-K+Ub; zvYuY`)Qw`f{MZTg^!aqT;{6ZQ%HsiN)hk<6byJ%2Ol7r_uioLTUf3X>t-S86@vIUx zS>4Wa;|fKsXIR!=P7-U!os(-jZWHS+e-Ss_D4 z-SX#G=wGk@)teBL-GA@6ArUIXbZ9mR-%Bkxe7JMz)RY z8`(Irb7bqt-jU5CyGOQ<>>p`>tLXsJ0;C5>6Ob+-Z9w{fGy>@a(h8&(NHdUbAnic< zfi%R`bOdRMtLX{S6r?LiTadmWjX^qtv zBx;Ka`dT7l4;2&}cvzx?gohxof(MeIOYET_E3o%JTOib>TQki2dwH3;%=34)?b{o* z{d}GE-i>W2}7oLb&4NdAo8lm#G>rv#Yc9 zviSZ&sk!QxC1<|K(xV@w_P}eY+x|l8<4>fa(m|XP90Z!&uQ&BaZNVF_G|C1 z5$S78Xsmaq^cS`0KtrDle2UAUuSo`9hGgUWVu?Q}mBe^7KXh*9pyly-3%9>$);(vg z-|L%w@b|rW&9X+b=094gjAizZCp_lYrpMS9*54rm zkUo$`TumoPD@ZS{rWvFgq#dLmq#>juq$Q*$q$#8;q%EW`q%ov3SJN8Oo2zLK=?-ZR z=?`fT=@4lV=@DrX=@MxZ=@V%b=@e-d>DARVi*)O1+C}#TtM_XWgGAXQmphxKtuE{z?F_yV3@i)`(HV?D(k+02gMlF= zfsq%?7V_~8VekwNVeobg3SkKF2C@RYLqM9~AcO?h{($QG4+J1rgJ_V;K{UwqAR6QY mkTQ@jKs3lFAR6Qw5DoGXhz9uzOapxeqG{thE}(aHO}PLbe^t}~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Kashgar b/lib/pytz/zoneinfo/Asia/Kashgar new file mode 100644 index 0000000000000000000000000000000000000000..964a5c24b7b86f70f2b83760594e894b263b713b GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$l363R|*3o14EDuNGc?OfyKu+gdxH!1jJyt$|A7F%sa!zgb=4M2`1SN&gUn^onhoZ0-`eO%) z=g?_*C`!6Wc&HBMEj<(x{qbZS2JTPgCSc%*RJ}r$o8GC%L?GFkMl6eWBv(lfKFiv$oIdoIh^H9BpRoMa=A)^P2Go0W&cj5#2YJn!C@~ z_ZabKLf9Pk?By)Ty9$e(c1Qk=KYLekN&Z#3#3{t(a{af8q$%`%=(epyhyleuyZ*Yv 
z{p>TTG21ep6#BjFni|XPb8jWO4Y5BGx0W75>`O$ltHBQmn>EDx`g<|{LBf~ZTR-$5 zf}(;!21N%&h*hnGB88%bB8H-dB8Q@fB8Z}hB8j4jB8sAlB8#GnB8;NUs+LC4MiFOK ztE0%H=%WavD5OZFXrzdwsHDiG=%fgxD5XfXsnAk|~-gqOEGx z6xj^ADZ&|)Q=~I!r-)}zZ&k~u=(nm*0Ob@=P6FjLFq{a=si6Pg$#8L)8d_XG0Z>H} A#{d8T literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Kolkata b/lib/pytz/zoneinfo/Asia/Kolkata new file mode 100644 index 0000000000000000000000000000000000000000..3c0d5abcb545d917cb596de202268c3bfda34405 GIT binary patch literal 291 zcmWHE%1kq2zyPd35fBCe7+bIb$T@YpZNnMXLWwg|jaFPL5t_ls#LU9Xzz}o;q%!CN z0}BH~bOr+l1A}h_122fp$m`=9!r%eKPN6{|44%OuAeB%MLfCdTpi+G{`j|8ss7{4RjTV2DyxZGN*9?U8-wpWo*C&03$>`@&Et; literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Krasnoyarsk b/lib/pytz/zoneinfo/Asia/Krasnoyarsk new file mode 100644 index 0000000000000000000000000000000000000000..3107809022e4fd7ff95480826ccaaa838394de7f GIT binary patch literal 1226 zcmdVZOKeP07{Ku}ohj9!Y-kz7=%~>bpFvwjnQ2wE(^}LOsYi%MQ_)1@5h50oC=wAF zK_X%!ZKMm4Pr^pJBVy5zh_GmcMoBDGBNiHwI{!PGRK&v4xw-dua&l)D^Zm!!cAzb4 z{#br{!p*9&H~Z-+`?xte?aTe{!O+aeq4cbR9+RKbJtKegRjT0Q%k=C4znRm&Jss$4 z*SWV_HF%>@=Us^D{8MEb>ImwB9eG-q9Mf=YL>HD0YEj_1F7iLri0`vRzP^>kLvP~6 zC(gx-UtBXKk4}!1-s_RFo~u%R<*Y2}Ixb6(wu|g*m1R4-GrW)U|CuCC4{1%^U8&vs zSl1U_mb%(r-H?4u>ci)?e)y6$cssP==@D&ww@H(CwrErTL215NsZR2J$auV2nah_w z;hHqrm*t&!@V9-A&zP-w_Qy2NZe#bIDbEyR_H?q(JjYKsAAjffH=CLrlWa-2kIfF> zWjvYX?lTYPABOww*XPFUV#vs#IcC_%z*!94)easRJ`w4D3CCaIFLY) zNRUvFSdd_lXpnGRZ9GUoNJOqSBo;9tK_O8gVIgrLfgzD0p&_v$!6DHh;UV$4+5nLV zU2TX+j7X43lt`FJoJgQZq)4bptVpm(v`Dx}yhy-E#I81EBxY9|G!iuuHWD`yIR00W L=khlwugv`k^y3CX literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Kuala_Lumpur b/lib/pytz/zoneinfo/Asia/Kuala_Lumpur new file mode 100644 index 0000000000000000000000000000000000000000..35b987d2fd11e108621585fbd7f4d2eab27daf92 GIT binary patch literal 398 zcmWHE%1kq2zyKUT5fBCe7+bml$Z2bCUA!yZZ^8ktCkdxEKTSA2F`na+NA3r)iC;Mw znV4Bv*;v^b7~;b~8WUp~SQr?R3P2{LD=_kcNDw=-0z}qLU=U_tC}?01_wfy32nJ$b zM;|Z|9KzrQ#J-UsAX8942w~@P0yTh~Ed2wj^FI*OnT4(b(IDr8Xpk2`G{_qu8srra Z4e}0%26+jj9po(rI{1qV=viG0E&vKxTvz}A literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Kuching b/lib/pytz/zoneinfo/Asia/Kuching new file mode 100644 index 0000000000000000000000000000000000000000..4f891db77dbbe03743ddeffce02a0b552f439387 GIT binary patch literal 519 zcmWHE%1kq2zyNGO5fBCe5g-P!B^rRl+Ag*UyVBh|_E)Pn?Efqsa4>dF!@;Wy0uI|f zU^u+(@`a-c!VO1fum>DxbZ0nTX?5Ym<1-8=qxWApb>x4;X`9ahrzb`*T=K~MAU5$G z0|O&76AENuW@TVVF92G@kXgaN!oW~!z{ttKP|yHk*G*s$0JBAWd_x$V{DUAwa0r7J z5c@`kfV9Cu2nqiB0oC;%2tfV=(I7v9Xpld_G|;ag8suLv4fHdJ2KgII1N{!7LH-BR yz%T&Opl|@wz_0+(pzr|EpfCZ^pl|`vps)ec!0-XlpfCbCi`L=91q?V{3oZcaNVZ%6 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Kuwait b/lib/pytz/zoneinfo/Asia/Kuwait new file mode 100644 index 0000000000000000000000000000000000000000..5623811db35aed90b7aefb7518cba43bf56d43e9 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$hnnh+QPudz@YO7B&B7*z~bW@!r&Mj0^%}+kYL&m Tunq=>|3HA>R4$)0m5D^0=lgKd` zD7ndqNn$bDFe=)r-};`0$->~h-1l5Ax#6BCI5aulEPl+U-f*)w1#dCTi{T)cWgu*zI8pnKAtG3j0>mZOghu2n(Jm) z7GCaXMJG$L_-Iip*^bE4wRTZ9?@hR8yuvfx7A+6A>J|M}Q7vQIG>A=>};B=?7^D Y>Bwja>B(pc=?ZBJ|GTeC#rA2gPZB8gP5=M^ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Macau b/lib/pytz/zoneinfo/Asia/Macau new file mode 100644 index 0000000000000000000000000000000000000000..b8f9c3696ac7532d45d4b7b4c395d9a7f64af28c GIT binary patch literal 795 zcmcJMJuCxZ0EWNn53S#p*ffIhCDKGJE`AbpuqcBj5=m7;1VeRVk>)0m5D^0=lgKd` 
zD7ndqNn$bDFe=)r-};`0$->~h-1l5Ax#6BCI5aulEPl+U-f*)w1#dCTi{T)cWgu*zI8pnKAtG3j0>mZOghu2n(Jm) z7GCaXMJG$L_-Iip*^bE4wRTZ9?@hR8yuvfx7A+6A>J|M}Q7vQIG>A=>};B=?7^D Y>Bwja>B(pc=?ZBJ|GTeC#rA2gPZB8gP5=M^ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Magadan b/lib/pytz/zoneinfo/Asia/Magadan new file mode 100644 index 0000000000000000000000000000000000000000..e09c4dc2e2fb483baf6e7b131b1ff1a0c16bbfd8 GIT binary patch literal 1227 zcmdVZPe_wt9Ki8sZZ`iMiXi{AR@$Fb_hHq5h5L=gHRnLh==e{BG~hNmn1}ojy*5$^LcrAx5IwlS^XEz zhtwad)SPg$qUPp#{Z+F)tqc#pcsM%0$-ggER*}w?7hQZ;{^LPM#lrnm<=jke^RzGJ znGDF5Y_)jrZIrEpPT6*4QL56PrTX}L*&d$~U*x6isDC0go{a1)y(xazg!V6v=v|*i zRPDe~RXcksSNA$PRzIHByC0p<4R?~d@p`*%y4<7#XRCDZWI{rh{IaL5QJQ;;Wv_RG zgo9rsy!=gCJZ|0cVOI7pE$9Q26B3zwqgx-(=(bnSbo-5QeK2!dcbvPcJFjIW+C8AV z&fJniHT}A~OXP52tB(14CHD20^f;TP=WRred@PgrbFU<(LpnLMB33**uN=;T{N*ZK zbFFi`3Y=>X{`OqtR%+<9`7zbtuyQFinsXE@l^NqXzptKfzWUDZH#tcpEtTwy*;}8* zcPU4H+0XgS`iJ3u|Mj^QvlucmXpR{+GH_(*wr23i@R0zJ2#^qv7?2>4D3CCaIFLY) zNRUvFSdd_lXpnGhO*}|IEF!WsAt5m#K_O8gVIgrLfgzD0p&_v$!6DHh;UV$angEdq zZB2+sj7X43lt`FJoJgQZq)4bptVpm(v`Dx}yhy-E#I`16BxYL^G!iuuHWD`yIR00W Mm+&`e2nI@i0{^2NF8}}l literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Makassar b/lib/pytz/zoneinfo/Asia/Makassar new file mode 100644 index 0000000000000000000000000000000000000000..0d689236dbd55f2ec72468fc855e292d3dcc5b8d GIT binary patch literal 280 zcmWHE%1kq2zyPd35fBCe7+bIb$XWO)cET>Ps}oLl$TnOGdso1~$i&RVz>xm|q!LK7 zFfbG}FmNz1)JLxIa1Q~|;uzzxJ8w%`vC4+#DPfyl!<0wB6t-hKjz2DubOgIo)uK`sW-AXkHEkjud| o(DfiQKt2G`AYU*rGBYu=05KEu|Nrt}X)=Aoz`zCcqOJuO0DoX$@&Et; literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Muscat b/lib/pytz/zoneinfo/Asia/Muscat new file mode 100644 index 0000000000000000000000000000000000000000..53a22190c4bde74760d0f1b995f882507fb71197 GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$XWPl+5`qh1_rYaASnw61{NRR5C-?)5D=Fkgap%m UfORl1{09R3rg8y|*EQh+06%#a!~g&Q literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Nicosia b/lib/pytz/zoneinfo/Asia/Nicosia new file mode 100644 index 0000000000000000000000000000000000000000..3e663b215327d8899a4b3fbe4623f066630b97b2 GIT binary patch literal 2016 zcmdVaeN0t#9LMoP$d(r$zG#Sgc`(Toy1)g*#K4CL@PVtxxEZO$QQ?9`lqdwI;wG}@ znER@1&Q)7YVy!vU%^o&;nC6^rj?PxDwZ=MWtLB8YnX`1ve($sOSAV(nPv`8O*V)~@ z|NY*d==!a(0_$H_g87CI*Ix7CeIhUKzn=c-_>q%dVEC|`c(>l0@Wn-YVt=tS>A+9! 
zOP)0;>}G+wCCOq zIP-oUv9rFt>11E{+J559kQ3@ZZRhlza&o(m*m*k+JK_2^JHK|&D~KF&3rp3TpR?U9 zN^A8N6m-afu`XE{?9+wAo22+&yDs{?OrH9oMoR|jwDjyEUEH%wm%Np#OJkY3th-pA zu1L_b`k*{B=T|K+8&%vhwxd*-IR*mX5g`|7o>Cad);|zK;9TStGkY!*WH8f=$_7g?K;?{) zl^u>7XIZJUtnvRm`KBrowaUw@c|`eDT%7-iKEMJ0lmD6PzPUK)ymQ!*1CJbfGtz^ho%=CVpf9kP$$J02u>h5Rg$oh5;D|WFU}{ zK!ySt3uG{S&1fLQ;cLbN84zSdkRd_F1Q`@$RFGjo#swJ|WMq(`LB<9d9AtEm;qf)& zgA5QdLdXyyV}uM6GD^rWA>)J$6f#oCP$6T53>Gq4$Z+|Z@j?d7*Nhl4WXPBygNBS6 kGHl4W;s18v0%q@W3Ru2Sq%gNAH=I`(isXm=xA}p;0b|0?X8-^I literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Novokuznetsk b/lib/pytz/zoneinfo/Asia/Novokuznetsk new file mode 100644 index 0000000000000000000000000000000000000000..11768662734db5b34b3ca8929f787e8483d4d0a4 GIT binary patch literal 1248 zcmdVZT}YEr7{Ku}H%qsuZkEfnS}t3D)oGPk%i6SMIyJJlz&2QIZ5*WI|mO5qAFX3WU1pvS;l44u^NU*!j=eexN&| z{uqxr;mtIdH~Z4GS#J4)x%>SiL2vkAW|l9jN{aetmVA4a_I-GnnLX@Lr9(S1{=puZ zbGuUlH(F%wg{aIsRV~5ZfXv@HUCNRZ5{iz?f~pZI_dl0~o`({4ebV7CZ}p5rk5P)(RyE}Ub?GKB8PU%vifdW-Vv7-fsIld z2}$jbN~!b5blvM|vg&)8UOhM_(P5WfbJL^iAH3IV&wSMDu0PSS{m*p#^r$o>?&`+9 zk7a%NWu0gokPU@LbyMiPG>u)7W@oQ7KRqlh@7g7KXREXhortwvOysQOXRRF0g8a)> zIO#QIs;j^``QY#SqGGp8sm(d_V=8NpGW%58QLI!~ihX|leZqPEnXZ&$hQ0UN&v;wg zES2nt+shV9wQt&%UoE~?Ir3xg^NaNl_x$zO=O)Z>-^iVF%)KLb&v5^?rU9e_qy>f^ zkS35WkT#G$kVcSBkXDdhkYkam!MkcMncM@UOZPqwBhhOUsdkiL+{kj{|Sklv8y zknWK7kp7Sckq&K5i%5^Qrb(nrq)ntxq*0_(q*bI>q*V&;xap4k30JQc{;LCFP;8vgiA*Am|X?`n~M=yu9qayuYt`_{_k-f zt-626^55yQR^NzPYp%3d^=JLo+P)>$y2PT@5S_CcgR@qX?}fF#`l%IgeJ}#^uZ<0F zU&-c6*Jbn6nArGiXfF6LZv?-mL`yy=TSrIbrfVZ|^Wa5k96u%7j^0wC3+L39_CdAv zK#$ty?^WSYn+ku6sE98mBQtes`+^~N6uzoxak<=i&nw#}=H;%D1-bk7w3H`iIo%bW^1og#j}Xa%{-VM>CR{?KGYUiwde+ELk^ zG)1bX*FFxKye}Mj>>K+q|6!w_f4;V;hs`3}$d>tQGesmNBB#84Ab6jBvZ7E%{d7*ZKh z8d4il98#UFE)S{CRu_m=h?I!bh!lxbiIj=di4=-dij<1fiWG}fisj&6O z$}=~7Sas&ZdBqj8^*xp0XZ zKWf*K`-gS)jkvD4RIR0_gSxi0Sl1<%Ygx^AT_2p$a{pW1koQ^xo_Pt(PRqurkEx3D zcTyGO1Kv%e*R9~tplp8FFO|K$vgKNjZ0+ol(1~`bI(l2f=g#Q1+D_emphb6-9MVX* zN+a`8jr#XXbh1cy&V{6U8#+qgEA z_erdNSR1purK#+$Hcda!xcj2UUtQIG?^?C_ahoQ_E=clbOzq~+AOzMbDC@ zo-Fs`i@%+7JeJk)()^fKW6Cn;<7ZvVEXzK|IkPX^aKG@*?{6lOw$+k6=xkgn`6?1cF3@go4C^ z1cO9_gyU%9K>|V|ax@_!F|h~=i3$k|i3Z5!bRdm0!AWsG$A7~JDQ-8sFARdxRJo|zluDUzd>ar F_a`$?5B2~6 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Oral b/lib/pytz/zoneinfo/Asia/Oral new file mode 100644 index 0000000000000000000000000000000000000000..1467cafcc983c60e8cfaceeca24b0a1f5b59a3ef GIT binary patch literal 1100 zcmdVYPe>F|0LSq+{>gPU>$2^xt2LFi__u9ymo>F!J4nz!lq|Mii$`0J`N zZ}@Pvnh(#__s#Jt{PfG4mjx#feVjiMTC#(_*LHB@`}5GZ^?Z0WYFDq^$w%JhRL%Tt zRr_p29evWLj@`Yi?3-=s_&|d?k%_329hIuCv7k;xe(BQ{YdTsQmeG4tOZ8V4o%+uo zV-3r*cH`nF*);P(Hc#edOYVg{b8AAzh91bXg9|!7_DG-W9M`Sc8#+-trjzk*oh-OI z6&aPOuSwnZGb7Kx59#*R7TGbEkezQ2%L@;K^5XQmbcQ!%I#;DHCDt5a6&05h7n!^C zz|LLfEAv_Ya(_kT{`Wy4Vs+-HfoVKX$&gX)YE-R^&BHB5NXxdYV;{Ws!A} zg^`t!rIEFf#gWyK<&pJ~0+0%j5|A2@B9JOPO&LfXo~96_5~LKQ7Ni)Y8l)Vg9)qAD yq#~pwq$Z>&q$*ES7E+g|DGaF$DGjL&DGsR)DG#X+DG;d;|Nj#GrdYQ*;ok!M=j9&& literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Phnom_Penh b/lib/pytz/zoneinfo/Asia/Phnom_Penh new file mode 100644 index 0000000000000000000000000000000000000000..e8e76276a657ffea33afc25ea56864eddc7f43eb GIT binary patch literal 204 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZJkqO9~Ij6jh%8z2WnvM?|t6)Pb^?yWMXDvU?_b8Qd!=? 
z0A$xqVBi3gykL^W$2Ww*J2-^F%NaxfF#?2;VEYfKw*No?as`M6xdcRmTmzyhcM%uR IrMi|}0N&#}>i_@% literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Qatar b/lib/pytz/zoneinfo/Asia/Qatar new file mode 100644 index 0000000000000000000000000000000000000000..3e20373990e3da7318761d709165d6ede9ce4332 GIT binary patch literal 209 zcmWHE%1kq2zyQoZ5fBCe7@Ma7$XWPlt^rHeqyr3$ObiSL0U&t`2L=`f1}y^y4jE!_J%BdpJ3?Ra7O)c zHOL!2Tv_?>9KR{YlVz+qRx)w;TEswymeZZtSSqO^FY7bM&1}) za+xlZtL9BUF`)DFanrq+);njunq6~^x~Ck|g_oc7?(s#v=fSMjXXf?Z(FN0+ed8-H zR8w0MlDlr%FL!8pxIP?-{{0RsRVc~-LglrpRcc?6GbnlTJ}51HBj2u2>eNMVmHYO$ z=l+9%>N_yxj-!D(JSdNH$fLVFPjJVNzvT@6VXn_VfAvi+W<#dqHS-}8A~QN=N@PxC zQe;+ST4Y{iVq|7yYGiI?a%6U7dSrei0VD$?1*hbIB;k}SoRS8T2N#JTnINekxgg0P z*&yj4`5*})86hblIXNXMBrB(+h2(`KhGd4MhUA7Mhh&GOhvbL?Fn9%rF!%yF2oOTpa+pIze?YbU2ZFF|oi{)<$T1)q Xyt$|A7F%sa!zgb&a_IiUJf_ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Saigon b/lib/pytz/zoneinfo/Asia/Saigon new file mode 100644 index 0000000000000000000000000000000000000000..c14226570b1de09573c3340af718d12dac021c39 GIT binary patch literal 373 zcmWHE%1kq2zyNGO5fBCeE+7W6MH_%bM~`#E#KqcNXHB0qT#P(daLL;4!&Rf*0#CL* zX?UwYgMrKTI0FMC6Eh1FGY~Q`qyY6WFeHCtU}0cLDq!GbU?^x{;ALQ_o4_Cl5%KX2 zVF&;bp3Xq*62jmW90Jw^BSJ`U;t!~%|3Cn8Du@O-7es@c45C5K2GJm=gJ_WRK{Ut< TAR6Qi5TJoqxPX4tHRl2Vh{9hr literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Sakhalin b/lib/pytz/zoneinfo/Asia/Sakhalin new file mode 100644 index 0000000000000000000000000000000000000000..ec62afc5995a962bbe321611d86d695c3b33517e GIT binary patch literal 1227 zcmdVZPe_wt9KiA4+Lp`klH^uP)21z3>Skru(%hEGoRZ|a^oN4}VO}1hLlDRVWgU{} z&}oQuNGynuh>)?vK*0_cA<EEGCyax{&L_Obk4T9*UdBV*k%*|_`HpdO?<;?8Ss9Fp^wblps3{?4s^Ip%z*WQ(TUA`|90~-p9`Hc&K z0e>#6Ha)IW)#K|_&6rbdzPhAphd-;j<8RfL)VvBrXHqv>P>}tT@1-M~lg`muIgpu{o7UMaXVe52W6hWUv7`c=nM0U%E%pY*{AuDALbnuBAIEaES;q%JRiF+m3{qb z=}iop!`y?hRPjvnMIsI}#aHG`|HEpRe|_qQ8CFbLGiBAh&AKTor>xzsSv_U_6ao|m z6bcj$6cQ8`6dDvB6e1KR6e<)h6fzVx6gqYd9||D`Ms^J)3MUFF3M&dN3NH#V3Ns2d z3O5Ql3Ofot3O~DsAcdh_Ly^Lffh2_`15E~=3`7~2GEimUN+HX@mO__-FNH7zW4nej dg|l5ln!=hwo5GucIQ>tUmzzI9x+xMa{|QDO5mx{J literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Samarkand b/lib/pytz/zoneinfo/Asia/Samarkand new file mode 100644 index 0000000000000000000000000000000000000000..65fb5b03de72200233f74962f865e3e9e9e85aaf GIT binary patch literal 691 zcmd6kzb`{k6oBvR52}?|N?T8Pe!e0Skq8ozb`e7*FX1>@9aOz+Bw z=_}f%f40>OjMmMd{b+^~x5kYoJon|@%bj04xw{8%xNplfi8|m?w*iT9pcoQqoEnq%4(5X@amUtI;&7{)3;_ zuZJHJc!IcMj4y~Yh_`^^4&o2u5aJQy65>!}*0sg05#AQ=slkrc$RETl_ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Seoul b/lib/pytz/zoneinfo/Asia/Seoul new file mode 100644 index 0000000000000000000000000000000000000000..fd91d5b729aaa78253bd439c36a103fe88ce33b2 GIT binary patch literal 571 zcmWHE%1kq2zyRz(5fBCeaUcewSaLHNy!y|vq4^OhU|9C1a zk?|~vx#GE?&x;rO1r;xzf5~{oA5`(`__d7J?@cP+EZ>pw_PkidyY^WbA504?6c#5; zP|Qo8pw#R%LD|o90s|uxGYcat5@cZmLI#F%4xp_JkLLg z28NymjDkMCAq?KZAq-y5AOeWJT|z)Qkw6Fue*FQ}_a6vAeg@GXe}ia{-$69U{~#I^ z1|S*~4j>v779biF9v~VNCLkIVE?^oMHXs@lK42OcMj#p#P9Pc-Rv;P_ULYD2W*{0A dZXjoY!VW}(!Vg4)!VpB$D;&9i;jC-P1ptE4#?=4- literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Shanghai b/lib/pytz/zoneinfo/Asia/Shanghai new file mode 100644 index 0000000000000000000000000000000000000000..dbd132f2b0bcc8beab08e04b182751795c853127 GIT binary patch literal 414 zcma)%y$%6E6ov01AsdnK0RGwCh(;k=S&0xTQ;84_wi_?7<`F!PCs;~}D7?f(B^vIT zl7ch2`)zh+C+8E>VAZ0p#Q6&b$@1Vmt@shmEEPQ+dAwxQ={D8*Lz@c0P8P$BDh-yh zJRhox=gVq;O|{%Y*PQ{??_KRC8|0oVI%a(=qV1LMrEqU0h@_&_Xe`L@@k|6ZIO2E7 
z93L|!ALb9D7bk4{9*EM0TpUDs5CS+32?Qb_WIzakkOCnFLJol-2uVX01tDw5!t~*5 M#r`q2S-#n^-;pXt<8 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Singapore b/lib/pytz/zoneinfo/Asia/Singapore new file mode 100644 index 0000000000000000000000000000000000000000..9dd49cb7a72f1e0708e92fa53b7e0b4fa001553a GIT binary patch literal 428 zcmWHE%1kq2zyO>;5fBCe7+bml$Z2bCUA!yZZ^8ktCkdxEKTSA2F`na+NA8F3yL>yu zCVu5$WMXDvWn*RMU|>j$1?dKoEDQ`u1q?uubOlCU5D8*uR)EO52@Jwuwm1VrK?8%h zk8cP=FcAAX`hbbx5C$(G4t5U#nS};I2)msNs0HMH=^s$N|AC;+EO;@92Kfa7XLi^WED*K^z=CmwSGPb8j-_e6a@4 zo(ZWxMw!{+W<<=*dc!3%J*{e;fAMg1Zew6is=P9juP8qEzT(IIp2~%LsjAuO{H7^? z$~)O2)wx>n-CZx6huyN};-b`KK1=P<53)7>M*QKIvaR8X)OoYAz3iFWGfM1JS%$XLU8M(=ubOgG+6>ZU8*y7@x0ZaGz>gU1sRIvq}zAb92h&E^sbS z;i_v*k*C1DdgE{F;u589Rhu7E4L?vGrAG5Evy&ZT?aV7X?617D{B*7Jej;h9WN*x# z1}rs@j#_-7ayi3ZJCpSfC;WZ+b4zA$(#UDE&50wYj-0%$Ienx6qynS_qz0r2qza@A zqz@q1s&U|{mQe*f2X-3|A7JI(EFMe^gs+E2KgTzh$+rtM?7zFKcN zO?rEzPw$k^ncb>v+4K6$eu+=~acwG)@vH;aljh(fM}lh`>hPpVhi1hbb#}?|e5*R? zAC%LvadXzPsLy-W)kW#OToz`ktE>*WPMuNV=xH5(@td1-zufL7sk_xgeZNp<9$w$& zabm?h1q!t3eN*x84U+IMtlXg??b%#1DZwe7>V#BOv@7OMFOiP0{Xe3#xI_j<#mzG+ zIr2%0x7LxG#yb13x1!Qs>-TYfUoFpmJF?o_*Uvue>E1Ny$co64$ePHa$g0S)$hyeF z$jTAz(#YD#;>haA^2qu~0i*&_0;z!%L8>5SkUB^qq*6p%3aNz@L#iR=ka|c#q#{z1 Zd8o;uC{h(Ei_}F5%l{v3Vp;$I literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Tashkent b/lib/pytz/zoneinfo/Asia/Tashkent new file mode 100644 index 0000000000000000000000000000000000000000..1f59faa5344c42393ea7ed4d8dd2c749d409131e GIT binary patch literal 681 zcmd6kJxjw-6ozj!Y8w#?qD@TI`k6`*ir}J4Tm+#)!3a_uii1*c=;WjzxH|X)90G3Q zqTnWae}IdlxH#0oLAwa*;KzB=3WAfH=W@?;c)5Y_oS;}-M(89{V(rjGd&DZ}${N!gYZ_=6SV`02>kyA!tXLR3O@LX<+ZLexU^LKH(ZLsUa_LzF|bM-=rC{Sjpa N@Sm)~k|j+{SzlSIh6Dfr literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Tbilisi b/lib/pytz/zoneinfo/Asia/Tbilisi new file mode 100644 index 0000000000000000000000000000000000000000..0d7081e975983fbb2425df33c552d0885ca14664 GIT binary patch literal 1142 zcmdtgKWGzC9Ki8+nrbcyPHOX~@lUI!sjWV18&7TZwNOi;!l6e%QOIBmfg>B}n46onEzj=Q z{DVom=kA!@d#h~s-59p}uXwg~v}oJo*l4v+Vb*Lp4t)Oz#fu+-gF zEB*39FK4t@&xR*gTJ@>9)$sIEO!qy>nEu&M`b>4joSk{2%~;)-`s>=jRLk<<(3c5k zOKfZ7ZrZl#-tNW|E_W)KO3G*R?Z#6|71!hsVT~!(p_QxDIp2|g$}{X6>&6M^@|~TW zJL!$Ryp_FtbNzhue4v8CXc=KQaxqZDLp-vHE=R7$?vFM2_ZUye527*tSpVgkT;&(D z4l-6`w7h1#$cT|KN6M&?aU&x~#*U0089$N$k^zzek^_afNK!~vNLolLI=}DZ%rAFajNgs` z^9d(MiFtA#(rjKjE1LB4kq|xW{xvzfF2)|*ZjTEdPqNR+E7c)snJUyersu}YQ<35I zI&$QqTKf2cUe>=*MfD8I=*un<)9$C2H=k0m)m9l-)Fu)XKSw zWa9L0k@&@>tsh=1@$$4>H4rS4dS}bkH_nSSovAwcj8-X)_0m@PUDyk{_1c_Rm1n%FFBS39Dc~@>~SS@nOMs@CvDz!PQQah4AsV&i0 zr86u->HvKFgi04PuukLKl|aRJ*tK z=%P%EDi&|$o+!82yC77T%419{=%U|N{0v37~tB@SITe^_|}TBYafOi@4Ds}GIX)Zxb|`bghH)zDKT z8!t_YqitQfsX0m=t9m7m7ZnP_$JgJ_-*?(S{TN1^F#mh5{)Q1Rj2VWJp6e527{boI zO>)ok?2S&tX`UI5EnL<+`Pnar^Urg0n_u_NZSv>urp$xcA=BeA^Ftru=k@S)L(fbN848OX-VdKBN3a0

LcU*$tru4i|t=jsdKim4U!|Uu0{$YIH zfs*A_DdHa&W5x9x>J#0c#_99=4e@Wf;>!&oI{qxRiqG#enySL_)z8HB@1*eUOV6e~*uWQiZH(Kn`gTGi2 zO(}MC+mN2L`HCIWK4QgM@A~659w)@mlwYIsU0*`t{Urd+lkL zI`zFrNBQHs`mE{AvG$B3l{#T_h&^-10_`bp_9w27*WR3Cc2dsUI(c@rKPBmq<-7By zof^5_O1;wLpA{OeW_5|bKdes8bL*n7}=@THA%PTd|ocTJjn z$iL6Z%x@I)ath`Agw-M|X3)tVTO_jkDx3vbUF5axbRPb2SmbXR?G$W} zQVTcjkcBnZl)o&~c_eT~J-RYi7Ws~-K>iH**o0lGIN?@Ve076b6cz6*>RPH64~%h2 zTIZ=HUH#7EyDU-K+Ub; zvYuY`)Qw`f{MZTg^!aqT;{6ZQ%HsiN)hk<6byJ%2Ol7r_uioLTUf3X>t-S86@vIUx zS>4Wa;|fKsXIR!=P7-U!os(-jZWHS+e-Ss_D4 z-SX#G=wGk@)teBL-GA@6ArUIXbZ9mR-%Bkxe7JMz)RY z8`(Irb7bqt-jU5CyGOQ<>>p`>tLXsJ0;C5>6Ob+-Z9w{fGy>@a(h8&(NHdUbAnic< zfi%R`bOdRMtLX{S6r?LiTadmWjX^qtv>vnPzuD=m$xbahL!p+k^8g8@83fx|O ztKrVG{{nZr_cSmvGeIE(L)`?RCJ+fUp=SXjCq%?6IE2B=8AP~*FfalYfN%(Qr?3L0 zKo|sgfEdIU`~l(t!G9n~J+!0%M3Ps}oLl$TnOGdso1~$i&RVz>xm|q!LK7 zFfbG}FmNz1)JQ+RqN-ITq948|?NH8AwMZiogLg1hBJa?ckQ)q$;f8K( z1cT9Fu=yCoXtBs8Iv6C}zp{Qle>+UV# ztbEq%UT;+W)2G>Rn^cWgLD_T?P_Cm|*}Pq&+#5ydS$$V63y-pO=0UYZE@iv#LUlAA z%FfCI<+Y!i-r~9Fdf7L-@ApN|b^NI1&WlrNCrP2Pd#gCtRzN}%Jd}X7^ zR*}sj+eJ2vY#G@!vTbDJ$kvg~Bily`AQg}jNDZV&NvjG&8Ke$FA*2#RDWn!dF{Bzp vIiwy^5UGfiL~0^Mk*Y{pq%Kkzsf?6HY9qyw>PUH{KK}n3;NY$B4?DgA7D=#x literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Ulan_Bator b/lib/pytz/zoneinfo/Asia/Ulan_Bator new file mode 100644 index 0000000000000000000000000000000000000000..61505e95dc53a4eee7557b0cbe91339a3cf3cb01 GIT binary patch literal 848 zcmcK2JuCxZ7{KwjbWxFF@>Q+RqN-ITq948|?NH8AwMZiogLg1hBJa?ckQ)q$;f8K( z1cT9Fu=yCoXtBs8Iv6C}zp{Qle>+UV# ztbEq%UT;+W)2G>Rn^cWgLD_T?P_Cm|*}Pq&+#5ydS$$V63y-pO=0UYZE@iv#LUlAA z%FfCI<+Y!i-r~9Fdf7L-@ApN|b^NI1&WlrNCrP2Pd#gCtRzN}%Jd}X7^ zR*}sj+eJ2vY#G@!vTbDJ$kvg~Bily`AQg}jNDZV&NvjG&8Ke$FA*2#RDWn!dF{Bzp vIiwy^5UGfiL~0^Mk*Y{pq%Kkzsf?6HY9qyw>PUH{KK}n3;NY$B4?DgA7D=#x literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Urumqi b/lib/pytz/zoneinfo/Asia/Urumqi new file mode 100644 index 0000000000000000000000000000000000000000..964a5c24b7b86f70f2b83760594e894b263b713b GIT binary patch literal 171 zcmWHE%1kq2zyM4@5fBCe7@MO3$l363R|*3o14EDuNGc?OfyKu+gdxH!1jJyt$|A7F%sa!zgb^}zE=dpT^ z9j#S=j3s7=n-McN>yiO8-Ff>y{p9X*wKCW(g~ffDqWt5pi@x7#DW1LASu!(~Sv48x z^i70h^+=icZ!DKJgHBm{IxD4ppQLQx8(EilA%VzKSzq~3%6&t!q2QtfUE?~K9o8E^ z4y%g(J*r~*U}n>^*3rtbKE3(geqD7frK`_1>zY$FI&`>HhYuvB_GD1DG*wGomq)hx zmq~s2v(*3mDh<9o-SBQ&w*8pZ+b6~)GV@Y5-k;J<=|{Tx{FvS`bVauuxvryUN2Il_ zU&jtzmYwCjx-BNs?rzlaK$pb7^hk%ZPC8yiWY>p6Nj&yTaUtUcTGqT)6SKwZ~ha)P;8Qiz*pVE~U;~WgZ$;9;F^+97}&+vom;m{#|DC zR&DKBs_uq1#nKJ_e6bB3}P&`mf z*flOFHYh$QMkr1wRw!O5W+-kbb|`)*hA56GmMESmrYNrL8e0@!42;<|&M4L>-YDiM z?kM&s{wM}14k;EX9w{a%E-5zc8lM!Sc8ybtRf<=NS&CbVU5a0dVTxmlWr}BtX^Lx# mZHjM-al6Jj#kyVNonoHio?@TkpK=E1|8fqzoRg|>$omV4cPwuJ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Vientiane b/lib/pytz/zoneinfo/Asia/Vientiane new file mode 100644 index 0000000000000000000000000000000000000000..e8e76276a657ffea33afc25ea56864eddc7f43eb GIT binary patch literal 204 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZJkqO9~Ij6jh%8z2WnvM?|t6)7Yc)Y^eBy7k1oZcDt=x{lXc?|-fh-bcE<{I2@kuf+FtN_NaoRx}Q# zD;nRP&+nY>ec^w0S#~`bkfu8)W%u=z1TKc8`E)>n={{|_(5id7I(6?6kG59VXj^be z+rEpodn=`V_Ji*GwkZ2^)7nueNay%_>B{CLbmOHQ7`Z10&x}j>T2>>`o6>!1R1Y=u zOSF4P50@k)R+rJ(=W`l&MKt~?HSxr+$(gXEZmm@-kzEjn%UOK6OIEyAt#&(I zD1rr4n0~G@n0~Z4r0~rGv1seq%1sw$*1)p65kb=;zfk?r~ z07*f~fXTqg0LnnhfXcv10n0#3fy=;40n9*b*FdIVwrfCBP*Y%2a5I3@{|b2-KZB-V GbJ2cA(evdBb{-g`z<671*s^wL~8uUNVih^4j^1YJK=OJ12_EDhn z=)OSZ=&|(Tp`Pbe_s&W6jb5p_uvcnNb<2`N30b-$D$BO+*6_iYE|0eBicLXXSyHWa 
z;asizK2z)cA%JMT)<$;aBcpQIZEZ|6LpF}>I9k7<&9#-3AGJTr_rp`0`O)Dxahz4QBdO6*q9|QUTba#2S+-PW`De8@GbcQ>m!eP_WJ1xUNDAr@F)ApE zCQ3=DF;w*=`?j1@A5yo_R8ujVudZ$}BG#4HOr?s4M>Qb)e+amyB;B7+5iW zGUI=imX#0srSwjplwIB{tIq6{z|kgI-CZqf5`HOf_R89ttW@}?WS!@|1f3oo{Pt5< zPEMz)P7S82-i!pQ$8IjJ85z~vt0(2&@W;A~BFNO(wmwkAL% zLL@{aMkGiiN+e7qP9#tyQY2I)RwP&?T3Zt?60fZZ7>O7O8HpJQ8i^VS8;KhU9EltW r9f=(Y9*G_aABo@A3;;3&wq_8JVL%1~846@D@V_1ow;3a=w!!@iZF(jx literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Asia/Yerevan b/lib/pytz/zoneinfo/Asia/Yerevan new file mode 100644 index 0000000000000000000000000000000000000000..fa62c249d07fca0a0c76926d4d1e15b4072b41dc GIT binary patch literal 1277 zcmdthUr19?9Ki8&)8+n{1yS2v_Rnn2)W&jSmDZNdWo;ogf})2LWze68O9=H46l6U` zR8)`@^$JZN8T=q32#N@b{!-_AEh>Vb*UsUd@41Jwm+iB{mR&nz z>W@)nPIwq;^RN#bHp{i(z}n}JE4ycf&lhJ0#-o1sSjzv=x*2$(iosGmT0MHS7`k&> z<_sQ@nhSk0_js?&JGe>a@6F1Btw~v!i%4y2wJd7%NnPl(EcU&Wuq&v;Z@s#H;$5o# zna7SidXQ?kcPqMNXu@v1GG;fOxobD~->{eVUACh;PT8^hc5zwTNoncXBk`Jjl8EIc z@ueWi&~BZ4l91(}a(cx`KvtHTb?eo*ZX2GVR~_@~)dO#Is`rCV_gBf9Oi8zIe=Qw# zBRbRmP&!?gbhdU#vag1vtL&U~jbD^?PY=lY>xU#adRFI8Y?BRx`?a#lrcBOq*DrIb zyTa}9R+#CVR_XiOR4Q`Ye2=Ojs>+q>?6y2gB~$Dt*B>XGn>*7vw`TsnH#g0>xoOVX z=kf)$In(1T8w)CJUekHAVOVB3tMm4ADg46^|Ni^OzM2653IdLSK*2x(;nbj@z@Xru z0HGkEK%rovfT5tFz@gxw0HPqGK%!uxfTEysYG6@tIW@p2$SBY#*eKvA=qT_g_$UA= z2q_RL7%3nrD4iOZ6r4^CPzq8CR0>uKSPEJSTnb(aU(| ZQv;oXodTYMo&ul#SNOf=FVfWF{RXF-7!d#f literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Azores b/lib/pytz/zoneinfo/Atlantic/Azores new file mode 100644 index 0000000000000000000000000000000000000000..1f5325324590a123e9ec7143c20c9fac1e471928 GIT binary patch literal 3488 zcmeI!dvwor9LMqRT4aQ`w4{+=Cae~_*%7z zzV5YHDwQ(r>zdh&Xq8+lb0c{Y{nzhze!s`gZ@>NH9Ixj)Wxylp zan4^JHO(iyctK%-gcOc!Lxyb>$szgsym|vAjF;S$S{Fo%(*}B3;=h zpdZC$=&ITWbXBliKmKu(7H!KEi^wi>09dyh7EGhY5w3M9iozm)`*oP_etqFxch}Jc?w*e)xO?C8{Cx{v^uK-SN&mYi$NAr9ruqAa_4h0Kb@LB& zZsQ+}YwjOvT;D$&k?0<&(b_$7v6*}H`1S6w-C^$WEx))YJ~{6GkiW-0`TAD($LBtE zE2q5Uo_f@Ce|o6IKb>6YpXsvPKN~;K|2gsn|6JW{|9t3J|NQA;e$Ct{srCE~QhR!$ z%J|k=C#{)YlhV|y+ciwDy}g-F{H>}U|A__|a z4VD!K8oseSaAVHAK%=QI1Og+o12+vE8))2XSRk@Pa-d0c*TBsUS_PV3-89gwN?)(p z+0I_`gYjNec@yuJjp5#{E6;26qN94->~FNi^b&13zEES*mTT;wT@shMQsNU<%kBMM zkyZ`oO6#~u()v=Cw2hdpZTAk9b{8jV`!!wUj@_x+VaX6pSlwRloY6@;<~C3_JyJVO zZzr9*RnabK5z_VM! 
zESs;rM~#v`IZtWd`<~N&BOlh}HX}8qS9k4S?;gE3I!f<77o(}+-6Zv^YxMrJ&1Jxc zXY_&c>N3!)BZF3+kil7}WytI@d2mR%44rmNhjsi&9vW1tX^rN{!wChNUhM-N-cWV; zkykb2Ql>t#d9psbH$_Lh`GAhxG*%vaHbF-%NtV$WjdaY6Rx-A42gyvYC*xvol<|od zWI`QBCbkUMiIw|hQmu13Y5NA5d~%mgS+PVOFI%ls=d9Cd1#|U@$%`~=R+ff@UJ-g_ zm4ErHYQA^*>tFp{*j3g3_F28g)&JEq7(86*IKkk-=FXMD;Hk=xYQbQ|cTTO#e>R)r zgw(a~=C`LRfB&shi%*>Tmw(DgV5H z`^@+40-pcl4}<10I3YrjMMPH7YL*dMM`R(9l|+^jSxaOwk<~<&6IoAWL6H@;nk7Zn z6j@YcRgq;y))iS;WMz@1Mb;KsT&r1KWOxA@ky0YHM2d-26DcQBPo$toMXjc!NKLJ#s7O_jvLbaw3X4=0DJ@c4 zq_{|Rk@6z-MGA~m7%4GQW2-4LQe~?tGg4=y&`70`QX{oSij7noDK}DYq~J)!k&+`d zx0<3ORkxb5BXvg#k5nEhJyLt5_(=7U@+0*}E&${TKrR8~8bB@rkShbZG>~fpxj2xk u1Gzkq>%;l~{4oC#|LOTr-JB~avG!((PKs~Qszpr8nCPU~82eLf_1^%04Ky48 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Bermuda b/lib/pytz/zoneinfo/Atlantic/Bermuda new file mode 100644 index 0000000000000000000000000000000000000000..548d979bd1ece82ce76c18fff8a4d9a919ba531c GIT binary patch literal 2004 zcmdVaT};(=9LMoPG)E^fDoGPCu?NK92nSCgXlP+)7$?5U!8j`Ttsn#~k^~`%%7>CG z+X^jnY%Z7UqO4)gYGW9gH$D8!hw0kz=Nw$L+)Ov;rt^|Y;+3a3^p7@sgc5parEM>-k-jTLbf`L8_Uh0%P?M9t=_5+hTl+a1v zbeOw7Yqx11)tb9|pR@OLm6&^*=h?}LOp{)bVecy*H&bTav{Pqh$o(nj?6jntGVQM$ znsNS|WF8;UtRqKc`d35xz}`<}M*C%({q{DQS##1p*s|9=6ggxcUb4kJlK!F1ncZk| z#=q9wVASM}?9$nPU zlVu+Ze<_bo@N99$Po{Y6yq$Ob3sdsrur2-lJv0A+)dkjvvheM7HZ-M`Ri>{EUI-8y+8f4jceR3;6nE!r5(md1;f+LV(lO+SaUdBUhP zf1ai-W5;Ce&OE#B*9+$5j!E{F14m73%_aNlj{RnRt&99H|^B-PhHQ6z}V*N6Png^&<;_tN^kE$QmGvfUE+t49Gel3xTWz zvJ}2P>L6!tr6J$}4RY8^oSr=qskd;A}23Z?qaeUqCAj{+H z)(2T2U$;WY5+Q4ZEE2Lx$TA`8ge(-YQpi#vYlSS9uUjo-xqRJvAq(c~Rt#A(WX+I8 lLsku0He}uK|FdvGH+Xa1;N``NLg7$BUaYV%R2V7<-U7n(2}A$@ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Canary b/lib/pytz/zoneinfo/Atlantic/Canary new file mode 100644 index 0000000000000000000000000000000000000000..007dcf494240a4b4516ce6104197028f6871d6e6 GIT binary patch literal 1913 zcmdVaT};(=9LMqh5zu1gzG{dCh>3*O5e^{u1R0%xsT@o>DJqC2;zNjuiclK0m}`x> zj~lDGa_V%)2j`yEuxci0|&vy6X^XS*zTW4%l|AnG~31cGp-XpnxD}Mq z?&@(pdj5bO`(lrhPM)&lqg|Hr{(c+ZE=%3EG7n%eQbGTY85YwJmcYL6+q`l97Dc3W;? 
zk4;>A$ErJZL3x->5Y0M=QPYv(1h9%;xrQoWE^uEoJAT09U^WkQKnem7c6Z%wvkAN{H2 zeeqUxV3bvN|86Tk@5C_ITS`{FiMT*yH#FbSWIl(J^vXR`a_=Q z-ut~+cU_+Mpy!RWz{7WcE{jCHa8Y^GTT;xcBFZlVfxy2+|HlE}_WvyZZ)k`^Mvj@U zIcOe^8aZs_xRC?*bw`dII&$pD!6QeH96oaVNB~F#NC-#_NDxRANEk>QNFYcgzAh9b z79%v9iMFK`5MnXnnMuJA7M#4toMgm77M?yzp_jSP| z(fhjak@%4TKt=!=0%Q!3K|n?U83tq=kbyu(0vQTqERexKM#I+)2QnVWfFL7+3<)wO z$eaEpI;U|?ioWd8sE#xYv#~w%-NbPEoLph_ur)}H#YzF@AdRtboG6IoXhibBW!;>{mnPLJSFDk z^Pt`4*P+eznO=dPb=c6C>h1bSy}PdI@HZE9#FJAx^44MXx!NkejZNZr;-vUjs{~Z+ zkx>Q3GJ54Y8I$Uiz}O@iYmb&7-(VRxaJU4!C2DYYq>g_Zq#^CYbi$nhIbdL|LY%`!QzMIsj;ktn}9iH<0f=-w)s>Qk#zJJ!gwpQRep znkds>WNK_fw#K!@=#2dd8h_kZXXXZK!oF!TE7?sGb9`iW(0ffv?3HA<`V!C z$sE@kGN#{ZlI+?xS)JW3YbrnK+W0H7 zF6*J@1RRs~aZQ@*en~g@sc!giRP%a^HUD0@ZtQUCrn4)w;BJv@J`ksc4e7EaFF?2M zkCLKAu~MAtE!)EVr6lQxZ1=QDX_%Lmetj)vgL|~>@og#Ze5N}tHptGl7OgmTQ!AV5 zbXR%3R@GFg&845~zy9g#`*GHs5s$g4!Q$fn`*UiV)3!8;H{EHzadF{w{>5>AxBidK z)@L4$ls`FCOa$N?iqj2tp@%*a6_M~xgda@@#)BS(%LI&$pD!6Qd+ zX$~Jbek1@S0we?^1|$e13M3394kQpH5+oEP79#xYv#~w%-NbPEoLph_ur)}H#YzF@AdRtboG6IoXhibBW!;>{mnPLJSFDk z^Pt`4*P+eznO=dPb=c6C>h1bSy}PdI@HZE9#FJAx^44MXx!NkejZNZr;-vUjs{~Z+ zkx>Q3GJ54Y8I$Uiz}O@iYmb&7-(VRxaJU4!C2DYYq>g_Zq#^CYbi$nhIbdL|LY%`!QzMIsj;ktn}9iH<0f=-w)s>Qk#zJJ!gwpQRep znkds>WNK_fw#K!@=#2dd8h_kZXXXZK!oF!TE7?sGb9`iW(0ffv?3HA<`V!C z$sE@kGN#{ZlI+?xS)JW3YbrnK+W0H7 zF6*J@1RRs~aZQ@*en~g@sc!giRP%a^HUD0@ZtQUCrn4)w;BJv@J`ksc4e7EaFF?2M zkCLKAu~MAtE!)EVr6lQxZ1=QDX_%Lmetj)vgL|~>@og#Ze5N}tHptGl7OgmTQ!AV5 zbXR%3R@GFg&845~zy9g#`*GHs5s$g4!Q$fn`*UiV)3!8;H{EHzadF{w{>5>AxBidK z)@L4$ls`FCOa$N?iqj2tp@%*a6_M~xgda@@#)BS(%LI&$pD!6Qd+ zX$~Jbek1@S0we?^1|$e13M3394kQpH5+oEP79aOz)E$t_w2gJS9I# zM29j*%24E-jtTFMjP=izak;-}eA-n_82h<8qfe`I;9VWxcSP?vzhCb>u~QR|9haoT z9g_UYKAF(0lCtS}NezbNuH~y`qAwt6g~c+-T_EX6F1h=*@#2c{s%tP$Cx4Z$Q+gA0 z>Zw@0r}L(|4}PoDT0hl{tsiUVhGUvl{ibGDT#}qnr{sFNByZ76lApX+3UV5xV7N(U zB(~~|%PVE(uk||XxL5A|tXvD*E7j9AOYhrOq_f+SbWTm07Hyp=_m{+|w>nYgreD!w z@354_e59pmUr1^H*D^2qeVG^TmIwM?lldKQh_B~8^|v(3g2M;&!MZwmsQCq5`0$Im zD7Z$;rUy0PE7ir$1-isNMVAa^X?c8!lwTa9j|@(hrSII(Wxa8-eE(>v=)5K?ng*n@ zH7r$?y|Qxice-l!QCVHlqtz*UWR0goYi@a4*Cwm3{bsk;4u^DIccVUfIiQanTBgAd z*URJEJzCdZCQsC+=#$&>W&OfJ3Dr2|sq6`|q4;NcdbB0=nekd5`BEB24Qa!flhW9K zNuPPET{echbkm*>baTgEeYWwSHnlWqlq1R!J>nnEsF;!e{b^Zo(qE=^^t&CWy=snIbYrWRl1%k!d3Hv^5ju=P*-bs>ocC z$s)5wrfX~Fi%b}qF*0Rj&d8*ZStHX%=8a4onK?3bWbVl1k=Y~DN9J#95`bg?Ndb}r zBne0skTf89KoWsu0!amu3nUpxHjs26`LHz!K{8@%Qi9|JNeYq`BrQl@ki;OFK~jU{ z21yQ*9V9(Sevkwq8L~AgLULqll7wUlNfVMMBvDAFkW?YLLXw4K3rQD}FC<||#%xW> zkeu0?q#;>D(uU*>NgR?nBy~vckmMoRL(+%j4@n@BK_rDp4sA^mkt`x_MKX(|7RfD=TqL{J|FFApCdJdT UiL%?Dn~|T9<@RT1VPi_@% literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Madeira b/lib/pytz/zoneinfo/Atlantic/Madeira new file mode 100644 index 0000000000000000000000000000000000000000..3687fd66e3aac0f1271ee64958b0af1187e29112 GIT binary patch literal 3478 zcmeI!X;77A7>DtPkTf)Vlw1gxl1wbpSKLi9LUb%~fs^7w=DvicgpO-Grdg@u+%jgl zC9XA$(yLjnXy%FwqJ~?drR9>Q$JE3aqvv|3X>|IKU+PmYp7Vn<@X`Nu`??Pv5T*Y1 zl(V1k;wiK*zOQBMTgJJMljglV&dmG0K<3XcHVblRnvc_sS(vp$W%lZ07ELQMi(}54 zCBr6|rQu;}S;ti>E9j2Ny0Jj5%>7bje-foW%S<-zyN{|>?~hfhhdbu;)Y)cDr;286 zRClxPkt1eZ$u6`0>N>M&_fD0wZmrt9aH!ex(H!;F+!D2Q^f>kPpcJ(&DOqiA6=HTY zYo&I!Sf+L*Z7{oz|7>ytqD}6#dS=g=5o+%jz17~!tNi;+H~+qqtJMAvV%7d_-G=9- zy8gT!`^|xowbg+YBmMaa8UFlbpP7TNlvW3)cl95NO7kCjXQnya?Xdc$-F8*b{IqkV zZoYFgbgOghq1Dc@+e@6|7iT#q*AH|GS7*4V7Ef|dXQa8`z1`paKDCQ`rcYbXJs;N8xllI3xlkPHT)Y(Q{B)w&xwPl9b9qCd^K(|7b7k%p=jv3~ z@sC~PTpN<%{L(kay`Gru-iTf3-fSrDui+EjTOp}#QD9%U=(^8+cwU$)Kdqd4WO7rZ z(;`fTexatKud0N^7Mn*K{w9^eFG=N)lTsyczf`%tS*o7R$qYT5DUWTNDPfy(bhQ=P 
zy88Tuy2fV)(NRMd9Tz@+aylEmC_*jxQwV)Qu8m>&Dd==q6F4 zb(1^kx>?vPX;#=b9l`0*iOJrT?a?6pVN^Nw~(TdXTW`yqk@Kch#Nza^vdx9c%ij>}kgxgM9dQO3{tS|(&KkhjJxmGl|u5)k-6V5y+{{Cd#- z-rcYJ`n!ipm$~aJxa@!URZ`-QP|ABRWw%8Ilqo5>=2zwK{l2^o2vN$uhw{0qaql~y zw~zO!Hue=C(>l?6_ulLWUE}P}UE_S-p9JtI`|qzOAi(#}^LNVrx;KACfBs#GeO`?Y zNkZljnM6-Ji^w!0^N36&GLy(uB6EpMCNi7IbRzSKOsJ=wQDjPyIYlNFnN?(3k$FWX z7MWROYLU71w3Ca>E;7By{2~*K%rG*=$Q&b+jLb4J&B#0>6OGKYr=4nKu08E!BeRW6 zx2K(NWWqh|j3ZO-Y3Cf7bWc0$$h3Rfc}FJR)6P6H^`3U_k;(V8vyV(aGXF>dkPILx zKyu(|lYnFaNduAxBoRm^kW?VKK$3xE14##x4|yz5{G0CNga|qBzZ{okn|z>LlWp|Gl--R$sv+NB#THIkvt-a zL^6q_63Hc!OeC8~I+1)L3H7uYMN;Z%bBZJt$tsdoB(F$fk<22gMRJQI7s)P?UL?Or zf{_d(DfYBEMw0Alvy7w}$up8@B-2Q$kz6CmMzW2h8_744a3teM$~|q)k)(UttRrbh z@{S}P$vl#JB=<=2k?bSsNAi!H0LU4DoC3%>fSd%L_AEe71LQnFP6XskKu!hZTtH3+ zfPbLuyUjBnuZUGN{C6APXI literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Reykjavik b/lib/pytz/zoneinfo/Atlantic/Reykjavik new file mode 100644 index 0000000000000000000000000000000000000000..35ba7a15f4b679e754d6e7a62048fd13b1438ea9 GIT binary patch literal 1167 zcmd7QOGs2<7>Dt18fOfnC?X|;db=oro2Up}g%cK0v#1$}+9**%p-@4zD=~xULJ8Gs zsu!2h4T7MUMGz!vT9Y}AliAfAwTouiSW~C(`EXT>pjGE^&hN}{W;6dMx@}j@a`VTL zZ@=N`2}xG&)hBarNU9{F zsfn|ap0P*LJqP4zf06dy+A7aFS4v;=etmwmP+l}_(U&K)@@jjzzOGBlo6tP%569*0 z!XG-YDy5le#WGlUTL*`7GBh63;kG_`mprKNFWr$3H>>sIv1S=L7t+z)jq>SGm5fDH zKUY=AmlX{NQ7I|tg|27Yd)XTnCRcv3)xrQdfCa}c=iY+ z7v+#OkwuYJk!6u}k%f_!opxztZKquvS>0)uN7hFQKq^2=Kx#mWK&n8>K9q&TEHr!5bu4=E6-5GfI< t5h)U>5-Ag@6Dbs_6e$&{6)6^})@jQ{>O~4hDn?2+|5wTdw(eyGzW|fSO;rE@ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/South_Georgia b/lib/pytz/zoneinfo/Atlantic/South_Georgia new file mode 100644 index 0000000000000000000000000000000000000000..b1191c9fb693a185a34996aeab8fad9281672113 GIT binary patch literal 148 zcmWHE%1kq2zyORu5fFv}5S!)y|Hls)7~F$HfSeHQ`j~)HAPfR9HpdSLpW#0cWVZc1 dz`*$b|J?%&Kt9ks79ZabsF@55TtLH(xBzeV8Z`g_ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/St_Helena b/lib/pytz/zoneinfo/Atlantic/St_Helena new file mode 100644 index 0000000000000000000000000000000000000000..6fd1af32daec193239ab6b472526fd3d6bdb2f76 GIT binary patch literal 170 zcmWHE%1kq2zyM4@5fBCe7@MO3$eHwPk_Q9h|Nnn1KvF=!;^P~_;10wf5JG}!KfpQ| Q82$qRep9)C#v5<}0M)@5+W-In literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Atlantic/Stanley b/lib/pytz/zoneinfo/Atlantic/Stanley new file mode 100644 index 0000000000000000000000000000000000000000..aec7a5d3665abb9372435e5ae285d94b62586e63 GIT binary patch literal 1246 zcmd7RK}b_^9Ki9nv|5V}I|NQsXQeo^eAAriX8p}E*ED?PF0Ml}A~cAoTc@m`BnYzZ z9V#R$=wFu(4SD2jZ6&=5p)6t~9SVZFnL)4r_tC-VP{;m{|Nryn!DENtcgfVqp=Rxm z59Niz8>Q9;3P>DX>6fqyI{h;T{21U+x$;S&}k$*QY3$H50lgVlM z^u8@-LZ@_}_lNMk%*gVuS%1aNgZjGX6aLEcF}>=>K3RP@B>ZEYvL;a`)(=<94S_|m zG47JJtKN#bV3l6?aZ=QKHNF1DoEiA?UT?UUF*jLRz46i&bF-Y#w;Ufen=X#&`rZR( z@Wgf596l*p2G7c^6+@!6eOR`B?-p&oglv0LE4F=Wly{M&tnZRfL^=A5OrJ!`q#ww*JzHMTupt=7mnZw47PGHzt#$k>t5Bja~e2_P9D zDIhr@Ng!DuY4Ee;;iwWpGC@*7azT9azc_qvT{^uA$d8f#E{I8 z)R5ee#hcz9juzmF#>=xf&*QB8Tz$G2N;Qq)uamE)Uz1*(Qzdx_b_iJT#7Du6<*L*%s z`Tu<R0gAQ*YybcN literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Adelaide b/lib/pytz/zoneinfo/Australia/Adelaide new file mode 100644 index 0000000000000000000000000000000000000000..4f331a87df4ed78be0d00b1a82b5c66c773069c9 GIT binary patch literal 2238 zcmd7S|4&tQ9LMo9)5Qhd39IWWYFS5lHIgf{~M(p2z56rtf2P3g-W z?_zUiP4Aprr4iWbhg<`7ZjHI5qGFo%ZEcORjIGVAt*ou+^E|UHSAT%#?A*t>=WciR zhx>X48iFl_)_)F{dBVl9%Us;Y_L`RiE8l$ez=^EOB4K8oZhaK?|DJYGGHY7A^AW{MuB#Gi9zWD719JpD|iI 
zCq;_Cn$nU#re)!YalPxyNhuxpK^7f9DP`dgWbyt1@osoS%AX$9B^A5Gx9*TG&3j!d zd||!YRV&LfTXorQhh_PN{Zjc^zuYsnT~>^ANY!Ao`1e)I%091DZz+(P&PKg=jbCeP z%5;@CUjv>ry)P|6SI>;n)ziP|{TI(`-Ph-|e&UoiyfdN?9Q{@rht5h<*qDfhGf6AB!slp5!BX84Z3l1g$7TTYTIb8w!f9C55E$x z9nV|(NbeSh}>Zhfv@+a9;GAWz0&&cC9os@3(+tU5}fNYuCE1@s?Wb6AO z={XjZZAV_z?FYm9#GY>5@l>ns?5fi!$Cc9=eGQzJwPUyWBU>*FIn2L09J%_w+_x%V zmHU(yPm#yg+ z(lDfBNXw9(Ax%TNhO`ao8`3zWb4cru-XYCHx`(t6=^xTSThl?Lg-8#PCL&!#+KBWK zX(ZA~q?Jf7k!B*@MB0h;6KSZe=_t}tq^C$zk**?bMf!>~7U?X~TFc>#x^hOlu7T6C zp8V1Njk~RAySZ+=xmVp+`AP1-y8BOftymuCmMkptkmMYHfhVsZ&y(Yy5BWv0e*=T+ B3P1n= literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Brisbane b/lib/pytz/zoneinfo/Australia/Brisbane new file mode 100644 index 0000000000000000000000000000000000000000..a327d83b7696f39c01a9b5cdff1d4f0fd4b1f94d GIT binary patch literal 452 zcmWHE%1kq2zyPd35fBCeK_CXP`5J)49KU6A=Il}Ua5`i&!|CJU1!uO0HJn{;S#WMa zF~j+G=>p~g%LW!+83t9)vka>5Uoog_u4hm`e}+Mmb0UN0`gIIUj8MqT0)Y$+{S`oS z8D?!@WMN?FS-`-F%=YmOVQ_SH0TIC=AZ3haW(1d-8mr9BP%}jyPXn zQ=SG*kC))mI1T=7iH5j?G~}~b4GsT6L;C_XZ0vo#{^b~%cIkVWzB@o>oEVhwn#(fN ze^erJ&&n*{UYQ-;D|5;lWzOGE%iOFIxnbHqjf`#3$U)V4E|1RZsn_`yk5>wHxv27PMZs}o(ulq%AiQ6LyMHeJtY+MqP`Xq7qQ%MT-OVY7-ESt(e5M3<$$ zriBIVdRK6nERV0%<-Z@56+a%7qR;or-Ge)2Wq+d-_tc2zK#8pC$di(7=~CKMrT47$ zXjy5luFgwSuRBKXjfv1TGXr$Z#IJha*kvvM`l410pVi8D`}O`4-$_;P1*!JGCpFq9 z5BPdy?PkBMD}PEJT+u4)vl?W>tyQuy(j%K9d|K(1q9{XmK~ycw-q zUJ2L6=N)~hZCoF2`c|9jXUHQJ6VhDtvuw>Ckw=rx%VRg5k(SW6rR9%q**3agd|!0P z_78m0da_P-9Dh-F9&XpiJ6m+u6Sca#xm*KW|DJ(CQ#@T$eQ>hp9B0=)^J{iq=yjQ& zcDw)D`}~5{UMIg`nU{}UE?yn)^uV^&}j zG9;@R6EY~P85J@tWL(I=kdg7f85%M+WN=n9I%IfOGd^U1Rx?6mh{za`K_a6>hKY<5 z87MMRWT?nkk-=KcXp!L}<3$E+H6uobY&By>291mw88$L*WZ=lik)b1FM+T3K9vQyX zj2{Vr)kFXZfz`wS2?7!YBn(I#kU$`jKth4U0tp5Z4I~^$Jdl7`O+=88SWQf~wB1_=!k8zeYLbdc~M@j(KFLvA+bY(heQtv9}+(#fJg+95F#-|f`~-X zYQl)b5eXy`NhFj=ERkR$(L}X_iNKBESB2l%Pup)6q0*gcz2`v&^B)CX) zk?8x literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Canberra b/lib/pytz/zoneinfo/Australia/Canberra new file mode 100644 index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743 GIT binary patch literal 2223 zcmds%TTGU99LK+J!U*z^#hcz9juzmF#>=xf&*QB8Tz$G2N;Qq)uamE)Uz1*(Qzdx_b_iJT#7Du6<*L*%s z`Tu<R0gAQ*YybcN literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Currie b/lib/pytz/zoneinfo/Australia/Currie new file mode 100644 index 0000000000000000000000000000000000000000..a3f6f29a49617167750848c71e463faf6f3974fc GIT binary patch literal 2223 zcmds%YfP1O9LK*$V8lr!%PAKnw317B058ZXB{e`$NsPlaMMC6anFm27%hPXWo?c{j zag$|Bq%nHqRJv#zi;K@otZtKXZ5NVy={G-J^#-Rg3;O!D3?1Tg>afoA<(?U4JNE<3IgU6GB%s@#Ncztv#n3I=eL~ zZ$LM8?AA?5hc&sXQIr38TvKw3G&N$C;$}2h+&9@u@Dy4??|MrdO|@y^0-HYQwWOvL zyZJz$lB+{D!$xf8(u2CC<$K*abEj@A9@eaJ>y(mqR4Ko{q0}EoE$zcjr4OI7+h5zE zj3YfZd;c2w_J%F9Jx^JiS}c2Qjm@dpqq&O~+Ps`*&7YoRIa8J^=c;VM#SG=1{o4G! 
zV|GXXJ9g(lx-IM()uKb=E$@j_x-0aPE#AzPQe#ENJ3|P&;F}v^NS*>|xSZh1qQtgvRwXUN}bVntmy>=Fxm>`Cz_n9Gqnj9G_|r9=^^tJ#*O}vhQqj z@CyrWj8<#)C2c7lQ=9*DwWkefYka@9#T?PL%P(pB*gkc9W_tMDR_%DRUXS$cwMV-{ zw)45o7TQ&3k8N3Po^gL&5t09aOWO05^GDWGU5b=mIOd7@`{c>Wl7N;JlyT$-c+KPC zRN&utvd?+V-N_~Q8`3q0c3)&NW;dAaVD{tM*$`$&m@Q%UgxM57$F4Bj0`}$F*%)SL zuAQx6_U78z9Au*eS48V6U#7%>ui1?Q9pb zU(ALvJH~7ovu9w_m|bJG4eT4(IIweI>%iV!JDUe~kJ&!3e@p{_4q#dU^Z?TYpbMBb zVEOk>&?BHpK$n0v0e!+W3ezb}t1!L7Gz-%$*G{`I{lYX1(=klTFg*jB z26PQ*8_+kPaX{yQ)&ac(ng?_bXdkA3uAK&AI*4f@riYj&V!DWFBc_j-Mq)Y%v=Zng z&`hA4Ks$kcx^@~0bQEYQ&{LqPKvyws#q<@^SWIX2e+aFY`p(YIqQ4m>*;&49UsguR JJd`=t`zK#J`gs5V literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Darwin b/lib/pytz/zoneinfo/Australia/Darwin new file mode 100644 index 0000000000000000000000000000000000000000..c6ae9a7ba253089d4fcdb9668b70aaad20dc94ad GIT binary patch literal 323 zcmWHE%1kq2zyQoZ5fBCeP9O%cxf+1P9KW?o=Il}baXMrd$LZs76=$}`cAQ=AP;qWS z703B@r3xlSC}d!$n*dbLFnI+dD+5F83@{~NQhi4^Fa{F^659Vp4MWO zxnS86ZH(SHkS^NhBC?G&0&DA@=BTztH*0IAL!|RLt5?0~ZR>M({-3k6^Spb$A8%=G z)ja)!Q0EOdyx`nCPcY{=@=24ubY}Xm{odEq-?!2C!Koq}=*aL5w$r}8W!$QW#D70Xjgak(?KbD|CM7oqY;HrQQg+l@>gH0LSM-GD zFI{2_(rUCYHqO$ftXA5nY|-T;rJw)CGCGFst~2l1;-1;IqW|~f{r-97eRfc*`*v&1sVc4Q*r@!L zT-_7M(7HXdRp8rT>&tWO-hxF|xIDqUDN(jzdW02C9I>LYpKRl$3s!t_&`QqrSZPm} z-FN!DHoZEa&8=^%?8#1TX=+n>&2zfHc)u!g>$Nq#Qk4lCv@J46Rg>zh+EZcGzZF`| zP?ptxw9vNq&awwiPPGS*-C#SOy=o8H54O|ymHD=ZsjlRTcI6GLKI2O@B=&1})EPB~ zpHSn~m$hg388v-jdiec1?R~34k90TNqsRTW@6b;3?=QELipVwb=+fqh~&3hdOivsKJqT|1k_?AEoj zUCe$l8wPd^Y#G=yuxVh|z_x*X0~-f+?%LTpuy~-1Je+oBbb%|J;5{u=nAGSKwmJ8!E^>_4W>6h zb1>a;?X(BeAD}^)4q;k^=@F($K$kFW0{Vn$6woP5tAJi%nuX~Wrd>e4TssW|ItH{1 z=o!#7pld+efW85Z13HIk9j14f=3%;rX&=p$v=itj&`?ZAT{|ts^c2%nOjj{&#q<@^SWIUzt;O^fXfDuQpuIqUfd&H| zcI~tn=rPb_pvyp;fj(mzjp;O|)tFxE{}k*9Dn2JQg^nlXr=}#QCZ{CjFFBw30W2sWfHLWsma-AL&eP#v^T%(8N{$+-|e5_Ma8%@gH>pIoD+oYa9 zu7`E3G{YNOR9f3ENta{FcX5S`D5+8YgCPl&h1AGOpNz`%snLZ!<){8V)tJC*8T0nbvK*o<68lroTO;bDr-txewRtyw=rb#+A7`)Hu`RAIZ=KwE;7;I#JJ31u}ba zP!-PdOHo#W3j2G@oTT@vxW^kQ?)+EHefdC2{=1<{?>5W4KToOo&G*fMGk@r^hRbH* zt`mBZG?>MaZF)(`Ci8R7Qe7TgV3rP@rkBM}FcrPZ)pE~#S^g?pMcOkZdM`z-xS1p? 
zFL>3elO9=p;DuUK^GtqGca(~Dnzf}Jx-z%jRAt=O>r!u-^*@}`8@#8@hIjjQbw{m{ zf7a@ae@D%xt3~?Pi#z4FQ%Y?<7?Ca83f0!iJn_VI`Sj`<^Nn7PQ$N^VkW+WU>FPKq zn>}8~IdsN}b)2KWd%U6iurn#UDC~rCrt)FGeSX4UztVN~uiYy(pZ=}WK2>qmPxz6m zMK0IXUN3UNuJ($NOLn!_j9fHw)yQQd*Nt2_a^=XSBiD{xJaYBO<-6MJM+$JY6(A+J z+8U4|kSdTekUEe;kV=qJkXn#pkZO=}kb01UTx~^2Nv^ggq$s2+q%5Q^q%fp1q%@>9 zq&TEHq&%cPq(G!Xq(oO+BT^(%B~m6*CsHUEQ+gL6=YeEbwL&eSs7$$khMV;2U#6td7STSdb%@3=>h-P XQGS*wJrGFC5BPlnpFb^sG<5$6FIDiR literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Lindeman b/lib/pytz/zoneinfo/Australia/Lindeman new file mode 100644 index 0000000000000000000000000000000000000000..71ca143f29f2e5799b865478d05a0e88465d92f2 GIT binary patch literal 522 zcmWHE%1kq2zyPd35fBCeF(3x9`5J)49KU6A=Il}Ua5`i&!|CJU1!uO0HJn{;S#WMa zF~j+G=>p~g%LW!+83t9)vka>5Uoog_u4hm`e}+Mmb0UN0`gIIi0T~apUKTTG&p6ef zbE&^Uw_;5L6C)Hdvp^sdh+<&qUIVm{Vb%slAiHM)11B=u$2Ww*(bWY+1c!i>F*1Tk zh7iL35eF&*`{xH(2Lr=@ASimf=n9Ai`43D3{RpB#{shxNzk+Cxe?c_J&mbD)Zx9Xg zJIFYY|3Ne;3_t)B4qzG>79ao$4=@c36A%D}3z!Cm4G4h32TTLQ2n0ak1foG<1)@RW Q#X!vf;{pYsuAu=J0Hh0uumAu6 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Lord_Howe b/lib/pytz/zoneinfo/Australia/Lord_Howe new file mode 100644 index 0000000000000000000000000000000000000000..a653e5166d29fd9d586719347d52ae447bfbe58e GIT binary patch literal 1859 zcmdVaTWpMJ9LMo@l#6ju$28p*-8!U7J7%ylMX5t+Yg)9UTGU}n%{c9&MgQ!gA=IHk zNNHR+B~p=yMMFvL64^8x5zz=)WOvJjG%h?u;*p4YzHcNhT)2~XCo`XE)875Rzwo?h z#YpFirBw30W2sWfHLWsma-AL&eP#v^T%(8N{$+-|e5_Ma8%@gH>pIoD+oYa9 zu7`E3G{YNOR9f3ENta{FcX5S`D5+8YgCPl&h1AGOpNz`%snLZ!<){8V)tJC*8T0nbvK*o<68lroTO;bDr-txewRtyw=rb#+A7`)Hu`RAIZ=KwE;7;I#JJ31u}ba zP!-PdOHo#W3j2G@oTT@vxW^kQ?)+EHefdC2{=1<{?>5W4KToOo&G*fMGk@r^hRbH* zt`mBZG?>MaZF)(`Ci8R7Qe7TgV3rP@rkBM}FcrPZ)pE~#S^g?pMcOkZdM`z-xS1p? zFL>3elO9=p;DuUK^GtqGca(~Dnzf}Jx-z%jRAt=O>r!u-^*@}`8@#8@hIjjQbw{m{ zf7a@ae@D%xt3~?Pi#z4FQ%Y?<7?Ca83f0!iJn_VI`Sj`<^Nn7PQ$N^VkW+WU>FPKq zn>}8~IdsN}b)2KWd%U6iurn#UDC~rCrt)FGeSX4UztVN~uiYy(pZ=}WK2>qmPxz6m zMK0IXUN3UNuJ($NOLn!_j9fHw)yQQd*Nt2_a^=XSBiD{xJaYBO<-6MJM+$JY6(A+J z+8U4|kSdTekUEe;kV=qJkXn#pkZO=}kb01UTx~^2Nv^ggq$s2+q%5Q^q%fp1q%@>9 zq&TEHq&%cPq(G!Xq(oO+BT^(%B~m6*CsHUEQ+gL6=YeEbwL&eSs7$$khMV;2U#6td7STSdb%@3=>h-P XQGS*wJrGFC5BPlnpFb^sG<5$6FIDiR literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Melbourne b/lib/pytz/zoneinfo/Australia/Melbourne new file mode 100644 index 0000000000000000000000000000000000000000..ec8dfe038c2d10aed29763ef8f664c8f0cd35c8c GIT binary patch literal 2223 zcmds%TTGU99LK+}!U#&qa>zjmwG<3Fs2G%z2S7+Ml$T?v1c+h>UqA&b>6eGmMM_$m z%$Oq0=%T`CO>-<_wXs5Ebj^bSvMs!v;XH=7j0d1+xk3v{-0;>p7*_azaM{j zZOsDxgJAawHym;|&y&p^?Q36eZ|{x!dC>o^1_w$4=g$<|P*+CaLPvv!TVete0~r=H zX}W?7Vl4RAbPGwoVIf~cTWI)Y3w`f*3%lBHx1Na7ZC`z>NzKbt zW}oh8+OIofPiksuji&ywPZ8<4inwyZB4<`xdm<~O}*Nj-IzydzU7M+z-fQc53yR8m;LnQU1|ft!>HB!;SM)5Gb;B6*;!P;65u{7H|HP zC@YE!v*PgUR($g(E4g~fN-tlqvf(}}?>lXeoV}<=-x$({why%Ng>G$X>QKej!+NZA zuPSpJRFz(>>i8mUj?7WblsenutF$e@6x!C&EUW!2&9?Q=vB%$;W>1{F#h!d=+@7)@ zY*>25yE9|-YbvD8G*EKQdKX6Gc-@8Auj-B=eNiTLN^zYAW^8H$#oyAe;=QW>? 
zQ~rP7NvnI!+sQTW8`4cjL&Skd%x*lp?O^uf*=-23Bg~dCd%|prpJP{;Z2|l8>^6qk znP;~(%-%e^&0%(j*&eVzV1vL8fh_`i1U3om64)lNPtR_nz)n58tz!1-*=-iHThDI0 znEhfljM*_}%a}a_o5t)K*fy|lVB^5ffvp33_v|(g?B286KCpkF0hkV8T7c;RrU^h7 zFm1r}0n-RfCorwR^un`i2BsUHT{|%Sz%&HY5uhbNPk^QXT>;tx^aaxxpfi}(V0wdT z4$vLXu0241fCd2_0$K$02xt<}C7?|}pD>NWbPCfdOs_D_!gR~CYZsUB*HECNKudw10!;#hcz9juzmF#>=xf&*QB8Tz$G2N;Qq)uamE)Uz1*(Qzdx_b_iJT#7Du6<*L*%s z`Tu<R0gAQ*YybcN literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/North b/lib/pytz/zoneinfo/Australia/North new file mode 100644 index 0000000000000000000000000000000000000000..c6ae9a7ba253089d4fcdb9668b70aaad20dc94ad GIT binary patch literal 323 zcmWHE%1kq2zyQoZ5fBCeP9O%cxf+1P9KW?o=Il}baXMrd$LZs76=$}`cAQ=AP;qWS z703B@r3xlSC}d!$n*dbLFnI+dD+5F83w131_w!EjYV8bOI;; z?**LOBo;_9CM}R!UcW$7HD-b4`cn$7h5HrUSko2Um1`EbziVFL$sD)9^IYlzCPpx1 zWl_ zF#HFCl4W8~Ks3lhU>fKt5DoGemp~g%LW!+83t9)vka>5Uoog_u4hm`e}+Mmb0UN0`gIIUj8MqT0)Y$+{S`oS z8D?!@WMN?FS-`-F%=YmOVQ_SH0TIC=AZ3h9)5Qhd39IWWYFS5lHIgf{~M(p2z56rtf2P3g-W z?_zUiP4Aprr4iWbhg<`7ZjHI5qGFo%ZEcORjIGVAt*ou+^E|UHSAT%#?A*t>=WciR zhx>X48iFl_)_)F{dBVl9%Us;Y_L`RiE8l$ez=^EOB4K8oZhaK?|DJYGGHY7A^AW{MuB#Gi9zWD719JpD|iI zCq;_Cn$nU#re)!YalPxyNhuxpK^7f9DP`dgWbyt1@osoS%AX$9B^A5Gx9*TG&3j!d zd||!YRV&LfTXorQhh_PN{Zjc^zuYsnT~>^ANY!Ao`1e)I%091DZz+(P&PKg=jbCeP z%5;@CUjv>ry)P|6SI>;n)ziP|{TI(`-Ph-|e&UoiyfdN?9Q{@rht5h<*qDfhGf6AB!slp5!BX84Z3l1g$7TTYTIb8w!f9C55E$x z9nV|(NbeSh}>Zhfv@+a9;GAWz0&&cC9os@3(+tU5}fNYuCE1@s?Wb6AO z={XjZZAV_z?FYm9#GY>5@l>ns?5fi!$Cc9=eGQzJwPUyWBU>*FIn2L09J%_w+_x%V zmHU(yPm#yg+ z(lDfBNXw9(Ax%TNhO`ao8`3zWb4cru-XYCHx`(t6=^xTSThl?Lg-8#PCL&!#+KBWK zX(ZA~q?Jf7k!B*@MB0h;6KSZe=_t}tq^C$zk**?bMf!>~7U?X~TFc>#x^hOlu7T6C zp8V1Njk~RAySZ+=xmVp+`AP1-y8BOftymuCmMkptkmMYHfhVsZ&y(Yy5BWv0e*=T+ B3P1n= literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Sydney b/lib/pytz/zoneinfo/Australia/Sydney new file mode 100644 index 0000000000000000000000000000000000000000..aaed12ca284d69e3a8ba25891701790bde7f6743 GIT binary patch literal 2223 zcmds%TTGU99LK+J!U*z^#hcz9juzmF#>=xf&*QB8Tz$G2N;Qq)uamE)Uz1*(Qzdx_b_iJT#7Du6<*L*%s z`Tu<R0gAQ*YybcN literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Australia/Tasmania b/lib/pytz/zoneinfo/Australia/Tasmania new file mode 100644 index 0000000000000000000000000000000000000000..07784ce5d751f040ba7ab43713f32cb92ce9afb1 GIT binary patch literal 2335 zcmds%YfM&k9LK*8ml5P8%PAm`S_v2;ctJ?TOMsw~7>@{~NQhi4^Fa{F^659Vp4MWO zxnS86ZH(SHkS^NhBC?G&0&DA@=BTztH*0IAL!|RLt5?0~ZR>M({-3k6^Spb$A8%=G z)ja)!Q0EOdyx`nCPcY{=@=24ubY}Xm{odEq-?!2C!Koq}=*aL5w$r}8W!$QW#D70Xjgak(?KbD|CM7oqY;HrQQg+l@>gH0LSM-GD zFI{2_(rUCYHqO$ftXA5nY|-T;rJw)CGCGFst~2l1;-1;IqW|~f{r-97eRfc*`*v&1sVc4Q*r@!L zT-_7M(7HXdRp8rT>&tWO-hxF|xIDqUDN(jzdW02C9I>LYpKRl$3s!t_&`QqrSZPm} z-FN!DHoZEa&8=^%?8#1TX=+n>&2zfHc)u!g>$Nq#Qk4lCv@J46Rg>zh+EZcGzZF`| zP?ptxw9vNq&awwiPPGS*-C#SOy=o8H54O|ymHD=ZsjlRTcI6GLKI2O@B=&1})EPB~ zpHSn~m$hg388v-jdiec1?R~34k90TNqsRTW@6b;3?=QELipVwb=+fqh~&3hdOivsKJqT|1k_?AEoj zUCe$l8wPd^Y#G=yuxVh|z_x*X0~-f+?%LTpuy~-1Je+oBbb%|J;5{u=nAGSKwmJ8!E^>_4W>6h zb1>a;?X(BeAD}^)4q;k^=@F($K$kFW0{Vn$6woP5tAJi%nuX~Wrd>e4TssW|ItH{1 z=o!#7pld+efW85Z13HIk9j14f=3%;rX&=p$v=itj&`?ZAT{|ts^c2%nOjj{&#q<@^SWIUzt;O^fXfDuQpuIqUfd&H| zcI~tn=rPb_pvyp;fj(mzjp;O|)tFxE{}k*9Dn2JQg^nlXr=}#QCZ{CjFFzjmwG<3Fs2G%z2S7+Ml$T?v1c+h>UqA&b>6eGmMM_$m z%$Oq0=%T`CO>-<_wXs5Ebj^bSvMs!v;XH=7j0d1+xk3v{-0;>p7*_azaM{j zZOsDxgJAawHym;|&y&p^?Q36eZ|{x!dC>o^1_w$4=g$<|P*+CaLPvv!TVete0~r=H zX}W?7Vl4RAbPGwoVIf~cTWI)Y3w`f*3%lBHx1Na7ZC`z>NzKbt zW}oh8+OIofPiksuji&ywPZ8<4inwyZB4<`xdm<~O}*Nj-IzydzU7M+z-fQc53yR8m;LnQU1|ft!>HB!;SM)5Gb;B6*;!P;65u{7H|HP 
zC@YE!v*PgUR($g(E4g~fN-tlqvf(}}?>lXeoV}<=-x$({why%Ng>G$X>QKej!+NZA zuPSpJRFz(>>i8mUj?7WblsenutF$e@6x!C&EUW!2&9?Q=vB%$;W>1{F#h!d=+@7)@ zY*>25yE9|-YbvD8G*EKQdKX6Gc-@8Auj-B=eNiTLN^zYAW^8H$#oyAe;=QW>? zQ~rP7NvnI!+sQTW8`4cjL&Skd%x*lp?O^uf*=-23Bg~dCd%|prpJP{;Z2|l8>^6qk znP;~(%-%e^&0%(j*&eVzV1vL8fh_`i1U3om64)lNPtR_nz)n58tz!1-*=-iHThDI0 znEhfljM*_}%a}a_o5t)K*fy|lVB^5ffvp33_v|(g?B286KCpkF0hkV8T7c;RrU^h7 zFm1r}0n-RfCorwR^un`i2BsUHT{|%Sz%&HY5uhbNPk^QXT>;tx^aaxxpfi}(V0wdT z4$vLXu0241fCd2_0$K$02xt<}C7?|}pD>NWbPCfdOs_D_!gR~CYZsUB*HECNKudw10!;w131_w!EjYV8bOI;; z?**LOBo;_9CM}R!UcW$7HD-b4`cn$7h5HrUSko2Um1`EbziVFL$sD)9^IYlzCPpx1 zWl_ zF#HFCl4W8~Ks3lhU>fKt5DoGemaW(1d-8mr9BP%}jyPXn zQ=SG*kC))mI1T=7iH5j?G~}~b4GsT6L;C_XZ0vo#{^b~%cIkVWzB@o>oEVhwn#(fN ze^erJ&&n*{UYQ-;D|5;lWzOGE%iOFIxnbHqjf`#3$U)V4E|1RZsn_`yk5>wHxv27PMZs}o(ulq%AiQ6LyMHeJtY+MqP`Xq7qQ%MT-OVY7-ESt(e5M3<$$ zriBIVdRK6nERV0%<-Z@56+a%7qR;or-Ge)2Wq+d-_tc2zK#8pC$di(7=~CKMrT47$ zXjy5luFgwSuRBKXjfv1TGXr$Z#IJha*kvvM`l410pVi8D`}O`4-$_;P1*!JGCpFq9 z5BPdy?PkBMD}PEJT+u4)vl?W>tyQuy(j%K9d|K(1q9{XmK~ycw-q zUJ2L6=N)~hZCoF2`c|9jXUHQJ6VhDtvuw>Ckw=rx%VRg5k(SW6rR9%q**3agd|!0P z_78m0da_P-9Dh-F9&XpiJ6m+u6Sca#xm*KW|DJ(CQ#@T$eQ>hp9B0=)^J{iq=yjQ& zcDw)D`}~5{UMIg`nU{}UE?yn)^uV^&}j zG9;@R6EY~P85J@tWL(I=kdg7f85%M+WN=n9I%IfOGd^U1Rx?6mh{za`K_a6>hKY<5 z87MMRWT?nkk-=KcXp!L}<3$E+H6uobY&By>291mw88$L*WZ=lik)b1FM+T3K9vQyX zj2{Vr)kFXZfz`wS2?7!YBn(I#kU$`jKth4U0tp5Z4I~^$Jdl7`O+=88SWQf~wB1_=!k8zeYLbdc~M@j(KFLvA+bY(heQtv9}+(#fJg+95F#-|f`~-X zYQl)b5eXy`NhFj=ERkR$(L}X_iNKBESB2l%Pup)6q0*gcz2`v&^B)CX) zk?8x literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Brazil/Acre b/lib/pytz/zoneinfo/Brazil/Acre new file mode 100644 index 0000000000000000000000000000000000000000..788d0e9ceb073af24f064c34e1458979afd10903 GIT binary patch literal 656 zcmcK1KTE?v9Dw0#Z3iPNf(RmN?Ke;n#OcJPgW^>)iGx$Y%|#HG)!$*Ti*t0EYF(824p z2-j0OlG+r}@{No>dQ|MPrY91Xvb>&Oy&UKE(6St-_Tp)GocoH^H9ccnld}ulZ1X2M z_8-DDz8^7P={N*JBDtDSNGv2663x|wL*lubfJj6nBoY$|ibO@iB5{$xNMu(N8j0;{ zf+Nw9@cew@BLg5KAVVNyAcG*IxSC;*aa_$n$VkXg$XKpsFl03R_u+hI!;tR_2W=Xp literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Brazil/East b/lib/pytz/zoneinfo/Brazil/East new file mode 100644 index 0000000000000000000000000000000000000000..552ce7c29228ef36e2dce0ece5e782ba67d2b607 GIT binary patch literal 2015 zcmc)KUrg0y9LMnkl^ZxUxBgkFUt^9DC;Y-8M6)s(0>QD6pN3?nA_!8cME)!Ckm=gk zbY$g5t37k8x!m$`(}^w{IZd5&q@%SMBFRlfu56~KBXxS-kC$E4oj-QY>x=>8;`{yt zHI4Oop?^KMx$p4eX>%_=kBPar4?b%?&^4}XuFqjc!*^z~dDKdz$4qH!)Q3i&G8xr(+KfwUWa{+a?bP0rk~w9ZW*!|fS)nVM z^*eXbw{3ZBr>QtnZ=*|JGxIu2?K5TX%ClSQ zv{Gi6`L!il6?(t~MOhmBR45BGM(e`U_e*u^Wv$*bB8x8dXw5qV^8A@$ySSlSUf6rq zE-BqDFShTqOLM<8%bGgu^6|#hmcMFi6Gx>kdx5Pxx5BKvt65j>pKBVf2HMzJE30}7 zv}seZyma6`eYqx6UfG$@)icv%P3t{&?Sx@zUL3d0f1Z_=S$($U`+c(R!7uE(V`t3z zk&o?$y+6vv{+QkL)(-RfiI4P+oN_crrg@EWU{v+6iOzC22*ZN zCcphAlvWW9LUZO<@fNrXX7n~`j7&V3Xu|#8j&KADv>h1u1=&-ud5U(6{!^|7O55~ z7pWI17^xU38L1g58mSs7+w1B^3irCoksA9<4rD!$1wmE>SrTMTkVQdO1z8qkU66(Gx|Kne#_QGw zSsbrh9b|cs^+6U0Ss`SJkTpUU30Wm%nUHlt7Ru{Z3RxZ~gA-NvGxFsiX%W9 zEl#Vbd?OoP-84=+bTjNzANjlt>xmB%g1#96a)$c1tYElp`gTd zFccgL5Cw?>MZuzgQP3!G6uh_&kb)T3K~gX&pd24m3M>Ve0!%@sKvS^kzkz$se$x8_ Dipc^V literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/CET b/lib/pytz/zoneinfo/CET new file mode 100644 index 0000000000000000000000000000000000000000..4c4f8ef9aed8bf567ce727c33ba1b97da6f6ee7d GIT binary patch literal 2102 zcmdVaUrd#C9LMoP#8PJTMTJ8GViBQre&hSvH!T1Zx74a>FV%Kpu{nX^A*^?N^8ZC!O&zq4nrXXm+jE}qXj zylz`lf$tyZYWEKpXP3LUpV~Lgzp-!p%*Fi^QS}cT*MT>sgWY>IGPPBs=U3XPSNs}# 
zF=(GWUu37B&azJ<$@W?G5?wpcpoC{<*>&9^SzCz`8?!Y%oT3?3%WP)h?@G%2Ny#(5 zl0Wv8{FmNR%DJPu{_{hc_1<$zJ#o^~jzlee|3RDGBg@$Sl+9_5*bN&R?8eHlWtN8Q zra*~hrDfaA(^4!uE~xCwMVk9y{6$(XB(jDlqVa=J%daPWQ*kZ9Ad7#@Ccz zH(>>lAuB8!wxTsJSaEv4l@xSX$z+!;NbS{vvzu(;AMILnGHADdS*y~)dX)_?(j7aO zYH?qhmNaE*Y4<|Avpi10##CFDbzY(1q?N~gq>9Y1t>VIWwmkM7TRt{oE6%-SE2FPk z<>=e0+S6mzM-J<*mKIyp^Nd#Cvs-JLH>>8hW`!#&bhp1+YXkGN_DY^=lgh33i|M-e z@?2Z@`Y&2P9&Z~CC0gCk&vsweC9CfpvyJuRwrR(Yx_|K-)=)R9#*BToxh$%tDaW-X zUE1>fUTvL>Xxm7q9ylA;gD1`I3R&QB7uYgiG`;N1`-WV7Y-yI zNI;N?AR$3wf&>ML3KAA1E=XXI$RMFXV&mz8gG9&Eg$Icb5+Ec(NQjUaAwfc-goFu+ z6A~ySQb?$fSRuheqUGtrg~SU97!olgWJt`ApdnF1!iK~R2^pZkl-QF^K{`u v;^*lCh(r(xAreC*h)5KXFyj9*j`6OP^YdwDpsFyZI43Vqm7j;Xh4Fs@!4?eK literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/CST6CDT b/lib/pytz/zoneinfo/CST6CDT new file mode 100644 index 0000000000000000000000000000000000000000..5c8a1d9a3ea46457985198597d90f95462a70168 GIT binary patch literal 2294 zcmdtiUrg0y9LMqJAjKa!NEU@iP+BtNfPkTBQWh|FCY1QEJ@bm3u8lI+uGXvZOMWwP*028h zBHaWB(pBhnuTkAz<>?$VVOz%3q)jmrUOQP|SN^M9zj#zfWPK|UyHa&j+}jd$_*E5c zo|nnzyL8O=M~(MnvyMHn$4q%+kBZyhWNvu2Nlk67FgG?-sGF+tOuRo&-Mk>t+>(~4 z5@yVoX^|5uF>IP7{+^(xUl^C9p=swUL=17@aT&eC_c3E;$7pi$VU(5W6B$YS)lF7RmR13zxGz&jJukJqgmRWRULg&B# zoVjQBh`#qFF^d~d>igPv$^B)o=z_XNDV)=;iwc)Y(I4-s;*5=^czD0^d&`W!f4eFf z%{C9b{DOY)%n~Wx(W;jm&yup5YF)l>sw`cUua`Ank>yD#I02#Hzm3u!XpibeR{*ipXITAe)agNklEOhuAVq@!8BHQmF)h+Y|0%| zPd2`0HpdUCrm{}c6dctpd419{a!PMa=#Z_0$8_t}^|Gy}OK(40B0Jie^-}{rX1FB%TeJkxN4=@f7FYqr$6tc&V zkTD^HLPmuQ3mKQw4h$I?GBjjt$l%b8jt_>%+3|4<5E&uI5Row=gG5G&4AW`Hi44?f zN6Il&WURpM@Ekf9~nOq z03-q&AwXil5dz!3}j9!L0)_;Cafi6BP^kr;9W z5s4xaMkJ0%AdyHSp>*0Zs+IS)XMIy=(QY5AvK}Dj<5mqFw9Dzk5%Mn_q zjV%&fr;RQWUZ;&O5@004NQjXbBSA)@jQ^W3N84J@v(@&M&dtop%<`3HXJ=+-W<~!E D%injH literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Atlantic b/lib/pytz/zoneinfo/Canada/Atlantic new file mode 100644 index 0000000000000000000000000000000000000000..f86ece4c4032634b3526187c5f86364077b9a9b1 GIT binary patch literal 3438 zcmeI!X;76_9LMpCm=UE?iki4l7HWtOD27Y%SQ?{(oMcf_@MM~SrDWogggR--OsP$o zWk!pJOQn}ELruKyj%FPo4NLK;p=MfQ8*bdr_q^>z<4iBR%yU0?81C@y_x-0nmi=Um z=O3?|%x`#j)tZOTE$5iGIdvbuzwE~v-#dp}I7Mai<$Ifti4Wgx?XGZ2oR!a~yB~#h zbc$2n_Z43o?p@t~tGoJWFYnrR@40Ia_H{nKHrM@R+iT9Jzf`;HUTJXFe>uqY&Q5bm z)-88G8=C7aU0C35=)A@Ge3EoGhWfl;4135e3u^7%)cGghrZbnlTN)qmZK-JGZ1rTi zTi=cLmS5W7Zd?4lOJyZI-dqc)L`+86E?Ymsy z-QP<1_8*bn0}D>d%DaEht&|s3Ro-F!t^18Slvbt>cPdfe#V*s;p4Zip(0qMF=c%JL zv*fWgQ$GXsEhUKSWcHHKqOtr{-XtnT6SqR%>}(K0ol4~` zA#aLc&k`AYafY~6PnWkHc|$ezWyofmrm5y@@^r}CBh~GNBlI0J`>Gb%eRRu=b}Dpm zm~Iud0MZ1a`*?#+W(V@gAJASZ2bjrJ=!{^Qy zccp!+@6P!^b&lPr?-}xwhW+6Btg zw9O**YOPFzEjZqQScC#tCp6>?hK0x_+oRL*FVBW9E@ku&QP z#q&ke<*e!`kzbG@UnmO^1zG*%?4_54r%}KS{scC@!7tqCKltze-tXTfJs!XRkP2w* z_wV(2g6!wp?0ZOJQmSXbK=Y)SXM}k~su?PeC&0d?-oU`s+wbw8{CE7a3h-c#-i%1{fJ(WQdV5Mg|!fWn`F9d zh8h`bWU#Gfw2|RP#v2)MWWL<0#25)Z2h2oez_BuGrq289nqg_j8n z5EmpcNMw-EAhAJ$gG2`j4-y|FKvokWBt%GzkRTyZLc)Z^2?-PuDI`=#tdL+K(L%z7 z#0v=+5;3a@84@!jXh_tMupx0n0*6En2^|tUBzQ>lknkb#vzh=R5ww~RA~8gQh(r+y zBN9g>kVqtvP$IEJf{8>E2`3UyB%nw{ttO;MOp%}>QANUv#1#oF5?Lg)NNkbdBGE;{ zi^SJz0*pl1YC?>}*lL1|L>UP)5@#gPNTiWaBe6z;jYJy>Hxh5F2{;mQs|h(0bE^qD z5_Kf(NZgUYBauf!kHj7cJ`#N-{7C$f0{}S!tmY6vjsdGV2#}+|Y7PVBI6w{r{&j4hH0CKn@4wct8#a@IWUkT135I1V*@!j@c*NO gGvr^6j$m_(^fEU|WKz$lm?$SQDLOhTI?4(D8&gR`p#T5? 
literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Central b/lib/pytz/zoneinfo/Canada/Central new file mode 100644 index 0000000000000000000000000000000000000000..2ffe3d8d8e012445aa961fc53b38934681dd2a2c GIT binary patch literal 2891 zcmeH|YfP1O9LImh8;GK2VuS>y6+;mL#qdH|>KG(~dPEd5H4L$g#~X?l{56@ES8A4* zQKXJqt`$0$;$>uN&aIq7lq8K1m7GdGsavST>HqoH3%A^S)62eR|L61Y49+=k_dv4M>sp);n{V{&OoR@O9HfWpF)^Bt#n76xdGTXgaw?%*6wZW;YEYx4t z&3Eccv-DD)*J&t9)XT|3oGa79^=f3W)0i|ue(gTky%rHB*KYT8ubXdky{g&0(byn2 zYmU23CyvW+8xOm8^2_vY>`eDw*A%_)Um*9JbLGLcOldivEPtLHCao3y#rJur$kssd ztZ9`ti*HHWc_G?vMkj5b-l82wHmP4so%;7atsT1^(E!<_0q#!iRKH0(*M1{`M;2+9 z13RSauJ@(ewpH@Px`oocaF#rommxi-BuP+mg7h33DNjX)NN}Gq`m}$GgtUp!klVr9 ztD&QYR^8WUzP+JgdoF7Dj#JvZpsPIlUb*&}^t<$(^MgDWUoFq4d?hageJv6FHb_Kk zl|}~4m&m%k+V8qo`d5CaQPqwPC|#`4dnf3?qA41)X|TrT7D(KJo;oNgQwC3H*7%5F zG9>ONebFye5__c=wo?leVx8svQkFoAJJEeOZ3&W zojNLKg^t>vUq5n2riljOiOH+kna*q}^&vurUZqjAvQ=H{Ri*!Xrlv6Zqvaa0P#cAWQueR;} zx%vCYM_t=@_-|j2{lG_kiHD#0d}TL9e7*y_J?(tHFSd)nz3*(V-5!2EHq|SmMw>Iy zoXO^-d(FSNh{xlx5b-`<4~NaKT0J!LH)cMwoGZIOfat=~C38E;7 zrXZ@aG`fN)%hG5IqArNOAPR$M45BiK&LB#IXwA~74Wc)Q;vkxXR(0Hlm5 z4E;eA2+<%!g%BM=lnBuxM2(h4j}S#dG|5mUM3)R@GPKE1CqthMg)%hCP$@&F45c!( z3Q;RVua-u!5Y0kV3(+k^xe)C_)XUH>M8OOVGgQpbF+<4=En6BjL-cHE6b;cdL)8#n zLzE5CHbmVJeM1z^&^Scp44p%i&d@qU?F_wJ8pSg-&rm%>_YCDjw9imKME{HgKr#SH z0VD^OCJB%%K+<4o^1w)hrO5;%6_zF!jATHv0Z9iWACQDVG6G2nBqxxhK(Yc!i>1j6 zBr%pIGmO+&n%ppw1IZ2}J&^n`5(LQ*BSnxLL6QW?5+qHKJV6p=X)*;#m8HoQBw3aw zTaa`?^2JCPBx8(}L2|}O8YF9sv_bO5NSvj~93*v?CU=nJS(@xY(g(>OB!Q3&LQ)9H kAtZ_Ne-n*Bjr&Y4hnQ?er4EXYi;js(jg5_tjgATU3t;rI(f|Me literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/East-Saskatchewan b/lib/pytz/zoneinfo/Canada/East-Saskatchewan new file mode 100644 index 0000000000000000000000000000000000000000..5fe8d6b618e34c4c87a7eac43f7a27af41161d02 GIT binary patch literal 994 zcmc)I%S%*Y9Eb7Wl%-5VyqJMViy|}(1r3t+8kQv%jsz`aOcZA2p+6uFEegTK3$7jA zL`0A)FT<2XUdjsyW0`kP-ZCvYM2iUO_&%@gbmPi1yytT`%rJ}R8@(TIz9RdsljaSF ztIQmpb6s zu9t}GFYyQN%A;F)^=5^;R$r{w3k%$h$}06WyIeLe6{*di`Ley?tMAiO@?#{ep>QT XtO!|>)vO6w6tXHj`elX9)XKuUTnF}q literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Eastern b/lib/pytz/zoneinfo/Canada/Eastern new file mode 100644 index 0000000000000000000000000000000000000000..7b4682a39e2fc97450c99eed4576d2a4614bf294 GIT binary patch literal 3503 zcmd_sX;76_9LMpiA{wHIikTw0q=>RgxKd&+gnGpVOmRCJdIvQ*xgZo`3u-tuOrt@i zgSl@RqGFJSpr$rWiQ=B((UcTH5AL~eJKyJRFB;!9)8jn%bH};7{eAy2aS7AA+y3#? zH~+)SbHluRZ}EZo$SGJZSO0KRe6uw0=$fzdBG#^O)$3xnM66#jQE%vyEH+N>r#HC| zSDOd7*PE+?)mHDH<sWb@>Y$MI~_yBuGCPu zJJMg|Pw^}*;Qbtrza{wZjzI&4psM?CtdqXCoTFP~_6qT@YzLOfAr^RMcY5f{~| z*sZ0f+pQ};6ZUE8S=-{`b6&Gc&xa;OoUh#^FEpPgE|h1;i(5Vt&aT-BA&_mBb0EBxFj0{9}o@O zmB<&Gt`}~$b<#~Q5slPh>3(jlYFzA)O$yVM$Ci(E)32s0&x~ohS<0KLc|xRaF|Mof zit^Jfd%LR_{Tk_39sI;g4XbtQx{XBZJ8rVgb+$rJ68Y`Xo2 z%SDISE4t&1T=h!WA>C=n=c=>UM(x{muJWxtDE-`Gm0$S=*|oB#>Q=N=`j^FsfSd#w zxT}WX!RqIS*FL#jKfp{p|Fu+tI3o;p(w-_t=vPq51oSuSE^c#s@5ze0@m_LMQP zM@7t&yK+p}Rx#$9haT&5SdBeYqsP^5R&ndg^!V~+YC?Lxo>(+XO^VOflXHeEVc#vM zES)K)24u;%9d;4lI9X1M3=-3-f_&pKA4-bwP- zvvbQ-Vr;FJnfdD7Fs0`tW~;eg2lTw?6g98*l1%EAC6dZZWOA!ykzBM+raX!h@8v9( z@1G49sc8zO;$Db)M&6J(uVC^?&NOPG|lKo6YGwQe4Ny=`7q~YiNCU zw?3N=v&Yy54K(j)^S))?5iw@GY_>YqN6f#EUZwe=HF}Tu3-dV5Gv`)v6*7Xz5F%rU z3?ed$$S@+~hzuk$lE_dZV`*sy6B$iQGn~kHA_IzyC^DqTm?DFUj4Cp$$haZ{i;OHX zw8+>ZgNux=r5Rpid@aoYBO{CqF*3%;AS0uU3^Ov$$Uq|_jSMw1*2rKZqm2wVGTxSE zz>yJ0h8!7lWYCdOM}{34cVyr#&B!A|Z)wIJ8GK~)k>N+i9|-^w0VD)S43HooQ9#0g z!~qEe5(!Ha3M3XtFpy{<;XvYn1O$l)5)vdPNKlZdAYnn`f&>PM3=$ek6B{HrNOX|! 
zpcNm!5Fj2CAs|FZjF2E9Q9{Cm#0d!$5-B89NUSVPu#jjW;X>kt1PqB75;7!aNYId| zAz?$}h6E0Y91=Ptc1Z9nP4tlPA@M^3h(r(xAreC*h)5KXFd}h80*OQt38kfpB@#?a z6HO$XNIa2%A`wMGio_HNDiT#BtVmpuz#@@FLW{%}39hAyE)rfOzDR(P2qPgzVvGbC zi82yqB+f{nkw_zH`aJQ8^%^p+;} zNboI9^pWsen)o9J0CEH%hX8U6AO`_*6d;EIavUHB0&*lEhXQgeAP0k`IU10|!O|QL z$N^z#jtJzCK#mFIpg@iaf*c~q xF@hW<$WekECdhGu94NNuyCVLl7mAxXT*Ax&<8KcQ>>e2GZx0Cx3<(T&`x_HwlD7Z= literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Mountain b/lib/pytz/zoneinfo/Canada/Mountain new file mode 100644 index 0000000000000000000000000000000000000000..d02fbcd47f845bd101a7ec97150df7821b826357 GIT binary patch literal 2402 zcmdtiZ%oxy9LMnkp}>tlrxHU!BP|I6Kk2@lt49^Pju^YP-?iSu291NzOOVAZ|rW!>%DuJ*iiN$+i)DfZQWqI;c3 z)mx<(WnWaU>d*LC_D_8zK5Y-vzJA0`OpA*l>$x9VtYA{Rs`(s5= z^&CB;_^O(jGpU19$5cq%Ssf}5D~A}-j`2O}+Ved+?97M=Kir}tKI{>ZZ+A=idW*QO z{RMgbmRfOxr$)|NoiCya?w7N(62y%Ox5?-Qd1_9mL(UD1S95<|q+`AfRk6MhJ@3em zYX19|^-Vp;Rh)O8y!rV7wV=!|7uJ6!ZgGAiZ(Z@8SQK?s#wThKKXpzgI5vocu_HS1 zvRm9fyjLfkYE;Qx+jYvp61BLwPN%-QM5WcW%Jhx1RYv|gxuj%5IpZIYccg!*mIf!v z%$Pq!=EX3XHF-v4ANyI}`PGnEw%?)e8rm(E@AygI-MLNVG@Q`)w05d{i}vgLYPPD} z#johR+_ft2w^5m&+$8c(^~r+pDp7E-U9Py2BMRT>)hka|DpymRe(;0ks;JVVi#y`f zL(2+vi8oM{#wKfb*>}o)HBy&5kE!zSlVrvG3!-8)Lav?~6>Ij5%ZJDML}jZ_J~G@c zs%j3&wO#AQqpp*>x~)w;mV7`zUguFY;X8G0exa(p;?;HW$*S&nh4utTD$l#wy8ee> z)cTH9@`;lX;z@6od}?4^G?d54#vMNKwDT{yq2Z9&7ozY ze*Zh&0YQHMZY@IWdzk%{D_w5k$~8}^c~+UH*llJbM1cKp|BJaz*uUdH`TfienI1Af zWP(;RLu87`9Fa*PvqYwe%oCZY)yx!`Dl%8AnJh9}WV*gUcX6ne?k;!w+9+^JJ{E-B3WB^G4M-GrAaAW~V14kZ^L?D?!Qeic@K$3xE z14##x4OvUQe!o_L6XCf9V9(Sevkwq8A4KoO!AORY6m#SlNis*4ku-DU8A&up zrjb-zO|Fq-TTQl+bX!fnk%S`|M^cXD97#Hob^ITv-C>Hq)RaHTm64L3lA7d7OG`;h HNp<`UV`q_f literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Newfoundland b/lib/pytz/zoneinfo/Canada/Newfoundland new file mode 100644 index 0000000000000000000000000000000000000000..a1d14854af6b82bfe814f2c64aac01bc053d5fcb GIT binary patch literal 3664 zcmeI!YfzL`9LMp+3%4}yh1kVX0ZWmK)@oU%AV*uo4Cz>#89ZrJ2Gyfx8HM9?nNnve zyG#aDR*(`ArK`qfH+xddQPYm&Mz@U+Ei@N1B)9WD_Qsl~rkUQg%=&p))Ny?A|Ne5X zEu5X^`qL{$f5MwrtA6v`{{j8W%I*2xo?q3}QRXXotbD_sMHOow4!qzFdtO}8-}};% z4$mt4xc6nN(euj8GXt+?Z1Pl&{UuN}_(jiaXYUKF?y|(Q`q%ovn&50t^%v!V*S9Th zsHravyisj6)V@;YeRJu=hPnqI@V+%Ssp0L}`QEkJu?_1cW_W$W4mPY$NbqhrBgV6_ z$N0d;wgaAbS_cQ}cW?9f%d&6tZ;z~e_wAavO}9wn_g9I1y`tp4ogX=YMq7SSHCHtH^W>M0`j9}&F1Lw=XkCYrM1)c3u6In4>}s`>B{rzJ9=T6XMq4mA1I!P<4s51T90kMH_L zYfZg8R9+ztFE5ur-M?5IDJqqr8!XY5Qy`CCK2iKUB2ykq%6ES0kto}vGo1Eg6V&nM z1gAp{QNKFT&IwyfFd_ri3iPhpzsJENb| zZ&F`%hWlsd%%r0#?)<)@zy5EDk2osg54XtyUn_Cej?d-U?`;t0)UKE3zOYgZoVPu6 zUTMB{{`A_=pj(PR8$9ZX(2&Wqf(iZRg%U3qA51*)xIHv>aByg&ZC}t58NAS+XAj#o z$QoYewv+3+S|dv0?UcvAw^HX^6iU1E6DvI{E;Mr5YHL(Nd+4IHrPjrffslJZRnXn! 
z4~;(lXmHHtiqIwd<_0gVskg^&%n4q$yxhM0xzymeqEh>cB~z^NIR*9vE5*tfk!fdU z^tLjOEC^){PY!17nh~1Ry=QPz-6f&P2Y(7)xl-DmJw2@KMPuwKYrBgauUk%C(JHRG zG+s`dw?kY#phMwuu#6S4z()pCH$gZb+d+HDW z{dZCS$=`bFzb732^huX191g6C=whCu^|Lb1j_c%eH}gr)aJZ#8qVLI{da`p}Q!@1X zYW<$7-`DWH{=mE*aWq!&mtkZu@tJCJ@DbwiMjAT2?9f;0u`3epy&FGypM z&LFKpdV@3v>5fsi2kDPdHwftv(jufsNRyB*A#FnXgft526w)fBS4gvvZXxYL`h_&i zs5^$V4CxutG&Eh~fwu9|eFGYYbPj19(mSMiNcWKTA^k%dh;-1XTZr@!X(G}^q>V@) zkwzk&L|TdT5@{yVO{ASjKaqwa9YtDd)ICL-igXoeE7Dh_u}Ei;)*`({nu~N7X)n@W zq`^krVWh=I-D9N5NSBc|BYj31jdU7mHPUOO*+{pMb|d{p8jf@vX}MAN9BDezb)@Y` z-;u^6okv=a^d4zG(tV`;NdJ)yVAMMR*#btr2aru*)Vl!L2FN}@HUhE}kgb611!OZI zy8+n_$bLXJgi-GZWJ?(Jo<(mmAo~N^Ajl3u zwun*h5oD7X^)5lSiBazpWTPNE1=%XdUO_esvRjbtg6tP$!yr2b*)m4GXOK-})Vl`R zHb%W~kd1@v9AxVtdk5J($nHV5kLw>ypNLL>SSA0DpT8bIv3ek-k4aC_TWMH!dU9HF P%CPLz)a2CUl-S<@u$syt literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Pacific b/lib/pytz/zoneinfo/Canada/Pacific new file mode 100644 index 0000000000000000000000000000000000000000..9b5d924173e6e71c2c0a73cf2aca368d3af002e6 GIT binary patch literal 2901 zcmd_rT~L%&9LMnmUl4>mnJ5w3NlgS50YOnT({xr$L}ev0!n+jJFzEWqhQ&ivqqmnnp1o21F!!oHpl@V+xnDOm?@_1x$K-UKQD=NL^3(2*jIVZ^{JiBg(_Fb$ekodM&X(rNuNia9 zZ+Yo*E-uBKchA!ode2rZvEy`0`zX~K5~EuiBGtw7etl`*4b^tES^r+qq%K!?%9T|I z)zxyJv~N0Wt`*nG^@Z=7KeDRi&xy~Q8zal4V`!DS*{eWq^(a%fI-b+FTMAU?$$b6S zm(!I0fLjMXh*z$iaXRS5a233+r|z<9sOh>mR=Uj&Gucas{MqemyVm@y$~IxVJ~O1luP$KJl6#vSd{>8HO^PELRydJM{Fn znd;%UYh=cWWhQgOHktX!43o8Bt<2hG%I{Qmb>T#2EZk(Ps z;HsMUMU2k%H>&x&{Q6Pfezl;iSwB|0L**?!q8C=KSBo-zviOBM0`Hihu7N=LW>*jAJ=}S!8JQl}tSR#9-e=%!{_#gP8~Y<3wr|g_0TCFFVv4?3kXyYDIrorq=-lr zkuoB6L<)&i5-FvlttC=Sq?$-Mk$NHpMJkGv6sajvRHUj%S&_OTg+(fhloqM2qb)8{ zU8KB7eUSnq6-G*o)EFr;Qe~vfNS%>FBb9cvrABJ)Xp4^%tOK$T$Vxccr9jrg z(JltE8jf~3ko7effh^!#8gvc5qi-@cuvW&<&y8gfO7-m;9&93M0 U%uxx+35mlqladmW5)#Ay2A}k~%K!iX literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Saskatchewan b/lib/pytz/zoneinfo/Canada/Saskatchewan new file mode 100644 index 0000000000000000000000000000000000000000..5fe8d6b618e34c4c87a7eac43f7a27af41161d02 GIT binary patch literal 994 zcmc)I%S%*Y9Eb7Wl%-5VyqJMViy|}(1r3t+8kQv%jsz`aOcZA2p+6uFEegTK3$7jA zL`0A)FT<2XUdjsyW0`kP-ZCvYM2iUO_&%@gbmPi1yytT`%rJ}R8@(TIz9RdsljaSF ztIQmpb6s zu9t}GFYyQN%A;F)^=5^;R$r{w3k%$h$}06WyIeLe6{*di`Ley?tMAiO@?#{ep>QT XtO!|>)vO6w6tXHj`elX9)XKuUTnF}q literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Canada/Yukon b/lib/pytz/zoneinfo/Canada/Yukon new file mode 100644 index 0000000000000000000000000000000000000000..8604c5c535ed426dd42ca319664deed06de15923 GIT binary patch literal 2093 zcmdtie`wTo9LMq7Qm4zo2XZ>s%|1G7Zf>_9Zu4~Z%O>Y+=TT|-i_D#or}HywB>z^mx%Znn;vbo9 z3O>9o^Uh6}`%YaKomi~r@Bcs+M00du=T~ZB%}@INHSenjazE2WB`H-jIc|!xld5>^ zQ&aN8R<(HeoGBgcld{wi6Bybo<$W)iiX$tfvf~Y1)w58RgnD&#%QUGeit5^`?`3IL zPzU`}Rq$rMUiQaLwfy5dy6*ZXYDGHFJb3XP_0YiY=Ha&vsYklLHjnn7k(Euu=CRh- zrM}#lRrT9t)i0wuRCZ8>M$hYpoNcOM=(t`pzE(YcV#qXp(JA3QN6p&z8l`FDPSboW zAWy7_nI{uUTKsh;vhJ2d?i844&F2#RcDY_Z?`O6CBfpMKj;al(C-u`~=hepk8~T~y z<7!juW&Lbwx7r*YGh1FeDqG7gnddroN^8!bX$!@qZM@%f6xB(`)pirlDvevASKAXI{leKv)fFw)-2*q&i?w&_9bK1IPwu$xZ91)by%{DEyd;USAI+}3 zLD@BY)$E?=mzPs#%`0EF%bvc&X77d7(zmI{?0d6BGBfPwt|^&QGpA+!2OrPd-|F*u z-as;AiamGxrn~b@_f($uwq(rW?3?S(1&!f|FII2w7JENs?`FQ+7tQuXvz05uJ^wQD z?>k7^KX*Hr`1}7nd+mMo;0a_G$TpCDAR9q;f^5ZU_kwH&*$uKCexCgx8$x!3Yzf&D zvMFR&$hMqzU&zLgogrI8_J(W@*&VVyWPiv8ksTsiMD~bm(rI^zY}0A?iEI?vDY8{$ zugGSR-6GpX_KR#7*)g(ZWY5T^kzFI(cG`U-8%K7IY#rGLH}An)%wtDXW#8`NF=u<7&!T-+omM5yeI3o|vU8y|c%-OSzZL0l zzMZ;XzTb ztNwH4(dg8ka*6UPOw`^HG0G=nx%9PylwaU6>G$Qh^1l)yC%iwT0!Givzym*uiM=B- 
zsC7_Wv!_Q+s^}DxA0O7ixx2)*9p~ z1v;!FMa}rQScX3nre?mGA+N8!q;A*|AtN?>iCHzCGBWiKabwOw9W{Ma%uca%wC6%4v{ELe zj|hicD(3}ei{zkgd8_ZRO0g!%lvDm9_1hRd|Il~hwi7`*?a2X=eqdZ@R2>!zT8FeO zIH?v^9MPFcudA%wXLWYSQz|E+Q|A0!s}=|El8eVZMeeyKx#U=lxP7QdF738dUdMcy z-{&WmIk)QNZHJXJ^NhZu-ybQMocaS*<+PCW|hGs=Hp> zBv*~i6L)VfmBq)e6RR6qbjkjU;vQFtE`9VfvBn|w+Ol4;Zc3ys%WfBCzj*8Nx$mm- z(OI%0XpgGs^^%o8Y*tlmV{-lJHEKgkzr62_ELB~6P}V$GDr(cV%etmaQ9t`tegBFm z(ctrhZcO$SjbrO|^VDxd^YMKB!19V1&t z_Ka*A*)_6lTeELu&+q!G5J z6G$tNULegtx`DI<=?Bsfq$5a6ke(n-LArvp1?daY7+cdBq&2ptH%N1k?jY?!`hzqG z=@8N)q(?}TkS-x@Li&U>3h9)sX%*5dq*+L}kai*cLK=p23~3qCGo)!q*O0a$eM1_D zbPj19(mSMiwx)YX`;h)24MaMKv=Heb(nO?-NE?wpB8@~kiL}zz^b%>Nt?4GxPNbhm zLy?XmEk$~YG!^M8(pIFeNMn)CBCSPwYipW|bl29j7wIq3V5GxHi;*59O-8zmv>E9$ z(rBd9NUM=v+nQ!0-L^IDM*3}Q8jf@vX*tq!r0Gc4k+vg!M;ecG9%((&d!+eD_iau4 zk^bA7GXOaUkh1_e50EnfITw(#0XZL#Gs6169ufiO0C8}D#O9~QCB!AiCpu#D9dQo( GE#Oc1o$gKm literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Chile/EasterIsland b/lib/pytz/zoneinfo/Chile/EasterIsland new file mode 100644 index 0000000000000000000000000000000000000000..8c8a6c7d914531ff2f3de76c7b9d50fcf0b3463f GIT binary patch literal 2295 zcmdtiUrg0y9LMqB;eXIc2{lwIG$ji>JpAzlhWLn_#1y4Cs2Lh6V}J%kfR-fYDk--@ zXEis-YW?v~*;l(zO6pn5OvUBda^#A-!v3w8m8E5#p7*bdF52p@i=K0S&+E6doy*TV zxU{Y&$NAfdwZHIiy6nSy(nfpr9@U-wDGj=R&6lS>9)C=J4;ARk+kTaB|4HLCevQ!eJfz#jwO(PSRJr8c2%9unK7Vp^*lW- zp;-C7hx9EmrzOuBq4Q3}sr;|g&GbFrs9TSDO~DiWYR0Z%Q&_)G&Fl;rU4C3<)xK$p za$b|!B~P2;q$eeibwCGxY?3*N+x48`2vu^fP0u~tsBRA}(en;DGJpScUHU<+T2Qvh zEbQJVWkqMq9qnCmXZp{&ys}w>37_bSf}m7<`<7mu;+MrIy7iJv$#U1;4SMO&Gm$t+z*N z`yzGScO6pSJ)~EkTp?@PkLr70pDhiQyLIC;RjR3Ai@vX|NHtG>)!e^ml4^;5+_d^* zRO`@cvv%}XYVDCyv+n$H^}x%S=E2X;%KAqWP5V0^%0mr4z2Uh%(y?Hq_C!TQ{?jKq z=D+=f!#ipn$KLClsBpNa%ZUqz_df6O{&n+9uXw!kOM^~;XI8m;+1qhE9=?AP{P!7c zwSRGEbiuxG!uHf{E=opbg-pxU&I_3sGBb3i#tU=f*vSF2L#Bt!51Ak`Lu87`99``s zky#?sMCOT16qzY9Rb;NnWRck-(?#ZsOcEA?ZW%ha?cmAd*5P zhe#5UEFx)iwRuDm>1s2Hq|(*q5=kbKO(dO2K9PhX8AVcx% C7-D+> literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Cuba b/lib/pytz/zoneinfo/Cuba new file mode 100644 index 0000000000000000000000000000000000000000..1a58fcdc988ea6ec1bb660ceabe85ea19e5b0774 GIT binary patch literal 2437 zcmdtjZ%kEn9LMp)-yCXL{y`%Nlf+6;u^XxQwI zbvPPo9rB!}K@Dp->4n#JX~fn+H?s75iSkYHqTLg6)3h9ST*4t4H&XA#I1Li>Q>8op zVo)Z0yVslO<;u;+H+#3Vy)Bb=9dvKa|50x%Z}%prcI%WC2fV38&+D`~^6!^z1v6UYTUUE?j8LxI`c@T8^5DK?rcnO6AFDYt2*4hYgxX|UUkV$oV`%* zPC4bynfkIO#SVH&W34iGq(|==*eCb)9n{rtX>~&akbR{hCJ3#EE|(|$m11DrEKB`*_1s?HeJq?ilhjs_$ony(LYG=gK()F zJ}q1J$Lka4hO}zCqfZ_=snu%+b!%g*KIQAuZPokq>8bB&&FU>$Gj>9@r@tiI2M$Ty z^h&AgY>;QK<;jkgpwwSTk)6A9Wmm@}*&o%ulVG-f~_KXbw%NZ5<-<_dQ+Zo3( zch89og-&~6<3ge1N1X|O-uWcYA8>NawJghA1p%`j#|aCwIDvoO-CO3Hc6ZnQ_=)+q zP$<|iw*%QBvPEQ%$R@33m&i7)W}nDLt!AglR*}6Tn?-huY}ab`i)`3xc8qKp*)y_f zWY=i7%>(=9FdGMUj%*#-JFu$mqqO+dPUv;pY@(g>szNGp(D zAk9F!fwTkZ2htFvBS=fErYA^KkggzYLHdF;2I&ma8l*Q!bCB*J?Lqp3GzjUC)wBrd z5z-{2OGulLJ|T@lI)$_f=@rr}q+3Y4kbWTzLpo+PEkk-{HBCdhhO`ao8`3zWb4cru z-XYCHx@R@*L;7bm4MaL>891R6X~bbG!*Hm z)wC4pDbiG=t4Ldsz9Nl9I*YUx=`GS+q`OFak^WjugOLtfO^cBpTTPRZE+cJ5`iwLh z=`_-6q}NEZk!~aHM*3|v4M#d|H7!SaZZ%Cux{kCR={wSRr1MDYk={H1=itYfH-OK) Z3Fi41rlh4Tn7?42KQ%Qa)jXxf{0?B7_b&hd literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/EET b/lib/pytz/zoneinfo/EET new file mode 100644 index 0000000000000000000000000000000000000000..beb273a24838c96e81f0469e3827bea20ff930c3 GIT binary patch literal 1876 zcmd7ST};(=9LMoPj+LzFi-wRqS%fGa;qaIcM1}{Za#Y}Gq#_xJQba+-z$DgU&J}Z? 
zHdb@x)XA80dh!D60;%~pTgKcRYgW!JSK7l|n>wGy=I{N#?W(K#pPjw_JKMSYzCYo$ ztu1-Je>@MlU-^>999;l&O8B&feUTrnj~iT1QQ(b+#nf+qtRM6})Tj z#QCf{Ctlru|6)C{x7Bn0l=WV}sI5mv?A?hmZQFH1eUaUIuXkAeJNxB}`p-VTtU#1I zwd6Uz=wPre>^BWI4BI&$vF$s=cvoIY~?NCHR(ND4>}ND@dEo-Pd}4COnR~j_k=&8wk?fK5k^GSf zKxP1$0%Q)5NkC=+nFeGYkcmKM!qZI!G8f2XAhUr?2QnYXgdj75ObIe4$fO{%f=mlC zFUZ6oGvn!|2ALaAH#x}cAk%})4>Cc>3?Wm5|I;~&al5oIkA?Dw^0N!G13CHrP;S6` HD~Ndj&PSQE literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/EST b/lib/pytz/zoneinfo/EST new file mode 100644 index 0000000000000000000000000000000000000000..ae346633c1690d49530e760f8506218bfa9feef1 GIT binary patch literal 127 zcmWHE%1kq2zyORu5fFv}5S!)y|D78c7+ixxfSeG*`e0_T{D7H)YycO~98)d;2yPBR literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/EST5EDT b/lib/pytz/zoneinfo/EST5EDT new file mode 100644 index 0000000000000000000000000000000000000000..54541fc271644e44973989a27f3846a16800caf5 GIT binary patch literal 2294 zcmdtiZ%oxy9LMnslIR5zDhd^;pkg5Z?J6P_CXB9jh4f-bre6)bLnuyaHz>oJDCyQ* z)1ZH&EHi6!WMy%7`BeL=@QuFW_VkvO@cuBz2CzT&)!FFGdi&rMbnlXuI+eLIcU zY>@=LStWk+hE6&XQdb}D(v#lVWRhQ6ty8u(nQJ=k(bqOto9mY5>QsM@xqfz{z9A>U zq-EyF>eAug&FryHblHhBX3lqgy1f54b;rj~ z>pQo9sqT7Zm9A)eU(M~>D0kQFRP!2FN@ZbKRaMTDs?oisI)8<(9^7X9NprRTK&zQQ zlBE~Cx>eo#b%g{rw5Ww2W=hSnfU50@ll#gG)uP9SWpR3n3f7&J;Mk~I;(J$?{5Z+f zXPnUW$1j?tk-fTM_n>)TXq#Tvb!a3$+A3P%FDzWL18TdMFf-#-w)D zR9z@dBMmB)og$%A<*Ir7s5I}(P-}+2l9rw_(|Y=%emI2!s9mmjkuf8KMn;Vc8yPn;aAf4j(4BVd$l#IDBg03=j|2dT z01^Tu21pQ)C?H`#;(!FgX(NGz!f9iH1Otf%5)LFDNI;N?AR$3wf&>ML3KAA1E=XXI z$RMF{+Snk$L860%2Z;|7AS6Oah>#c|K|-R0gb9fg5-6vQ6cQ?@jTI6sr;Qd8E+k$^ zz>tU`Awyz@1PzHA5;i1mNZ_0{a!BZ$Hg-txoHlw$_>lM^0YoB*gb;}#5=115NEneg zB7sCAiGh0lNKlceB4I`1iUbykED~C$jV%&fr;RQW uUZ;&O5@004NQjXbBSA)@jQ^W3du^?Kw%U1t83iQ;MR|eZ;)3FWBJbZZQm>H! literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Egypt b/lib/pytz/zoneinfo/Egypt new file mode 100644 index 0000000000000000000000000000000000000000..0eeed1138f2849ad49240834b8a357467637f1b8 GIT binary patch literal 2779 zcmciDdrZ}39LMnoydi3Zm&^-9CQ2w?5HXarQXDF*$H5W7lm1Y{Tk6m7hA8~zn$FTW zqh-sbmgOq7@_tF(v@I`WJn6`$TRyl-G%vptFi5iJ{W$-r&Hl3Q**UK>k@45>{mIIm zmY*R0b&bux@aFcKH{V-%=HqzjE`7YdNS(NIL!UfWrOL`acfP&wj5@WoQh)bNwJKko zuhpl8>igFnefo`UvMOnkQ&k-=ue8|jT&Zx>Z&&O6tGj24Yvm>W^|d#}jW2h)eqoIF z`vFY-E8r}&)?3$Js9>qc%!6j6IN`A>RNyy&%O-KX-_i7tzR z?$g7fMc0Yl-I$n6uUpF3Zuh3kMQmiPAA9M57blARxRXP?9+h9XJ+~By_@fv7UMn|y zy*IYeeP*;&eM@R|!pQ5Y--NR|F;1!eL-y;W@EvME><`X>Tjer2Y@d^SX1g3%^`0|m zXQ@m%zSw!LbeyC#Z*}1k|w6gLP`_206TMv>vXf%e074 zowheZjqrcgBUkO0qt1mm>3N4#`iikS{iAxBIqzAWxpjrgJa^E^>bg`0l4m%9!deyB zK1q*hJXvOUS+29?dYN6UboMn*<%CYtIo(rLPGF&vb84c>y*NteR_{>by8Pmd+cr!E zkH+g@S*{G$t<*2C>ZZo4L}$XxFy*}efs;2TMoyfYrY9velaojK&MWOM%lx?0PJT_9 zoEpB%nOd<;PP_7sGky1JIpfR@{p$KHYUa+j^{nDmYIf;-?T(+V<`j(8uMHlp3Ni=k zxl#Spyp(KbelsZ-bWU~_TuP7&?{snskF}MH&Nnzkn``CbuP!)Cmi;J;*Bx<6@(;++ zkcaeY6#C%Ty}#SY{Plmn@BidMzb`~&z&w>Au1|=0L_0@|=J&p*XJm=g)J#6I_<0B) zS^xd4?=%0+KI?z*3+C>cTXHC43-at+&GU~m0O6Ob+-Z9w{fGy>@a(h95T z1=0+w=?2mcq#sB_kd7cNL3)BT1?dXX7NjpoW01}ut+ATkAkDFw?jY?!`hzqG=@8l$ z@r537rb$4TkTxNG;%F4oDUMbly+WF0HQnN9m(}zOX&BNmq-99YIGToZ4QU(FH;%?3 zo#SX7(mSMiR?|I>_E}B;kOm?hL|TaSkfVu67m+q1edK5)(n*e1BE3YKiFA{romSIN zq@h;RQKY3vPdS>3bQNhU(pRLhNN17OBE3bLi*y%huhsMyX|UCF7-=!mW2DJQmytFj zeMTCMbQ)HVD}vWQ&kJLN*E6C1jhB zeL^-0*(qeJtY)u}%|dny*)C+ikPSn24B0Yd&%{HTGiK_+uluw0KMY#9S-1EEcCKee WVz2(a`uFbBGb7o2N$j1F82&ft7KZr% literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Eire b/lib/pytz/zoneinfo/Eire new file mode 100644 index 0000000000000000000000000000000000000000..a7cffbbb95616c3254ca907795b9015f33a11b0f GIT binary patch literal 3559 
zcmeI!X;77A7>Dtf5EV3hG`B?MFD!Ch0teN9l&p6ktNnrTLxrjMO7=lssVk#GL5+uM6^Ql#^@ zM{o*mx=dL$1rcF09C)Y9NgVlOg+D$Wiz%)H) z)J8Kmex>;$`iA}@GuoNg=w&^>NuJJX8m6;OmzV|44zr-3hW@Jff?k-PtiRs0U1!ga z*WZ|Qv*<&QUi|S^vm_YM zT-0m!_0r$2+@Zf)e9L_Q*)qL$YLH$xVzT~WWSL%{kghj$pKE?>)ko*GUZ?XDcA5M_ zf3q`YyDF&P!0d{jp>~HotoMXZ)O$)l(S?C2y72TFz3+G}v;U_p`oM-#b1>(WIh0*f zAI{utj-)L9)xGBAjY_(>Y?wY(cvhc2&{LmT z{fj=k^tw6s`C@&3%s_Kt=4gGfUyQkwk*F_sNzzvm+v=+g?l-?hhv;k0c~cVXr%MiR zGo`nR^!2q%&5e`3=H{rcOj&+zif>AuE*F}h{kmo8^8T%L1e2U4x@iIzRnt|59W-i6xT)HGt*Mr-%mWd#P4$FJrbftVso5$|YWe;w zwQFZd?PK4_gC*mo&iY9bbY#A&yD(eTTQ^SCpCBrD))1wWQ&od8aq6L1uL|kcOf?K^ zsTy_ZDvc}GR!y2dDow6FCZPdUB($KWG%da;kK~j{*rpxwXl9{=f4NMW4PPUkag)VU z)X<6OyH`b~=BVaTx$3bVpQ;x1rmL2bBUH<>4Am+qQ(7J9r=GZxCau41ub$kOAZ_L) zN>pxZd1`E1iJn?lo=yspw(mco+I8}i_WgrYhek!xv3;59)zUx%X zmW`^*u{o-1_5u~V`Fn|*I6=iPoF&f=9jdxbcu%_b8ZSMP2T0Gylf@g`S)QvEFVBaC z%JbJel2D_QO4waZUbyb?Am^eG5XFPf^VZ_XL@QpRPKn7Kv0oLHdxjXou>M6Xb< z_T4P~gQutgQQ4AIahVLPCo=HlWEoVJDuXws%WDU`G9;^yBjss6Zm*17G*6A%k}Gdd z`9Vf!PnUPn=Ss%J4Dq?IobR8{@_yy+{j5;lKL5e@+`m*Y;O~FXKj3c9@^|~?zy00q z+t(X9j(xAZ@)6Tv+ z+t2plKR ztB)){vi`2N07wN~Z3&PXAVol`fRq8L15ya25=beKS|G(hs)3XPsRvRJS6dOJBuGt= zq99d4%7WAdDGX8>q%=rvkm4ZKLCS;F2PqIzAy-=>q((@QkSZZ%Lh8hS0}6#y3Mmy* zE2LPiwpvKJTy4FOf*}<{N`}-7DH>8Wq-;ptkisFALrRC#4k;c|J*0fDwth$fkqROu zL~4i>5vd|lMx>5NA(2WVr9^6p6cec?QchP}Po$u(wxUQ$k(we!MXHLF6{#yySXWzF z9+VcTEmB;hx=4AE`nuWzBNcYFB}QtD6d9>9Qf8#iNTHERBc(=ajT9THHd1b+-blfb zio4p9BQN$E)3+#KrRjB z+CVN2QJ&BI%SdzwXr#YK4d6!1H*uKEA~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT b/lib/pytz/zoneinfo/Etc/GMT new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5SsUJ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT+11 b/lib/pytz/zoneinfo/Etc/GMT+11 new file mode 100644 index 0000000000000000000000000000000000000000..af4a6b3409c20b6f505cc78fb90bc6dc87cd1b72 GIT binary patch literal 139 zcmWHE%1kq2zyORu5fFv}5S#t~|I`2m26x{OZ9_vKKZLMWm@(`>UPx# literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT+2 b/lib/pytz/zoneinfo/Etc/GMT+2 new file mode 100644 index 0000000000000000000000000000000000000000..85a1fc1d22404806d9dbcda1569b9603ded4fd64 GIT binary patch literal 135 zcmWHE%1kq2zyORu5fFv}5S#7)|Hls)7~FkBv_U#T25W+fPrm+2hnS*Ksmkro3J7X>Y Drg;!U literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT+4 b/lib/pytz/zoneinfo/Etc/GMT+4 new file mode 100644 index 0000000000000000000000000000000000000000..ab74517457178d8448daf3a7a745e51057298dab GIT binary patch literal 135 zcmWHE%1kq2zyORu5fFv}5S#7)|KkT37~FkBv`v8A5W+fPrm+2hnS*Ksmkro3I}T-3fm8uIjBZ(*?3G* E03^x@WB>pF literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT-10 b/lib/pytz/zoneinfo/Etc/GMT-10 new file mode 100644 index 0000000000000000000000000000000000000000..a4da44f5edb551a60efa97afbf016378b831d0e0 GIT binary patch literal 140 zcmWHE%1kq2zyORu5fFv}5SyKWp=SXDgS&5tuAu>tA3|6w%oz3`FoV#{;Ie@jX9qNr F3jou|3_<_^ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT-11 b/lib/pytz/zoneinfo/Etc/GMT-11 new file mode 100644 index 0000000000000000000000000000000000000000..e0112a9ce2d32319d13d698f75bff2fa603d813a GIT binary patch literal 140 zcmWHE%1kq2zyORu5fFv}5SyKWVb%r)26x{OT|+}4KZLMWm@(`>UUUT-3fm8uIjBZ(*?3M- E06*3VT-3fm8uIjBZ(*?37& E08Ly9Bme*a literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT-5 b/lib/pytz/zoneinfo/Etc/GMT-5 new file mode 100644 index 0000000000000000000000000000000000000000..8508e72381f72c77150a1fde02a64a2168bf1979 GIT binary patch literal 136 
zcmWHE%1kq2zyORu5fFv}5SxvG!7YG+!QD4R*A&PNA*>T-3fm8uIjBZ(*?3J+ E09xV-WB>pF literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT-6 b/lib/pytz/zoneinfo/Etc/GMT-6 new file mode 100644 index 0000000000000000000000000000000000000000..5b9678ea2809932a4b0fc80c33448148d0baa9c0 GIT binary patch literal 136 zcmWHE%1kq2zyORu5fFv}5SxvGAtZr;!QD4R*9^!FA*>T-3fm8uIjBZ(*?3D) E0BC3mqyPW_ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT-7 b/lib/pytz/zoneinfo/Etc/GMT-7 new file mode 100644 index 0000000000000000000000000000000000000000..ccf4c39480488e44442ae77aff9a842757af64e9 GIT binary patch literal 136 zcmWHE%1kq2zyORu5fFv}5SxvGA*q0Y!QD4R*BrT-3fm8uIjBZ(*?3P; E0CnyPT-3fm8uIjBZ(*?6^LgS&5tt|gEgLRcrv6t*8Qb5M=ovH=@rr)$Xt E0Fe3%WB>pF literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/GMT0 b/lib/pytz/zoneinfo/Etc/GMT0 new file mode 100644 index 0000000000000000000000000000000000000000..c05e45fddbba6a96807d30915e25a16c100257e5 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Etc/UTC b/lib/pytz/zoneinfo/Etc/UTC new file mode 100644 index 0000000000000000000000000000000000000000..c3b97f1a199421d6d9625b280316d99b85a4a4e8 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss5kyH%QhG!qfl!}_6ucy&7TF^b!_u%qYjx(n zIhHv!ZwTrT(eO6F`-P^G)hIPxo389)_l-OWZMoqS>q7T`dr(CwTbl7NbX`N(ky|Tovj=p47pO|HD`##^Q zsY=3H#ACk zb&d2`y;dS@m7WVSgeRcVtM9HIg`}L=xLd zWoY+u9eQ!P4EwoQhc~+Aq0h5*MCDXXsvoWoFHP2wtHO0uexxRs4ULpouF90s z>oT>xS*A^Gk?D)R)u%`9kr}zCG_U7MnVD3h`5kxZtRAYfzFwiT+X{6~(*k|wqDPUJTO>`uU(RofSX!! z=BO-aJ*O|$R?17w^}2B7VO> znGbIB;O4;`KISkt{{D2BPoBR|$ZqouCn2|f|LQO1XcsiT07;6Y$qJGdBrix}kjx;d zL2`p62gweS9wa|Vf{+X$DME7OXp)3v2}zTq$rF+&BvVMLkX#|jLb8RV3&|IfFeGD0 z%8;BPNpmz=L(=AG@`fZ1$sCe8BzH*iknADpb2Rxw5{P6FNgpaN0VwK*GRIFY$NGL@{J@M$vBd7B9%m6Y4$Q&S(fXo6i4ahto6M@VGG8M>NIGV{oX2a1; z2QnYXgdj75ObIe4$fO{%f=mlCFUZ6oGlNVGGB=K9a*)|^G}D934>Cc>3?Wm5%n>q4 z$Sfh#gv=8%QOHaoQ-#bGGFixMIhyH0<_noHWX6ywL*@*bG-TG0X+!1>nK)$Tkf}rF z4w*b;_8iUhA@k>GCJ>oHWD1cvL?#iLMPwS0d3621k7hRm{?$JEn-v`p&y2>TC&Uhk Ojf+W-kHdip{=WiRC6!qK literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Andorra b/lib/pytz/zoneinfo/Europe/Andorra new file mode 100644 index 0000000000000000000000000000000000000000..b06de7a5904dd87bc1c43c023418bf2829c01df0 GIT binary patch literal 1751 zcmdVaT};h!9LMqhQB7-R-)v;l1Cqkgb5TlioLUsBKjk5b9>ftPtvtn;nYH%4Xv{E7 zYi4OS;UZ=XW30K*3^N<^I5Qg-v&QfJcj>~N&A*-VI&0_LecoStd2Mx~^~V!n{=6C#nI`!6Yo!0wJ-JS1rddCxu-FHvp8n0-4QJl99M_>UpH)cOk0wpO>XBJbOEsgbTr>O9b@u*j%{m&YbE+dXdtaK& z%?VIX#&{py#aQ-@m2>?QOEK`;;!K ztCPiTd$q9SpceV6w0MS3?fhI_60%U2x~J&U!FVklog<}>M(MJzF|z#97hN$BBr8t_ zOIhy+S=IVk$~*dHb@_l)?0m0lvM$Tovin*YaYU*zd$c<6qSl0|*1SEe>xLS%_D+kg ze`f23Gb_}0yIwXP$ke*7V%fAVLO1VEk@~_6X{Zj7EeYY$=>0BRgDq)_8?8;BUP$xE zL2Z6;Q(8Vg)@|pzWP5*~?l^KoclI3DT`is3+TNd&%V?1abPvPOy(Xj!4- zT_b*f&M&a760dD}oL8~U*IX{=&HnoUH<~Xx1N_GC%=6PcyHYQ7AcN##l*llVaUugn zMv4p-87neaWVFa|k?|q}b~GbKhK!6E88k9#WZ1~Kk%1#4M~04!9T_|_dSv*>_>lk{ zO$0~?jwS{q2qX$53?vRD5F`>L6eJcT7$h1b93&njAS5CrBu5hy5|pEf3JD8|3keK~ z3<(X14G9j34hav54+#*75DC%I#E1mxXre^IMB+pOMIuE)MPfyQMWRK*MdC#QMj}Q+ zMq);Sb~I5VVIy%Pfg_P4p(C*)!6VTl;Un=Q2LL$&$RR+E0df!=%~3!OgQGbP$bmqP s1ac^lV}TqD2SN07ND(-S=2V%uGi6q^zo?=DD)$GD`RU#;eS#{d8T literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Athens b/lib/pytz/zoneinfo/Europe/Athens new file mode 100644 index 0000000000000000000000000000000000000000..0001602fdccd0bbb1849adf237c93aa93ea80a73 GIT binary patch literal 2271 zcmd_qZ%kEn9LMonYNR3;pG){}mB=e!PQm+^$p z+qbT+Hr@KiahW$b4 
z$3B0lZu@BX>s6!2TqRR3Z>*XceXVNhP{5itaL}6Gy~ZEfS>d12vB)pmev`18o#Qi0 zd&k2Io~^n)<77!h!cQ6*{*6Wj4QbTX_ci*$QN81fLptlD7d7U@fW#gNNZe}&Wp<}Z zeA_cJr@m3*H@a{@tLMHozT>pw>7QeoTNAQNk(qJWR|`xS#bv>JH17+C)#CkOqVV`ze$$- z+N?_lJaX^X<(kt|sk!}2^}gNtnzuhz@2^eJ{Ej8EtiY+BniyH0cu~Ec2`O-%(ZYmL zDZKK%tO)v0Rt%q(2QIuND+9e!H28rQ@9mUTN8Zo}8ycjf^EqAp&>k(V->hYe>eW|N zsB5BD>00-EU3(){%OeV;{L7j8@b!7J?!9Zee#9jk4uwfY-(}g@epM>FhNY@Uq&b^Got*|9oD8f&BEiL}|$1(^H0k?*s>O+IzlWf zc8)dOvO>iQNLx<(tdvE#v01JoaUN`13f-x%Fr{9KL_w(F^9=_Kt4e z`nBK9Y07(bIkGEcTefCj$i|SJAzMTChHMVm9kM-Jvp@a}8$@=M}~ zvQK2A$WD>1B6~$Pi|iKJuC3WGvSDP$$d-{kBb!Ecjcgm)H?nbL=g8KPy(61Pc8_cy z*}tu60MY@Z1xOE&CLmov+JN){X#~;4>dq3DOg!DM(k4 zwjg~$8iRBOX${gFq&Y};koF+`K^lZ~$kwz7>5;8z64E84O-P@RMj@R-T7~oqX%^Bg zq+LkAkcJ^0vo$S4dS+{yhI9>S8`3wVaY*No)*-z^nul}`X&=%*q=85WkrpC7v^7mc zx`?z9=_Areq?1T1kzOLrM7oKz6X_??P^6h%sdF!Yp zo})#oKr`C_xqiB{Lb&^cR0@Re!e`th9*Y2 z?s+_D-{FVH75l^M!1wGg(;6)@)Asu5>D%3A#+-HLtLZb$%#7N`e7IWAO1))f51OvO z9<$NRiC<~HX;Y%-HteV8HO|wSO~Q5NnPM~FwcE@usG%1WUDOLVB!qLWFw2r-&GM9GCcA5d$%*NuzjG_IA}ZCata{n3s^>F6=R@i(EQ)-zEM1 z{+IL*D|hQ3mzJ5IzR1yQrUmP@qcZf*qf7NVZ<=1;ZI1b+WpAC=dad5z-D@@!`kT!` zjbwAi%d%x>J=yy9Q?hNTOY-}9)pj{5JBIg_ohd%wmNFScE z)f`EA-W(k^QXlKy#QfnAb3Ce&IT5``p9~Jur>c9*sgjDisC0xrU3gBPInYC&UAQ1Fzp+dQT#VNh zm(@0vcDQxr$+t|ECDnA*5eJQ$8esyvtufWolzAv}wyEyDY-)s_k)W1&Qqy;v)T)&! zwT>;8hl?jj?RAqS_{cm}XJMAAyY>@RZ=$G>S;Lf0N>TO4#i>VPJu0++Q`I26g=*Nj zi!`cKOEqrtxHP`@goFiDm9T;!X;O4a9?LG4@J+kr@hOE8@okPY9r?YuKgkgH$p)^- zKEJ7`lx)>3I#)f>{d3j4?hMr;YLseGnyy*~Pmz`f`m0tYsnYs~_UftqUU_J<~H`edI~ebcX~gem!|UqXTEKlZe|+Gd3s&}XX* z44JA1MQ2IkgE=y|uE^k188W0aMTTxnlh+P-WLRczNy;0cUjH~+hR^Mx-WU=h$>W== z5#63vDTy`Jo00X@$PPEuTY)Z>+O&qGUOcKsRk<#scC1%v=YNyYOXjLE`ML7W)SqQ+ z)(m+!ZH}Z*N|y(G?)T~IbN9(NeedJ@<;vgtoBhiF)3d@qefVELyM6n1j=f;6D$1uE za@6W*pGv&rvhV!;eLjti^SIje@VGkRbM}SH$H&M1INwFjzu(TQcm5bxVDX)Ax$Ix! 
zcI`g?Taa7oXzwXZ-+9n`fK-z%x0cix%38WQBFC1+%kZvIDK>C3+ z#L;%d(Y6HXiKA@_(iKPB7NjqZwlPR&kk%l*L7Ibf2Wbz|AEZG@hmaN_JwlpHG@ z8|_2-hcpoBAksod+e4&@j<$C?SSkDWJ4f30@)JCo;f$R=sdm#G**&xUcLAD68N03c|>=I;~Ao~Q_D9BE6v|9z)E68R+ zb_=pyko|&e7-YvFTL#%P$fiMd4YF;JeS>TqWal{At%K|xN4t5D-Ggi&Wd9%=2-!i% u7DD!r>wf1D6X@>q|3PH`740Ajuv5uxCsTM_vxw#q?xvC9aglCb1^fkK<6HXx literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Belgrade b/lib/pytz/zoneinfo/Europe/Belgrade new file mode 100644 index 0000000000000000000000000000000000000000..79c25d70ef09aaeec21f0a10a029650967172a80 GIT binary patch literal 1957 zcmdVaYfQ~?9LMqhL0H4S7)2!{xg6a~D7WL3s9ZY8CAlQGB%zgCF3rrEeOY4-b6vxT z2jM}?T<4O6HN%WC*O}Qcw>5t6|7Np!;F-<;od5sTPMse8-XG7D`8ko+A6EEVo(VE+*5*~(W!yj&Xc zDO!6y57yod9ktKp7TUM^i#iX!)_&y=G_>@FhAlgz;n~MDBJ;jP7F0`ALXAX^-!3r$ zyCpWVNMajHB+jo~;~veF0pAOC;8~XpdYGZ{m06llGf)R_9Hv8d`s>h~ARSgZK!zuJ zsVm!0Mg+f9x2sVSy{>6e&|^t@_d=4Jo|ojht1{}@0U2F&L{e(cY3i0TNjthv$K>V7 z*s={eZqjBQpF2m>`{$}BB}pgvr0GOwZ=Lu#Tr)Z(O2)l*I{8yCnR4o*PHpg(X?xpB zX7yW{Uh+Y*%IjoCR)fr3{YGaEIW4m@Yc)HtLgpk?X->mKC0k0=(X2^R^KzKCSMz`QvaI%T=6qRg z#A^2I>EG4S(z1Sk^4pYDkL7Z6r+ds791h-G4u|LOr+J0>L;Ey;vA<(kKg=~f{{2Yg zbR#DmIpxSnM@~C(;%&{TM@~L+`uVpMU~4KsN|?UAqCo+3Xu|RO^ryANR>#L zNS#QbNTo=rNUcb*NVQ10NWDnGNX1CWwx(vJXj@Y?QZ`aIQaDmMQaVyQQan;UQa(~Y zvH-{mAWL9t)&N-qTeAwtG9c@KECjL=$WkC{fh-2H8pv`W>wzo?vLeWmAZvmwimh1{ zWLc1PK^6vC8DwdYwLumKSsi3~ko7?p2w5RyiI6oy7RlDE60%ITW}T3QLRJb{DrBvY k#X?pKSuX4Uo-S`QY5gMDG-qm5XiR9hGc_U{!=k)@0!L5Jf&c&j literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Berlin b/lib/pytz/zoneinfo/Europe/Berlin new file mode 100644 index 0000000000000000000000000000000000000000..b4f2a2af6de4526f909f743425c0858d32cd14ad GIT binary patch literal 2335 zcmd_qZ%kEn9LMo<`IExPeZ}CKfKWF=>}F`LEmveU$7W8dvHHDFTebC|XFciMd(P|JGk9>f z`+57;Y^q7O{&9qvC!8F6&B=Y_4s+>>aIWtPT%0&C)~DS)$MuzWR1dax>R?!d4viPd z+1H#pe8elC?#q;OFD1%|KUzjkbXcQhr+17VULm8GGAGVg#pq{6E)C?2Bm`>5HE?u` z^?7o?TyW2kFTy%xEa;4mU2T%c;m(O!B}?VDj9)a$^@B#w`cj?2XVrQAw9dZrrrv)3 zkj{C3ug07_C9%i)B<{dLncJokzwJdysPoGm>#F3=VxPEj^W`pgwj{81b*e9Lox1YTmvvR$1}(j}PJP9N`iOJ6u68Hu>YE-diz<+^3z7QRjd`->#80|55H9Nu z&6M)~ALa3u>r&A^EbA)*Qn~$mePZ$3QdK^r)$#jfLtdZOOh2x*ajLc7b?U|`zit|A z)+aCf^r@q3weF(^d3txA*7ugm=8f_C%&shHSeYyS8mDYYn=4!Mf0k!wTGE&rrHvC` zOVf;-+H~#%X}&h5&%M_ocC$s)70HPf{<^F=0%%ov$6GG|*eX}%`2My8F-8<{vVb7bnaX70%3 zk=Y~DN9K>}w!@@s1njAYo>q!`IDl4K;yNScv6BZ)>bZT*ir9c~)FAdQ;4OEOZjQatXG KG!Ld`g#Qj|*MPME literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Bratislava b/lib/pytz/zoneinfo/Europe/Bratislava new file mode 100644 index 0000000000000000000000000000000000000000..4eabe5c81bd1eaf255fe09cfe72f10f61762fd5f GIT binary patch literal 2272 zcmc)Ke@xVM9LMoH&J$tE-e`Cv2UtW%o&1K-AhQz~dgm`LBNfp+QR^ZgV^A8lm~)M> zcaG^C;*Vmino)mXt)aDkjuxV6m!0L>a%EaMH*->r+2{G)T3h7be!u(P<8!e47MLb+Z{f8WnZLUdwP%P{`XW5wC~ZOm_{AGut<)- z?$VK0ymDe!ft;*gqhGe(kW+`2$m#8F{i^bmgnh;Gb>@Ja@yw8KVs=Z!-XM{SEh4`K z8{&f*IyI1})BFqMw){UeA?+8PKJ7boIgYFA>ie2_{w=-z^g*5R@ynWY^q9C0_e=7= z1Cr9AlDhQ;nHg%5J65lkJADC3D=Cw^JVlc3&XBt&CrXCXs~Oh{bk>jQni-z1_k0?y z_YRDzr}v`H?mDYk?O$m2mZO>zd{c94B9hlMAo-<(Qm}lt6ejmbQC^D_jkQT}QkNE= zTPJh=YSy{OymJ4y)mqY5tEGc;^?@DpbzZkyAFNB$`R#M$p>n5sgGsU={eqTx$E4i( zxmKioFBO-6l1j&iQaLgt3(vnQi~8RX-|z?O@9dDphY#tJ`g&Q~@uDt!V$HsJ~LGxy*5i$zVo}T3dhRogYi-`@T;t8yDGI^BeJ$O zEbF#i(#Ph#E9+~9HJG|rHk9^j-J~PBF9>)LYi6KTC1(P!W8 zljlYTb?e@bbX$LqKHu7_ZJlizWsh=97=K4OC*F$N{E&B=AF|Co(>}pwixXSS&CW~x z0h_n1ikASNu$v3bA@KjRnPmRS!=>io96!oCbKNjkO69Im44t@S$((r4q>x!5(?aHj zObnSBGBspw$mEdOA=5+VhfENeAu>f|j>sgDSt8R!=4ojr%6DL<$W)QJB9ldCi%b`p 
zFEU|EGh<}R$efW$BeO=Pjm#UFI5Kl&>d4%Y$s@BzrjN`YNdS@oOOpa52S^f-EFft> z@_-})$pn%LBo|0BkZd67K=Oek1j&e{NePk@OOq5ND@a<9yda4|GJ~WB$qkYmBs)lY zko+JCLNbJ;2+5J9NfMGJOOqxfPe`JWOd+X4a)l%d$rh3>Bwt9vkc=TILvm(ml7?i> z(xeT^8n}zs~WuV`B8J zxc>>T$*>8q$*`$poYtj>n&ygW379e*0GJ=t!qc8u2aQSSDi^~C|P5S$k!r6 z${bW(Mwzv4E#_VsosBX!k&rRlV$Wv6wlwy8KOz#g#D9LL&+|H`d7A#yzCURrCTGPs zo;Z!nceptF%*FeFx#qT_SKqnCjYD&bYcE-=E|%%)k_!EFuj-nj<+`TkUX*9-25aq{ z%G`Ak&8_t_Q>>EsdDe!}A-ScY@z%z$Vrx?er?t8MY-{taxZEu_##oq_I_)pO7ok=hk)U>wQT2)>KQsYwM+d*%nDT zbx>26ES3RV*XqEW92vCu106j4BOQ`GL5IGYt!XKJ^o_s)IxM`s4!awr>HfVX{X%nn z^G-V%vFlIGsP&bRo@O$t{8t%Wcw5FStC6u|YGvH)pY^S7yJh^SQ<~X!rA$aH)2s#+ zIGm8j5t3zX4imYkG<_73nI{R-~^; zW0B4xtwnl^G#BZvt!XdPU!=iEhmjT|Jw}?0bQx(g(r2X6NT-ojBfUnNjdUAnx2@?n z(r~2XNXwC)BTYxTj2Jd5oAkj&7L5eg6s;iEy%tg8-wf&vNg!wAe)2i z4zfMS{vaD4F**9e4kex%e4%s_o^N`&`wh!4qWCM{MM7Ge@>>;v=wq_TRZAA7F*+^t3 zk*!4b64^|L)62V_w~x!m)v$hpr+)FgPd#ofZ{J4G>h*9n&32mAG{>x_pRikI7v=Hy vPXC9`Jb3caGbv^}|93y17*OWa*UW6k=p-|-;i<6^@extssnJmw8SDEm0cgd8 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Bucharest b/lib/pytz/zoneinfo/Europe/Bucharest new file mode 100644 index 0000000000000000000000000000000000000000..e0eac4ce33159ec614c51379c6c48935a114b470 GIT binary patch literal 2221 zcmdtie@xVM9LMp`1y}6)u@4e~0ivRTcEACmXy7jbvN%OKja1}oA!41BsO(7eZt0v; za_@|3mTtHlvu4B}lo=YSWVE@Hez?{e^Sf--nzJ@?)*P$P^IQKk*ZQZwdf)CIx8uk7 zgV!^#d|gAny5-``Cp@{`=E?i`hT>rdQju|4S6ur}FNis#7eoj3qgP(nk9EJPD=+kme{Z{9c>IVgYHHF|?b~JX zsOM3OzALYq8C-hUb7oGIspLcL*ev2Jci(i`)}=}l$7>CN#D%g^8F&^e|lf(52*R(SyoofOThf7%k}4zU*mEG{(UZc%r`o7 z`IR3r_q(~^MLQ0e4>BP~Gb3b5$efT#A+tiJh0F_?7&0?tYRKG>$sw~triaXrzljMV zGeo9{%n_L+GD~Edj%J?7M3I>yQ$^;=FN%khCCqK@x*x21yN)8%L8IBs-2KJxG3#1R)thQiS9PNfMGJ zBuz-3kVGMwLQ;j~%F!eX$(Ex@7m_a|VMxZ1lp#4ol7?grNgI+kBymXQkklc$Lz0JN z&(Wk0$sdwHB!fr_ksKmPM6!sa5y>NxNFM~Kl{xFK1W@GhxpC_&Lpl3bkoZa&}oO|z+J3eps ziuJ{D)<2FZ<_Ry32J_$UGjBA zyPTf=hI})1hYWZg(}BxXGWb)po*B-Q3;Pd@4`u&ScyY_wvP;ge^TQS2jgR;pE*y3D zS))I9>Wr*J>lWvA^$-0)XU;gQ4zH8ynD|g<4ZW+gPc`eUU9W1uk)slLuuXz?@0B@q zD#4pxl8};exozcYxjoY@p~%96wjjH`zqbrVROwl_Un?E3N5%wkJ0u~fNfP6#ByqAv<_Fa4{Bx@$ z>Gw)qaMUIDp2^kZ)&foGSfKZ9U8D;e1NHvmP+e4;BoCzdsH-SI9t<1NRM(`W`JB-7 z&~GJuY*;e9j!8z(ad~Lyby?i@wq$mHq*>eRWXZvO`fzEfEUnw3*^h41WhHAg=bjRE zXQt~TjwQN0GD4SMjnQ2HG|4@Evp#lZuBe|trx^A*u*B`Ie4d>kYw>jxYl3XdPL}dwhddKIM=DZ(lV|-bsf_m5%JB)@9&kg_iX<2?3&Huj@7~St+tqWUa_zk=5Fod2kzDR+Q3L_;(YK#;asWMV#q|Qj8kxC<_Mrw@|+tySY zDYvbuH&Sq<;z-Gnnj=L=s*aT1vQtjI|Ced^HMO4?OZOwQoKXo;F_BrZF&ORi{S!K& B&MW`` literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Busingen b/lib/pytz/zoneinfo/Europe/Busingen new file mode 100644 index 0000000000000000000000000000000000000000..9c2b600b103dc4d1f49b5f087055e19d3e031129 GIT binary patch literal 1918 zcmciCUrd#C9LMqB0S*~U_N9i$0t7oE{yFj=LW4|?z^DVIoRk#AKL~0>K}Mh$G#PV^ zxvv|mxnj{|%sHYiP%rS$(sHK6+KM$S=cY5wnsZYI)>!@CPdBW(_IY-m*K>diE`IM% zaQ%+zbn~CfZGXat>mB>yeP)||T=eWT7q8WuOA`Sze74j?dw-|-ibY0fu_Dh!hYueWqy@W0Kx5C>eP} zlDXys$%^Zi?DQtdo@$kxm@dt^vRRh?*{I7-`Q_Q4$~3pHQuBtE>2n<`boocI`h2xl zSF|scl?86~SH;Mx#G9J$pOOOiH(KbuDus7`k)qHqrD)`Wym0-nyf|=F0>fvt_&}$W z^na=^)z!(%o%?k4y7zTW?KUlardET2LVd+kqHBE%b?ro|mPHpx*^iIwtK$n~{qg&{ zVKhR@d!nRb@ORnRdQU35Mr2dvsBGSQTemDfAzLekwJQFQY|9(a>KVthCQh~H)`z-% zszG;LXx5!qg8JH#4O)A?US8jyr*(a$^2YXfeX}D=>R0DVL$ycVN}D6Q@*l|CQAQe5 zqP6kvHED{R(5COclIA;Oy8DYh*)uYvEr-tP-hpm?x4Bna5438C%if`35BoMI{11Do zOl*QH%$P_qk4}GISsXO}{8Ao4{>tTY9>M=Vv*Grae7KtJhxe#SzS-+9d(FFhyAA7q z2=747vZFoE$eBjYH5X?aIp4?`ceLjmIqS%IN6tKQ?vb;PoPVSNqywY{qz9x4qzj}C zqz|MKq!UNm3epSG4AKqK4$=?O5YiFS64DdW6w(#a7Sb2e7}A-eZ4K$o(Kd&4hqQS&uqx^=YeBK;x_BON0xBRwNcBV8kHBYh){ 
zBb_6yBfUG?=8^6lZTm?7$Oa%gfNTM>2goKMyMSy1vJc2cAUlC<1+o{&W+1!aXtx8| z4`f4-9YMAP*%M?_kX=Ex1=$y5W00Lewg%Z7WOI<+akSfm?2n_}AY_M-EkgDP*(79_ zkZnTt$+$v8>>cKQ*tan=_#fCQyHIwg?AJ&!GpD}?>`wiAtNs})`;4&TwIq!h^A%?# PXCh_VZw7&2$UBM#c=ZZoym%EPQjudJVj?9dL4q)I&K0sJ zVU?w3df$_3UgZZGV_6zIc7xuY%?+=#(Xd?$czf{F{Ad^xuacKp6NHETVsB3 zw8rX}_1Lej>NEO>-47id(&G-k=bqWwqsMRWa3^fqttVD&b|;l}8_Bg>JSmyIB6aD@ zA}w^En3dchW{tLp^x#gJeqptk^J~4Fd(tHy9x0U>UF9;fcdpFZF<;JmJw!SyBjx<< zbHsuiZ|SNC77L>;$!yoC$npM6=0;u=xz~Oaizb{Ai-rcoqu;+R^19y@`GcoqL3^7h zJo2VoTvH?5ZO_XkkG&w5RD<8iZBbFb# zAy=ID5i1Y)i?X9v#j2L;qP%lR=;i0d>gFr*iFwDxlVyXlB7Bcnli4jR?eELAp;E5B z{EDm^t(EHr8s$?Liskx4D`fS#jbg*jOj*-aB%ZDcm(T1-6C0Ogh}uers7neHo3eiv zoBcIWpAaDH&F@5m-!0kT=@*UHzLC%MbcroPy|QV~S=rpZPi}2IC|lZFr0p*630{9a zCf+^CKFL1WXYBC3=gu)%z14f-ox^X+e|>0LuwDHRYg2D%UYZuzps80g5^cVk7BWrq zAHR;>@e^7AcWgo9*L0^XjQ4918`h~_6cjB~XCA+R z%{F$bCp`W0!)7b~=SjA!Pgs+@{l`atQ;(8k1GkZhA~Qv%YN_UmOct3fGF@c8$b^v@ zBU47^j7%DtH8O2v-pIs}nIlt2=8jArnLRRnWd2A3__N3Wk^&?LmMRHI7LYU`c|a0@ zWCBSAk_#jmNH&mkAo)NNf@B0qiKWU3k`yE>NLrA*SgOPznL$#6shML@6|D#*8l(j literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Copenhagen b/lib/pytz/zoneinfo/Europe/Copenhagen new file mode 100644 index 0000000000000000000000000000000000000000..be87cf162e1a1a8470574ae2c62b92f61589b903 GIT binary patch literal 2160 zcmciCdrXye9LMqJoWl_#WnVNn7B>qCts{5H3utx(W^%l7R8kR5L@6RUDT7ieGv*p| zUmeRia#=Z6O{hPp{=mz$%W&b5vNLAobUL-$%*~uuWA%GKj^V1m{XKi0*Tciy{DaRs zu&ky!+xpLmG+($mE#~HZ^vmYap47UkJ$$6U*SB;--<7#?@_mnn-toz)-MP}gEmgh_ z#>qDmL{B%pCf~+)YItR*gwLIqfszwCaP+v03GKNy)<0YB%lT7dQ-0C7u|KHCby7Xo zkLb9IAL{+5_v-_n?b7%|hb7@)mn6RVmL#>RByWC2#@7br!4+%dq0)e)6cot>Z@#1^ zq{+jh#z~snr)f8Gb>gK|ofMAKN4|*EN4syS_rOm&x$Qem-~N?mY&@iyRh^nuaYnL( z-I7z-Be{#-ki5iwlAqlu`Gd_eCB99koLwzb|7y@_hkY{r`*JPlsMNxqX*#22md(E6$_T>82$s$HvPkJW0Rv_uzs=IIjeBwcbdQ_EwErF>wFK6PWF zEc@hlT^^2-75ihPqWf1_*?e6p+d{IcGAygN{H#yU{8ZLd^l4S{URhh%rPU)p)^&-h z>wes$>j#5c)7zxaoDJx+?=9EbFYD#G9fexgQ6|r?Pu2}Bc~ZZyK!VjCc_AxFHWvLM zn_?_!$cWX3YZs(3`ldGae=bc|&*+OEb;wJh9^Jh6xNhm%r(2s2X!Fiybvm35SHvBc z(>>zuUzT-mpZNo|62?0sEGt^9dxoC3zYKr(`2&`(sEkK|f8j6(%}e0_=P=UzlAE)` z+`Px!;wN+dHm9cLR5#X-`YjikiLIFmG8ber$ZU}5AoD>cgv{!yGGc2|g5<>3Bn8O|k`^Q{NMex8AgMuegCqyZ z4w4=uKS+X*4B46#Avv-&NkX!OqzTCrk|-ooNUD%rA<06rg`^9~7m_d}V@S%7oY|VB zAz4GxhU5)N9FjRCbx7`z^Ng$FzB!x&0ZA}u9EZUkhB6&m-iDVKDtf5EV3hG`B?MFD!Ch0teN9l&p6ktNnrTLxrjMO7=lssVk#GL5+uM6^Ql#^@ zM{o*mx=dL$1rcF09C)Y9NgVlOg+D$Wiz%)H) z)J8Kmex>;$`iA}@GuoNg=w&^>NuJJX8m6;OmzV|44zr-3hW@Jff?k-PtiRs0U1!ga z*WZ|Qv*<&QUi|S^vm_YM zT-0m!_0r$2+@Zf)e9L_Q*)qL$YLH$xVzT~WWSL%{kghj$pKE?>)ko*GUZ?XDcA5M_ zf3q`YyDF&P!0d{jp>~HotoMXZ)O$)l(S?C2y72TFz3+G}v;U_p`oM-#b1>(WIh0*f zAI{utj-)L9)xGBAjY_(>Y?wY(cvhc2&{LmT z{fj=k^tw6s`C@&3%s_Kt=4gGfUyQkwk*F_sNzzvm+v=+g?l-?hhv;k0c~cVXr%MiR zGo`nR^!2q%&5e`3=H{rcOj&+zif>AuE*F}h{kmo8^8T%L1e2U4x@iIzRnt|59W-i6xT)HGt*Mr-%mWd#P4$FJrbftVso5$|YWe;w zwQFZd?PK4_gC*mo&iY9bbY#A&yD(eTTQ^SCpCBrD))1wWQ&od8aq6L1uL|kcOf?K^ zsTy_ZDvc}GR!y2dDow6FCZPdUB($KWG%da;kK~j{*rpxwXl9{=f4NMW4PPUkag)VU z)X<6OyH`b~=BVaTx$3bVpQ;x1rmL2bBUH<>4Am+qQ(7J9r=GZxCau41ub$kOAZ_L) zN>pxZd1`E1iJn?lo=yspw(mco+I8}i_WgrYhek!xv3;59)zUx%X zmW`^*u{o-1_5u~V`Fn|*I6=iPoF&f=9jdxbcu%_b8ZSMP2T0Gylf@g`S)QvEFVBaC z%JbJel2D_QO4waZUbyb?Am^eG5XFPf^VZ_XL@QpRPKn7Kv0oLHdxjXou>M6Xb< z_T4P~gQutgQQ4AIahVLPCo=HlWEoVJDuXws%WDU`G9;^yBjss6Zm*17G*6A%k}Gdd z`9Vf!PnUPn=Ss%J4Dq?IobR8{@_yy+{j5;lKL5e@+`m*Y;O~FXKj3c9@^|~?zy00q z+t(X9j(xAZ@)6Tv+ z+t2plKR ztB)){vi`2N07wN~Z3&PXAVol`fRq8L15ya25=beKS|G(hs)3XPsRvRJS6dOJBuGt= zq99d4%7WAdDGX8>q%=rvkm4ZKLCS;F2PqIzAy-=>q((@QkSZZ%Lh8hS0}6#y3Mmy* 
zE2LPiwpvKJTy4FOf*}<{N`}-7DH>8Wq-;ptkisFALrRC#4k;c|J*0fDwth$fkqROu zL~4i>5vd|lMx>5NA(2WVr9^6p6cec?QchP}Po$u(wxUQ$k(we!MXHLF6{#yySXWzF z9+VcTEmB;hx=4AE`nuWzBNcYFB}QtD6d9>9Qf8#iNTHERBc(=ajT9THHd1b+-blfb zio4p9BQN$E)3+#KrRjB z+CVN2QJ&BI%SdzwXr#YK4d6!1H*uKEA~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Gibraltar b/lib/pytz/zoneinfo/Europe/Gibraltar new file mode 100644 index 0000000000000000000000000000000000000000..a7105faaeb14ccf0a5d3588e74be709fab224275 GIT binary patch literal 3061 zcmdtjX;76_9LMo{L(&+>j!u9H>M=va5CNCm$P|$%AeWosuBoU=q+|pxQIlEIIL^O@ zB1gz$5;_LBg!`5oxFCuMDQ24H?wDncnxdWW)A*`p8sGGy=b4AknL9l1?*0Be!zL$3 zy8d?6x1aEEmD-1Mvv=*Eg>8z=!WwU#dobMOTfPy(}iUzx@hlVUA#0=uP~Wr z<%i*V)u)Hd>Z$SO+qBiDq<@%M6E{q+4OeDeWV%`Z_-|%IlO(g@mm_B5MVH=m^jE$4 z_z=BieU;w2>Ymy5#TxzH!T`NJBS-I;QKxq%W$Il6^3Cq9!*yx*?Yb5D zk$pKsWq)N0DSx|%99ZR&iZMy*pq!RNsiUMa%}-U{iByLt-_up&Ld=o4Q@XlED^uO4 zPO49yQZd(?|(mZ+DCm#g^lZIUp5o=PlSEH9^~ zs{ZrdmjOdQmVqe~WYDuI;)#!ySNs!YaA=4Oz8Nk_zWr3v_f2Hzjkapo+TUe(wT~KM z8mo~dSJbGi>uPj%g&H%uT8*83NnY)>PK_H`F5_D+P!pnxCAq;Gnb=%p;)NWURF@`` z_hiazCpFiIou*#@C`wWb2B|kDwUnuIJF95}daAT!fAwZW3pG9FwtB0vOQm=4 zmGo<;RYs$ml5uF4%Dj3^W~?kwGb=X9tOYw{c5$A(lbJ7B^Rwg;_ru;^?guw-di~S; z^}HYbZ}0o}|F~;^;I2l>T@PIiT&_UvZpbID#?IOIuXB8Y$JJ+$#}$ix?IAjb#~xg6 zx7$9>>u$H_-`i4V|J7;BE$1DV>%P5)RQl9ITH|PYgER-}4$>Z^KS+a+4k0ZUAm8j5rjX(`fE zN841St4Ldsz9Nlvw4HUdtwnn4Xq$_4*U`2Y>93=0Fw$Y9#Ym5lCL>)&+KluWX*AMl zq}52Tk!B;^M%vBSmVP_hh9ezET8{J_X*$w%r0q!Gk;WsPM_P~c9%(+(eWd+J|B(%F zv^#)o0kQ|kCLp_jYy+|n$VMPLfouh`7szHHyMb&6vLDEXINBXSw#3oy39>23t{~fj z>>RRn$lf8FhwL7*eaQYH8;I;6vW3VV zBAe)FcM;h}WFL`@M0OI{N@OpQ%|vz+*-m6Xkqt$56xmW_PmxV^w7ZIItE1glWMh$? xMYa~%TV!*Q-9@(7^{~^++&;Mf&*ST3N4b3js~nopF|2b~cxXaIICkjh^C#k8TKNC~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Europe/Guernsey b/lib/pytz/zoneinfo/Europe/Guernsey new file mode 100644 index 0000000000000000000000000000000000000000..4527515ca3f249a44599be855b3e12800ebe480d GIT binary patch literal 3687 zcmeI!dvwor9LMqBn$hO=nHU-N(OjC={5BD438AqqmtXotj4)(rmX?v0Q>h%sdF!Yp zo})#oKr`C_xqiB{Lb&^cR0@Re!e`th9*Y2 z?s+_D-{FVH75l^M!1wGg(;6)@)Asu5>D%3A#+-HLtLZb$%#7N`e7IWAO1))f51OvO z9<$NRiC<~HX;Y%-HteV8HO|wSO~Q5NnPM~FwcE@usG%1WUDOLVB!qLWFw2r-&GM9GCcA5d$%*NuzjG_IA}ZCata{n3s^>F6=R@i(EQ)-zEM1 z{+IL*D|hQ3mzJ5IzR1yQrUmP@qcZf*qf7NVZ<=1;ZI1b+WpAC=dad5z-D@@!`kT!` zjbwAi%d%x>J=yy9Q?hNTOY-}9)pj{5JBIg_ohd%wmNFScE z)f`EA-W(k^QXlKy#QfnAb3Ce&IT5``p9~Jur>c9*sgjDisC0xrU3gBPInYC&UAQ1Fzp+dQT#VNh zm(@0vcDQxr$+t|ECDnA*5eJQ$8esyvtufWolzAv}wyEyDY-)s_k)W1&Qqy;v)T)&! zwT>;8hl?jj?RAqS_{cm}XJMAAyY>@RZ=$G>S;Lf0N>TO4#i>VPJu0++Q`I26g=*Nj zi!`cKOEqrtxHP`@goFiDm9T;!X;O4a9?LG4@J+kr@hOE8@okPY9r?YuKgkgH$p)^- zKEJ7`lx)>3I#)f>{d3j4?hMr;YLseGnyy*~Pmz`f`m0tYsnYs~_UftqUU_J<~H`edI~ebcX~gem!|UqXTEKlZe|+Gd3s&}XX* z44JA1MQ2IkgE=y|uE^k188W0aMTTxnlh+P-WLRczNy;0cUjH~+hR^Mx-WU=h$>W== z5#63vDTy`Jo00X@$PPEuTY)Z>+O&qGUOcKsRk<#scC1%v=YNyYOXjLE`ML7W)SqQ+ z)(m+!ZH}Z*N|y(G?)T~IbN9(NeedJ@<;vgtoBhiF)3d@qefVELyM6n1j=f;6D$1uE za@6W*pGv&rvhV!;eLjti^SIje@VGkRbM}SH$H&M1INwFjzu(TQcm5bxVDX)Ax$Ix! 
[GIT binary patch data omitted: the base85-encoded payloads in this part of the patch are binary and were corrupted by line wrapping, so they are not reproducible as text. Each hunk here adds a bundled pytz timezone database file as a new binary file (new file mode 100644, GIT binary patch literal) under lib/pytz/zoneinfo/. The files covered in this section run from Europe/Helsinki through Pacific/Kwajalein, including the remaining Europe/* zones, GB-Eire, GMT, Iceland, the Indian/* zones, Iran, Japan, MET, the Mexico/* zones, PRC, PST8PDT, and the Pacific/* zones visible up to Pacific/Kwajalein.]
0000000000000000000000000000000000000000..094c3cfd75c3009a2aeac57be9aea0468ae4af12 GIT binary patch literal 237 zcmWHE%1kq2zyK^j5fBCeW*`Q!c^iPl|2F9WosvBV7@2^=a~=Ri7-nr?VEO+)+<}3E zfnmh~1{NRR5C&h55C-pX#}JS_7=#eE2WBVl52&91Ku~8Up#Y*m_JV{#b~BJ_ITz45 Ix`swv0Cp}bWdHyG literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Majuro b/lib/pytz/zoneinfo/Pacific/Majuro new file mode 100644 index 0000000000000000000000000000000000000000..d53b7c2d832173ae13ef2c428f7a42c22477c59c GIT binary patch literal 197 zcmWHE%1kq2zyQoZ5fBCeCLji}IU0b(|2F9WMxe+75Qkyb1_l-eh7|`GSbTg#7<@fK nK!QLJLfBN84IDopS{VKVL7kaM1BeFM!a&#>E})INhDKZfkS8Az literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Marquesas b/lib/pytz/zoneinfo/Pacific/Marquesas new file mode 100644 index 0000000000000000000000000000000000000000..c717c12251b45911c0c9d570d6bd240bc08b6b04 GIT binary patch literal 176 zcmWHE%1kq2zyM4@5fBCe7@M;J$e9x03)xZo5}AlS`vf`O5Vk(q&EP7P3w zVc`yt!nz3z91ILA4luCz_=Yg}1%)tp1&4s7!61YL3x7cM{09P%#UL7FIfw>103=P8 NBe;OB(ls>V0su~RF)siB literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Niue b/lib/pytz/zoneinfo/Pacific/Niue new file mode 100644 index 0000000000000000000000000000000000000000..d772edf5b48a5b23b3b74041bb5ac9eb99b82184 GIT binary patch literal 226 zcmWHE%1kq2zyK^j5fBCeW*`Q!IU0b(9cR-7fs#`ej7-e`|0ir=U;vT}7+C)QkMjVL zsR0ZuKE5FgexV^i#Sjoe*b10+96z9X{sTdsd58~)23ZLb23gBMg1uZoM;ID%0RVIm BFzx^V literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Norfolk b/lib/pytz/zoneinfo/Pacific/Norfolk new file mode 100644 index 0000000000000000000000000000000000000000..3a286be3d203cb68034c904d388e7082c650febd GIT binary patch literal 208 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZJk9mgLHj6jjO6+jNd+y({~28M+@7&v@7OdL^6bs;E*3+9SjWr nfdJ$n5EtYq5Djt|hz2!7n(3!3l_iffxZo2-^#Dg86^LLqK>4kQ0Jk9}`dtgh2qt=J)~OGyDgFIy193 bAd~AvK!QN?SbTg#pk^{KZ~+b1wd4W-zZ4Nm literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Pitcairn b/lib/pytz/zoneinfo/Pacific/Pitcairn new file mode 100644 index 0000000000000000000000000000000000000000..d62c648b8e00c511357bc0fe2b3301cb0e7d3ea0 GIT binary patch literal 203 zcmWHE%1kq2zyQoZ5fBCeCLji}c^ZI(sf!Z_Bh&x?Z9EJN|NnPXFtGgp-`~K%;o}>^ s5a0*I!686ZAP_>>WSA{HKOkCwCjS3lXRh}J#0A;JK%7-vKwB-i065_!q5uE@ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Pohnpei b/lib/pytz/zoneinfo/Pacific/Pohnpei new file mode 100644 index 0000000000000000000000000000000000000000..59bd764622fe5f1fc1f18084d14b33fa4cc6f7d0 GIT binary patch literal 153 zcmWHE%1kq2zyORu5fFv}5Sx{OVb%r)h5&!R5FjrEyGACU6bOR=jLrE2!e{so1a)Q# cYd}U%0T}`^kj2M01ZF4$0~g46T|+}I05doezyJUM literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Ponape b/lib/pytz/zoneinfo/Pacific/Ponape new file mode 100644 index 0000000000000000000000000000000000000000..59bd764622fe5f1fc1f18084d14b33fa4cc6f7d0 GIT binary patch literal 153 zcmWHE%1kq2zyORu5fFv}5Sx{OVb%r)h5&!R5FjrEyGACU6bOR=jLrE2!e{so1a)Q# cYd}U%0T}`^kj2M01ZF4$0~g46T|+}I05doezyJUM literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Port_Moresby b/lib/pytz/zoneinfo/Pacific/Port_Moresby new file mode 100644 index 0000000000000000000000000000000000000000..dffa4573a4576834edbe6700cff52b9c5021f4c8 GIT binary patch literal 172 zcmWHE%1kq2zyORu5fFv}5SxX8p=SXDLx6h-kQ0JkA2U!2gh2qt=KTTTGyDgFux(Ks qAiC)7k_ilqObiU2VA;+OP~$m$d_x!ld_e@*9*|ZpAXC@SfC~V?3>#no literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Rarotonga b/lib/pytz/zoneinfo/Pacific/Rarotonga new file mode 100644 index 0000000000000000000000000000000000000000..2a2549024e40e783147c4a2d3c2f8b47d7f96d40 GIT binary patch literal 574 
zcmb`@u`dHr9Ki9{YC<|l(KIi;t3IWvnACz842_Uta)X+PM5?hFqz2K=KOo+CHbYnc zK&OVqpCG2%j3&?T``AP*-sRor?sAvh?;GtMA1$aKDP1dz_Zh6h*2G>@X4r1Luv-xB#&fhg`@KTP$s})=54abwsf-T;@#%A5w zsm^0us)l;HH83-o4_&T5nDX1RuFQ2!<*BC=L&@I`=UPijId`(FQqG<3b3Sh-Mz*#i zRoi55>_qabUn}~1z);@w1HONM<78bd*8AKtFfueUI5IpEz$+1u5MGIa1o27~B#c+$ pAc2rbNGK#05)6rkghS#X0g;GENUy|1f_f#Y`nRfS30<8}e*&9qbie=r literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Saipan b/lib/pytz/zoneinfo/Pacific/Saipan new file mode 100644 index 0000000000000000000000000000000000000000..c54473cd630a23427f429eec21a411745d290b18 GIT binary patch literal 255 zcmWHE%1kq2zyK^j5fBCeW*`Q!c^iPl|2FdiE9bZbMkb(e#}c3jL)`?Bbk71P$>HN0 z!r&Va!r+_{90HODgAnZYumUAP7zAK!-X9P?!+#(Ud3fXwh^{l!i2>0d`$5tmH!uJl Y!SesVxe7>t1h+6SaDm*WYiPg)00zx3r~m)} literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Samoa b/lib/pytz/zoneinfo/Pacific/Samoa new file mode 100644 index 0000000000000000000000000000000000000000..1d7649ff71d07a158d69ab0d46a60f89c28683a3 GIT binary patch literal 272 zcmWHE%1kq2zyPd35fBCe79a+(1sZ_FMAqLNzb=JtkkU3VU}Rzj%5AJ*VEF$({s)Lm z4PamalN?}@$HzB>!7n(3!3l_iffxZo2-^#Dg83=BXrfq})xH-sU?(IW(;lp%x! V<9>j3fQ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Tarawa b/lib/pytz/zoneinfo/Pacific/Tarawa new file mode 100644 index 0000000000000000000000000000000000000000..1e8189ce66b42c4bc42a34875dde9e29cca752da GIT binary patch literal 153 zcmWHE%1kq2zyORu5fFv}5Sx{OVZ{Lk26s=N5FjrEyGACU6bOR=jLrE2!e{so1a)RY dXFx_TG64w!4P^204S^ZTz`zAEUf0lw3jjK)63PGo literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Tongatapu b/lib/pytz/zoneinfo/Pacific/Tongatapu new file mode 100644 index 0000000000000000000000000000000000000000..71d899bb963718a00b1b6eaa5e19c42f2281a1c9 GIT binary patch literal 339 zcmWHE%1kq2zyNGO5fBCe4j=}xc^iPl$zrDo=D*!9SX{2XV8y}_U{$ok!TRy50Gps$ z4vb99EUZkdObiTbJ%CCX))p|ZFfeSp03we(U<8_jVEg!nFogJnaBv7nEh8g{1cDI4 z4&nqV13TykSO){ce;}wcW8VRyK@J27gB%H>K@J7cAjg7fpo2j)$k8Ag_P2!jBO&G7@mXZQ~Ub!HqZ cKqjw=00{!kWAX6~ftty{zy&m1*U*Ry0QLG33jhEB literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Pacific/Yap b/lib/pytz/zoneinfo/Pacific/Yap new file mode 100644 index 0000000000000000000000000000000000000000..28356bbf1b230a881bb8c2ec3c87b82a71958352 GIT binary patch literal 153 zcmWHE%1kq2zyORu5fFv}5Sx{Op=SXDgR@6y2#^vuo*5 zQ&YGh>qP!na`N(I`D{;+HoTD{pO=o5MrTh}WAR~gYHMlU>CN?7XO`T~J3Bc-n-=bu zb3^Ood~~pU(Y;)ny^m{iYk{^l{;;{%T3$icgzoatIgHzTg{JW zGfn&Y=gqacB=b||T+>k&X09iWGdBtoou7k4O^>L~ydKpvoq$*T&3(nA9a%fX3C!;A z^z`^Sy)qI^@Ax*SPsBy1Z?6U?$mf_7)N#P+cWIZ?|4gY9{J}&92$iAH>bjPbE ztmGvVzG00Sn44_|&7WxoT{zc%f4WB^l2YY?=md!j87vR__mjcCDLVMZNFDNBqz-NB zt5F|y)rabCX>@g~4lBE$F~y(i@U=%YHhaIuEohbztLr2_xn4$2FPBlFRgy5GKoUBO zWXymv9dmK9jQurV6OX3IxG(2uQq3Ywu20m5H%-v-l_B~_PJ~V<9xIQg`D#k`0C_C( zvZkhVN}BH{Ix*sFnRxYkndEa^CN-Rp$1lAplWX@%`pILOv86<&?AxVJtXwHmOE&7X z*_(BG?oypGI#)gE6Ln_L6rB}4RA=3c)j56AWX}1XI`_s9$vkvj=Qa7s{GEZapsr08 z7Iny?vIfao)Fg`wujrET@5+-4PHJ{|g)B|3)tqhzby=wDvX*VSymPg#II&Kjy6DlT zchA$@k5|bvFDL8Dni=x!@^H=DG)h)YOOn+&L9!+;OxC9UBF_aH$sgWF^V`3bf`FS^ z(DF)R6zl`a1z&=V`yBHHP(5ss<0b+Xm^*$=rE0AwY-#@Q^{4zbmV`kfTO6m+Q z9+S#Vs=eWO*<&y6{hiOAe}BsN*k8I&`K{mociWy>hZ?6MPd)PFyV|E8DF9Ldqy$I} zkRl*eK+1sB0VxDh38WNAEs$ck+G-%>Kt)D}$5< zsSQ#bS6dyVJV5N zA(2WVr9^6p6cec?Qck3vNI{W`A|*v?iWC*8s;ez4Qdd`7SfsK@X_49@#YL)%lozQl zQedRQNQsdeBSl84jFcIvv#TvMQfXIPYNXakv5{&cd(v9LMn+ks^WzC5UiH)zXwK_DYpoG)-bjE@Epe5nFIv%Y@RTq^e}hLoFrL z5wT1fv2P{zC9%ZTl&IyF+P9-1k~W z^kpMP>gj%7=JHNyW`@UoJ>$_#b9L@YGi#-vxyI}+T=&^9bG=?$-{734Z|q!L z-x3tAZ!MRjZ+*T^-*!Jk-;uM$%-*`n+_@x1-}S|GbN52i+>;P*?j796+!x)^+~3+m 
zKhQkDJlJxcc_=zlKU8o@&vk30=RVN%!a*8NA4_h9Zd>z9X+3B9-G_PJhm^a zZ=N&Fm3Ls5etc|Y^LXYU*NMnv*NNpz^^=Yw^JGe>>r_yj>(nPx^wT}|o8N?PGV@!U z)V{61S36U6gLbx5x_0)-T&>{dRPFq>9@>TV$;QRBvBss@afWMhcjJ5Kd&cGdEsQIX z4U8Y!`WRP(yo_tLo*CDD>T5SjSJQ4hEv?Qt{QUiYdZ>U!8^ z-N%h(y{mO({rt+Z!GRL;jg3XpKmDeBbIv*0F!_jVG-ihkh+QoM`xJ;E=SC46vRyQe zoF|(2E*4FL5=7HyiK4mB9NGLrKhfgpIN37WF5dbsTDDpdBSW^gly6UMBU`6dm+!>+ z$u`L?MBDZr(jMz0-mP<8wzEGI?LBg32fs_A!@X-F)P0W#&C3z*Us)!?vNA;Y(OoiP zW{T*Txm0!=4>9CFyIUNZjRC^39mh#avZN{k#_Q|eQjh*4cy31^(Q7#&zc zjA{2oj4f{y;~G_xbg*Gv80D}VR*OO&*}N_qbKU(cTx4p75lD=XZJqpj4R-&fb%%J7M; z!XMw&CzW6PneYgQt$VvDzVTB3va2_CK2eXGn;S1T-m8bVj(;$EMZf==?YVjwKV$$c z)d(U(h>RgJh{z}+!-$L{GLXnfB14IcB{GtP-H}rAw|X%8B}Cckzqx~ z)lv;CGP20fB4djTE;72v@FL@j3@|dn$PgoAj0`d|%9d)Fk#V+E1C5L{GSrr8tdYUC zRHKayx1}0yWWX)eh$BO8sm2@`bW1hr$go?gaYqK;QjI(^^vKvFgO7|pGW^K+TdDvc z5kNwK!~h8b5(Oj-NF0zrAdx^qfy4p{1`-V<9F{5`NI;N?AR$3wf&>ML3KAA1E=XXI z$RMFXVuJ(+i4GDTBtDiZK>W8LLOg_shZyk?BqT~mn2 zypVt)5ko?T#0&`<5;Y`jNZgRXA(2Buhr|vE9uhrE6+R?>NC1%tA|XU#hy)RdA`(U< zjz}PpNFt#`Vu=J3iKe9rClXIf6;LFiNJx>GB0)u>ii8!3D-u{FvPfu=*doD2qKkwV ziLa#!FcM))6=EdDNRW{zBVk73j075qG!kkg)=03CXd~f9;%%t{jzrv2g&c`F5_BZ$ zNZ66MBY{UEkAxnHJraB*`bhYZ_#+1Zas*haLjXAjkb?j@3XsD9IS!Bm0XY(oLjgG! zkb?m^8j!;QIUbM$0y!cq)ggf#6PD_rK#mIJut1Ir@q1s&U|{mQe*f2X-3|A7JI(EFMe^gs+E2KgTzh$+rtM?7zFKcN zO?rEzPw$k^ncb>v+4K6$eu+=~acwG)@vH;aljh(fM}lh`>hPpVhi1hbb#}?|e5*R? zAC%LvadXzPsLy-W)kW#OToz`ktE>*WPMuNV=xH5(@td1-zufL7sk_xgeZNp<9$w$& zabm?h1q!t3eN*x84U+IMtlXg??b%#1DZwe7>V#BOv@7OMFOiP0{Xe3#xI_j<#mzG+ zIr2%0x7LxG#yb13x1!Qs>-TYfUoFpmJF?o_*Uvue>E1Ny$co64$ePHa$g0S)$hyeF z$jTAz(#YD#;>haA^2qu~0i*&_0;z!%L8>5SkUB^qq*6p%3aNz@L#iR=ka|c#q#{z1 Zd8o;uC{h(Ei_}F5%l{v3Vp;$I literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/ROK b/lib/pytz/zoneinfo/ROK new file mode 100644 index 0000000000000000000000000000000000000000..fd91d5b729aaa78253bd439c36a103fe88ce33b2 GIT binary patch literal 571 zcmWHE%1kq2zyRz(5fBCeaUcewSaLHNy!y|vq4^OhU|9C1a zk?|~vx#GE?&x;rO1r;xzf5~{oA5`(`__d7J?@cP+EZ>pw_PkidyY^WbA504?6c#5; zP|Qo8pw#R%LD|o90s|uxGYcat5@cZmLI#F%4xp_JkLLg z28NymjDkMCAq?KZAq-y5AOeWJT|z)Qkw6Fue*FQ}_a6vAeg@GXe}ia{-$69U{~#I^ z1|S*~4j>v779biF9v~VNCLkIVE?^oMHXs@lK42OcMj#p#P9Pc-Rv;P_ULYD2W*{0A dZXjoY!VW}(!Vg4)!VpB$D;&9i;jC-P1ptE4#?=4- literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Singapore b/lib/pytz/zoneinfo/Singapore new file mode 100644 index 0000000000000000000000000000000000000000..9dd49cb7a72f1e0708e92fa53b7e0b4fa001553a GIT binary patch literal 428 zcmWHE%1kq2zyO>;5fBCe7+bml$Z2bCUA!yZZ^8ktCkdxEKTSA2F`na+NA8F3yL>yu zCVu5$WMXDvWn*RMU|>j$1?dKoEDQ`u1q?uubOlCU5D8*uR)EO52@Jwuwm1VrK?8%h zk8cP=FcAAX`hbbx5C$(G4t5U#nS};I2)msNs0HMH=^s$N|AC;+EO;@92Kfa*sGIM6n`&4VyAGP|o@7?8fc8C7KcHbY* ztc7JM;x8AfKjF>o(r=z`_v#<&sqVV^un4oh{h+;V{yg9IRcnpLDPHrX>^*igE@s;wU*2J!J6UX;KW&?xyEob2T-j%S zTe{!wF7IJ}ACqp2hW$Rjiru!q_c>p{e?>Dxa}}Gj@1`uP+lJkxdOD z=f?U%onSjmwwYlaLyTUH8D{UM&BiV3qD;AdiP3jKUCXV7Y2voLl`Y|eJH7qlid!NA z4te_rk86qCP-{eZ?Bt@sNeR>J8YtzkzlS;P07w-OsautwA#v_`Ib%^J0|!AdIKY$cbx z?W8Q;;*8FE!5LFj?W7LyI%z4howOTso%E;`R(j_IC*%4oYwSBiox9IvTA58bR@Q-m z*0}YtR(8Fx#+L;%HoDTvSiQK^1+(#4f1_#b}#XE*iD*xPy*DF9LdS6u?621pT*Dj;P*>VOmisf4R81yT#77)Ui-bvclF zAO%4xf|LZQ2~rfKDo9z7x*&x?Dua{;sg0{H4pJSYJV zgwzQs6jCXqR7kCmV!7&Sag@td*9$2aQZZLuGNfim(U7VkWkc$Q6po{ENa>K;A;m+g zhm_A%*AFQmQbDAI95v)9B1aWD%E(bijzV%&lB1LywRF|RM5>9D6R9UsP^6+rNs*c& zMMbKLlohEfQdp$2NNJJUy6WO0)pgb7Me2(b7^yH)Vx-1Mk&!ARWk%|Z6dI{CQfj2u zuDaMrwUKhW>Utvuchwa~N{-YVDLPVhr0huDk-{UDM@o;>9w|OjeWd)Zy8g%lxat)^ zmH=4;WD$^6K$Zbn2V^0Tl|YsPSqo$_kkvqz16dDbL0t8UAWMR*39=~2svygPtP8R* 
z$jTr~gRBj*ILPWC%Y&>BvOunSg^(q3)oX+-60%CjG9l}PEEKX*$Wn>_r;0ztpQkER ckJ*S6W-YOB^vKkaNux$57A7aTPh&!V1}iQw{r~^~ literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/UCT b/lib/pytz/zoneinfo/UCT new file mode 100644 index 0000000000000000000000000000000000000000..40147b9e8349c50b9b5459d34a8bf683c91b182f GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5Ss literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Alaska b/lib/pytz/zoneinfo/US/Alaska new file mode 100644 index 0000000000000000000000000000000000000000..a4627cac0628381c6a64f20dfd9d93f4b03fcba0 GIT binary patch literal 2384 zcmciCZA{fw0LStFMdZQ_iWI^JvXctPqX!g(vb;PAhy=*(rbH@u+M{XsK zHixs(agMDvX=l2Xu0^-3wTU@~TMuKAx;2xfYdy`PxSj8*trs<4a{l}OKL^GO-u%A4 z+NNi+od28%^BZ2y5%Y3S*=Ih(hjId=i+*kytuK(jkCv#Zvs=8uH&;uiuh{E)d5H|^ zO!CfXEz+U&g@KvXi8?GlA>hso*X~z01tOv+bj0A8*Ym?geb<%YK;&09WK>`#51hE6L`R-{uxnT?^v{=zwtXiSXa6Y^)?5&YvERs~ zlv5&UYEmbA4vXaRuXV~qi%1>%K&M@PMWy$j&>3g8tEGEi((<(`mD#9dR$G=@RsPhHy;|nR&lb7Ym&m-yU&KS7dgQ}nUx<}DQ9ttDIq~R`aJ}mIap7&A z(D?`6RRxt7bz#FXRkZYNU0k$F75{WqmZbNIlFR31S=3HZc4k1XxmhR5U;j|A{k&6E z>^`X zwF8mz$?@}|?!Ybi)X;#~*f1hD^>>TS72oUn?vv{2^hn3-qY8v$GZP%mJvq#(Hb0aC@`Hsisb{#32{VQZk&n?lKGgrRQJR;g+CuMu( zY0(~x(;c~&RLA&lx^s?JokL@K$L(IVv;Td)>&tevd+!^1&so3PyRl2Z*q5tZAv0W| zGw<|-g}bNm2?pCb9mjk(JE6f~SBop$exvMfX4!ijUnf2o?AhlM_MWh|!sis`^FG0+ z%ID;l*6=CvIUBf?n@fJVxtZhQ!}y&=dXWWN&5DsFBWp$$jjS43 zHnMJH;mFF7r6X%c7LTkRSw6CUqyR_-kP=u;4XmaJNEMJWAay_rfm8x11yT#77)Ujc zav=3U3W8L`YD$9C#A=FyR0SyuQWvB!NM(@HAhkh?gH#784^khbKuCp<5+OCRnj#@p zLdt~H2`Lm(DWp_Lt&n0N)k4aJ)C(yXQZcJ38B#N=DH>8Wt0@~&H>7Y#<&e@LwL^-B zR1Ya1Qa_}CNCmB?gh&morie%tt)`4f9g#vJl|)L3)DkHsQca|sNIj8)A{9kSiqzC< zii%X#YRZb#)oKchR2C^MQd^|BNOh6&BK1WIj8qsYF;ZizDKb)Jt0^;5XR9eRQfZ{r zNUf1#Bh^OAjnvyQ`3?=48Fr_~9T7f_kK=r7A~_8e|2O0R+&uI>kJ6`AEX&BskZBc} KnHiZG((@OSrEiM> literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Aleutian b/lib/pytz/zoneinfo/US/Aleutian new file mode 100644 index 0000000000000000000000000000000000000000..b0a5dd60dc21f5afc16a0dec9ecd566e452edc91 GIT binary patch literal 2379 zcmciCZA{fw0LSsm8d3;=Iwk>ZM~@ZF3*4e|L1VW+uzq$ z*ZN$h>tDxXe&OW|nwPuxRr5JJP~@Ln{#*NGqa$xlKCNz@>~I1f`K0S;wG(tGR|a?I zI3XPybZBFhKddTShZkh|-5GJ({n~cFCwfkMMz1=N*DvY?-%tA&PT!DGdfdP0i)ne+ z@hKj6`d-eUELMI`3i%wXTyfon# zM=$;{OC?Tb>Lp*rss}!c(Mj*!P)oZ?sk!etBjUUWM-#SStWxqyE<9r zcz4R&tWfo^+b8o9mx;V9xiWt~L_BgPUOxKmWwAzO>&M2=h{p%x^xAih3%O%X7wkW+ z3M((_Cz?J~MXSHm#YF?E_}6Q)B<%xHa^buzi#jCA#>Qm%Y@2v$_-noHn**w%=Y(GW z`SYrB^N@Cql&h*WyY+^yR8^f=t9><#ly5#?*W_GRHJ5AT#-u2*@pPWtbnBX^dp|)w zGjm>S-ajv&ofs4KO*3-K$eW^};-+pqctUMWyQH_Z45_B53Ef<>TQ$!f(=Faw)pBm1 zZguCY*3l;2c4e`8;Yh!H(Vr)_cWsj|sRXg3rdYNgo);atUb%C}jOdK>$X%6M>B|A*`)gF59_^0pHxA?Awi*Gw|l}P z+;{K^1nRcBT!BEd;|dJ~Ivau_?02F4CEDI&_`2c)f&TWOCH9`YuEOV9Uv1u6^Ey6P zp}7|p`CJ91=2BWkYBIU|!sW{pf6 znKv?VWah}!k+~z2M`n*qADKUr03-uelLD*B0g?nH3rHG}JRpfcGJ&K5$pw-OBpXON zkbEEsv6_q^DY2TIAW1>8f}{n>3z8TlGe~NX+#tz8vV)`t$q$ksBtuAwtR_cDl8`JR zX+rXZBnrtCk}4!uNV1S@A?ZT$g(S>sGKQqgYI24o&1$lSqz%a%k~k!DNa~Q>A<09s zhole5ACf?;$sm$KtH~jfM61anl13ztNFtF;BB?}ji6j%rCX!AhpGZQHj3Ozunw%m@ zwVJFVX|^2SkNXjVd1Qo4W~PKCY%?)FLS>C z>6#0TQZm1OlnVTQ5y8O32!zZ)$jJ2n|Fm}u4FCVHUckum|Nq;uKkB@H%gRct^ z2Lo|<2+(i{2qEkw9-vCFlYT(;{0D+K7M=|t8stO}4RR)k200Z(gPaSZK~4tIAZLSV apwmG#$oU`|;@ zhGvVQk_wrKh~{WcDN2$`xS>(#ZeX~vnF$&Y>LFfYH$oy#!8hx0#iLzBjZ z$lu1(zQe=(Y9C(vX!|X5jlW*@)it$zegnPY{iAB-z7$#Y;_oIa;-FqVy40*0P%0mL zFIH>4LT^kQta9e` z)L%w+Rhx#l(VP8Rsx9HQb?#j*mDl2|&Z{U_TWi;nt-A}=*QYCFTXvbrFWMv97Z#X; zp(V0oeYh&jcujW3W|*QelVw-nII}zERsBuFr_7!X6ZM|zv1+eZAHDZTq}ungOCM}= zMI9Vj*A!2`EQfmhVh#^HEG4b?n;$~A$PvjlrQS=WwEQjeqid5sS}@HVD_g9OfAXd( 
z%TLoky)spmXAaXRlH%0K^lti8R3~*hp_M)}DBM&;hRfN`zUG{tul(FpOr`y=RbD-5 zeyKPqzm^o4s@+9$e)|b^A-i0gWjocyg@yXk+|}xGa+ba_VuAWCcD}yaJxTo@I9^vj z@{+mMFj`-)lW4A2C(4a;QRe26DEVVgd*jM&FYdKoMwWPq$ASx{#*7P6b4qbJbv_Z6P|rjciZ}gd17Ii?*4%?J(3G_&y3gAld&sxuQAE0 zcVLE&=-p36T;3v)9VVH`;-wPh6>Fk$W=nKcuzC8!#rm0&J}PETn(nhXNW~5xru)9v zSoI6w&!M=yg;9Vj^T$|0%tdlaVY>643Q6$gi z&oT*P*2sv=;pVxRLOpUpni|z1OOH+*rp9>9*JC5Qsj*e#b)sJ@mAF4zCwY3Pq;>v! zLd7Zd{CiO{@jJJfl-gbkf&y-FfRhYPsE?EtfjHmio>+jhyfI-g^I;m^kUx+dc#0B*H$u2HB@?oZW49 zJpLl?-}hpb{j9SWt8e|1{p)UbLQU6lWKSZy64{r?&P4VmvOAIeiR@5hk0QGi*{6|SL5B0Ct_!^kd1_A#=Pk-h9_cQdk|9qo=r z_B67qk$sKqY-DdEyBpcx$PP#LII_!;eU9vOWUo8g-Hz;cN4w*ZJ&){qN4xKlosaB& zWcMTcA87#60i*?vwg*TPkS-u?K>C0*0_gwIHYq(>yX|d&12{u(msa%Aq`~cAkspH z9wJR-=pxcahCVvlMk1X=T8Z=$X(rN5q@4`?L>kJ_QKY2|Jw=+z&{ap@3|&Ur%+O~?+h~SPBduoWHPURP+eo{Sej^P> zI*zm)={eGLr0b5h?F@Z)w2f!zyrXSBL+_F1Bi%>ZkMtk80gyWYxdo7W0J#Z}yTH-j z2FQKjXm13DJHgT33JmuGax*a84an`la6ce71j8MH+!Dw=f!q|xU4h&d$bI2xZw%zl zaJ07ua&I`=n*+H!klTad{y=UJhC2kgMHuc84gWNN6N7u>!2hBeoH1+Jg)5+dF{xKZ`LIQ&PV}gSNf&+r; F{Ry|ZcJBZH literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/East-Indiana b/lib/pytz/zoneinfo/US/East-Indiana new file mode 100644 index 0000000000000000000000000000000000000000..4a92c06593d33d3969756f482e5d3d4b773984ef GIT binary patch literal 1675 zcmdVaT}+K}0LSs?kk@(LFc&RW7jAmD%q$zL)}m8hP9)@yXes=a&Q2uH1yVP-Da?{F zud{4KTr|uuW;Thu)jw}D*7heCne9CO-+%60xN+k-d!Em8&Q9xG{Ju}1pk!mR^T#p5 ze8S1G-kjV=y5`b!I@UdY4Q zNkG1>nd`pGnkC;CPIEtfo1#CP{~m6O)ZQ6SIgMfMtL;_k2|<~Wo+dK-&+5#$H7c{C zUT2M+ud*(e=>>f;YT>aunf+{@@K=}0oU73yca>i*YKRbvQxoKp%8z1cL z#d23$l&C4plDiK(Vu;VahDQ9p8GJi<9X4dx@PF{~yp}nR<9XLF`64{;LbEf{-jA`@ z30$2?o_Fu2Z)&zb;H0ISbE!F(n{!dX$uRdB<}(hTy+Yvcb1O1mwsRX8{44VdJg;zQ zxEYxrGC^d9$P|$|TFoSpSt8T4nt37`W$BdKmtexND5Yy1CoT*WPzk%HF+S3AekVkAh{sP zAlV@4Ao(B(AsHblSxrtzQdW}{l9tuvg(QY#hNOn%h9rk%hopz(ha`w(h@^<*h$Lw> zSt4m#O`b@iR+A}`Dv~RbERrpfE|M>jFp@EnGLkcrwAEydq-`~MBZ*s0=1A&D?nv@T o_DK3j{>UT1`A?q#qs^ls#XK5f{WIf};}c{3NlEcZ@rk2<0ei4tkJ=? z!Q9a_a=|1E(F`4%HgPS*S5s0GdzBW_I;Z#h-geP=(N%xve?Dg%818=GCn+U!T5r!k zp2qfnczG_{m+x&}v>!$5LS@CrKdJW?d1U3=U#eB{j*B;NN9u6QjyHo{+MdLuyyRuVz=}cJ;}*W9HM6Z*=*-GP8ThR$Z~? z9kVBEnckcCg83{lTklJoYCeyiq$|DiWPk7=eIPPb4%AOn2ZQ3|;PHX#i&u;s>iUZu zQa5zfoO9-I+$nt|xzZf%yjvfODK^JFEA@$x#pZ-wpuh92m+vdm^~vf2Ikn+sRb4(q zP8XypUF4NBnGdS7xzX}NLN|3TwUwNo7^Q3CBh8QfTj~p8!RBJyYx+`?tLD;ghxJc2 zRp#>19lEx%)LhwJrG73sBxXgay1Hb$T${gK)nygRFH`5LUlViWw;_+H-=kBczT30< zkKkCj-fXhIUO&m)xG-4%d3=!h>%bk_x3iP+ulH-ua-V6Ce?~WaR+~oRQvvEPX*^b| zCUK{wY0tf?>8tJKmX>SOEt{8_K(k0S*9)b^iB&qNB13L1%hSOd7MPZAP1CIk(#>si zAJVNe<4v2%-E~MpxM@4Eg}yz!xoOuWT(xgjYdSP+t~y)`l#XX=Ri|$+%N={ZR-s$I zk~>#!QJu3r=B}5PsxHZAP1orq`tF#0=AMyn=zBxfnXvA&beQim2@g!x;ni!U`=$Q6 zM|r+PR3)j%qD+a})=x#}j*^~B+o@g|8K(C$*HxeR1k-o?Nfi^;!}RN2uKG6(G6On( zrw7#hYzE%=L=UR`)(rl>NXM33k^6SNsPA9$jSP9`aUGYnRfguxR}UmElVNF(so~Mt zGGh2JHKMNA#79om@l}gWLeNm1ux+LpS=&{QdbdDEAB|Jqc{60pjxH*3idV)K2B>kd z(K3EcjhfJ@l_Vt}P)RrHf!UjW>RRSp0w|(nd~dpDQl|CBh`!bl)O^&X!%T? 
znzr0bEgGYhce^~6KSMnpStw6rcvV_Zj->ozrL!U%(GouMi>H9_XT=}`?E+~mJT0XO*zH~R_6OYt*7Fr~ zUy+SPb{5%MWN(qpMRph2USxlf4R+ccMz+{#_ZZn^WS5a`M)n!mXk@36tw#15*=%ID zk?ltI8`*HD-Em~gop#TWO-FX!X}2BOcVy#{okzAF*?VO3k==LN?ML<>X#mmzqyMS(m14ZNb8W^Aedx|s_=_=Azq_0S0ka;NP%(sZZoI?{He??~g3 z&LgcydXF?8=|0kar2ohb;IwxDatk=^J%HQ1axWk^19CSY zw*zuNAU6bZMZQ``|338(#cM5W=AomJ#vmkd1a=UnL`V#q{9xs9Rrirn)O@y~k SRPU&s5#C%J%>V!A zFflLy$p{9P|NpBp7&-p`FHT@!@$n5|@CXKCmk^+S2nZo;D?3mn*w!CVJ^z8AxBx^rPJ#dc literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Indiana-Starke b/lib/pytz/zoneinfo/US/Indiana-Starke new file mode 100644 index 0000000000000000000000000000000000000000..cc785da97de0a5614613f9ba6e502d7dc5f525b5 GIT binary patch literal 2437 zcmd_rUrg0y9LMo52q6eaCWd5SS}_9&Y$$E8wFP5`#6Nk!KM+Q0h-E%1kf_n))+8^Q zrA@&a>LQnGhSpSU3r(rDnlm&BQVB4LS;(VtK}zTJydN*KKlgpv@4U_qC|931-bH24 zPm{k~i2a0z+hrf#$7uUfzb{Ge{`7aXXLF?9yX%7b=5?PwJ9$u@EeSQ}^Uq7$#M9>c zw4>54jiw{IPCB~YGC%kZ>kB8=nv0z~^`-r9s?O#r{o|H3s;jSGmp;5`X5Z>v#V+jV%yK@)MJLPs9kW=8MdCQ)_e=I$-!GN!7) z+*4K{V;82IXivI~dpy?MJ0(_PCe2XeM-EGD;CK~#BSzoXeM?Pfy{Yg2{E~`0bWz9e zJ+3BJj+O^D?NyWVugl~WpP2{K&dEc$yUoMVhb7^WO(wzDs;7i4FHdg6bM^e=6qC!1q#~3v?BU3+JF{tKE zh}YiyHsu`-&;@Bts^ChTEQtHgEcilXq3?)U)X*b^owcUuwHA4%dA%uKx=$9@7nx`C zPU@1HD)rpd2EC-TP%Vwvte53vs%8Dlb!kGpDm|U6%R&NF*?}azqW7|TVTVUvJmWVj zD--3V#%{B!AVSpEQ)YGAfUH^dzF8aHD&@0lOu4ULSEe_p%FZ)-UCd^+uKAFz8d|Q_ z*KgMw+H=*$>I(fzQj%7D}XDFjjpq!dUk zkYXU!KvxdlP!G-)grg!zNjPeP6a}dYQWm5xPFonHGEQ3>j@lr_L8^n42dNKIAdU(l zCE}RQI(-x4Uf=*jPjv69GM5>6C5ve0mNTiZTDUn(t#YC#9hsqsHoGHl%u9dQ8}uLl$E2dNMSiDiFS(=vDTtKMt`{2xIWL%`mewGuXVT2=W)C3?*6&g zGu#;6Q11QXk!auH@OauDUT1E%SI-AItM*sz^*(vZ$obqqxb5iv6`N<|TCtppSG-{^BC#z|{`(*m}qblp%FLJ}lvnu=W zXL93#!|JAX&)od@9+lJZgUQ`Epl|V?HZvAHt8dMC#pD%i)_D^jO1`f_=MTRu1wYT$ zGY5A_;ZRr=^+l!l=qy#zwNOg;WU8`A#FTIPQ&j|l=C-BZDSzHfGpl@1%}z=+mD9h` zl_S5IIpgo^xo6IqdEfTws)1kS_V>2yJ9d9AcRs&fSGOIJ`Q6*pUG;mVrfHq3Eoqm! z+8R|i)^7quYjt2~mkFlM*TJK$X2EE#UiiXJS@dPK3T(`qd#>(wm^}sVb4B z509v2S?LmP_*jL<$7H$xHMRW5$!10N8NK54C9`s(UpF2aGOLDn>DArG%zcBcdQHuY`qeSx3Rpiqui6)Mz=$qNHapVit(mU6* zp1q_WXwNba4h-qGh6y9PkLic}+H7jur#EMuGF$39^_Gc?(q7rC+J{d{M`nlW7(6JQ zmmAe1eLeE%g(|hRD*%BxWDi4zu3!V_ZfaQ7GpQac98ub z8*8Z^-75-67jU_J?c`*&(t;WRJ)ukzFF& zMD~en6xk`VRb;PDyIEwn$aazaA{$0_jBFX%GqP!9*T}Y!eIpx3cJ8!WNA~Wtn@4t! 
zY#-S_(g36bNDGi2AWcBJfV2VW1JVej6G$tNUN~(tkZvIDK>C3+1nCIU5~L?cQ;@D8 zZ9)2iGzRI6)3yfbjng&<>5kL32k8&eAf!V`i;x~6O+vbav88`R6X~bZHWcY7(o&?SNK=unB5lS0YhQgfzJ43s U!cawVd2wlBsI08Gthm(o7dPd97XSbN literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Mountain b/lib/pytz/zoneinfo/US/Mountain new file mode 100644 index 0000000000000000000000000000000000000000..7fc669171f88e8e1fb0c1483bb83e746e5f1c779 GIT binary patch literal 2453 zcmdtjeN5F=9LMnkqQH%ZQ;8vb8FWLuL3o9$5k`URY6U)Y(?UE3#rVT< zR*YFDf|Ayni4SWwHq3b&mt~`eO%k}b^FAy8>5u+w>&t$;e%!&IpEvGR z-Zfd`A2->2!ozi$hxe(9ANJ?zJ^i7o`=tck^V$z;Z>?YNYndW?3oq&3_WkO^wg^2m z=l6!8>R53#-KR(Ab%;NrJ^EUhPh1;)Mvi^&5##48Hesdb*xmIy=m9w`H%Z)*G*8CPE>zRQ9WpLBQN{f_SI2)D zt`dgA^o&zKs+or`>sx!ys9C-l^0w`V)a(@jIcM!h;`Zz>FGv zB*zAkG<-@YUv`T-2lnZda}6rB>qVV*v`nQp)#;2^7O2d+7MZninwsxiBNvp7s_euE ze9|x>fuGjy37}>mM5fY_lmETdpuf~XP;K(-=s*-%&&xJFiNiU4~kX2Bl3~q z1ER8JNIp8yCaP+V$<*7-ulRIbVydb;yRY*i1vPNW)$SRR#BI`sJimcRXmWr$uS*+Ep7FjN`USz?@imhhJ$eNKwBdbQc zY+hJ5XBG~uoMY+8+L6U0t4EfPtlw%1fKBc$bvVj{)Q6)$NQJDXL`aRS zrbtMYILd_72`Lm(DWp_Lt&n0N)k4aJ)C(yXQZb}tNX@LKXh_vK%7)a9qi{&&I7)}q zj-z-;^^o!*^+O7XRM2Wlh}6((iilLvYRZVzk)x1EC6Q7hwM2@ER1+yDQct9yNJXut zq)1Jzrl?3&t){FVrON6L@X jUtDkg|1SRy^Iu`1`R|b8nxB@HmXYGh%uLHn%W(V&DI1f# literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Pacific b/lib/pytz/zoneinfo/US/Pacific new file mode 100644 index 0000000000000000000000000000000000000000..1fa9149f9a9207a9b9838141088663ebe669f250 GIT binary patch literal 2845 zcmd_reN5F=9LMpCq6kW!Oq2-iq$YxjfTAdt`82>pRIVgu_>jOb4HZHyLt6A;%{b(I z=!w3PYq>IX%w|!9Zn;{s5}NZVB1>f|sc4Bx_jcar-~Qs*Kc?5=Y4;?3kvcQ zJpVY|>^EG_XZG^mx6D4O-cOZx>%xq@7$ZC1ykWlG6d{d+udixcGE^P&73$-Dc59yMJf5rn`Z`tl4y0)R2QkXCBU%T% z+)H&?*Hd?0JDf{vy-plap$(OC z$EP)__wJ9idZSX^xyk50>xO2QNRVh9q9r_s{rT4GlZ0qhhL5 zl&?*qL&{Wi^Y;>SVW}EkzfVRqm70-NTO_u2u^CnRl*DbBV&d~(*9k>K%;=P2Jtnie zNsP+UV-s4J>jks+A=v`pFufD$I)3t14R5>ajibn!-b>D6CBvXY5kN z{$MFdYA_|u7iC>|wOLnxMAmndo2RR43U!RgtnHbwvt`R2C^MQd^|BNOh6& zBK1WI?6eg|O6;^XMv9D787VVTXQa?brIAu2wML4KR2wNbQg5W-NX3zoJ8jL8q9avD z%8t|>DLhhnr1VJbk>VrON6L@XA6WpWT>)eXoOTV6MR3|xK$Zbn2V^0Tl|YsPSqo$_ zkkvqz16dDbL7a9)kR@^2H9;1|X;%eV7GzzJg+W#ZSsG+*ki|h(2U#9ueUJr0RtQ-l zr(Gjtk(_pwkY#e(bwU;jSt(?xkhMY<3t25>xsdfj77ST2WXYU%&5%WN+EqiA&1u&S xSvX|nkflS`4p}^8^^oQBxUKu&O<#yz#3Z|nBhp95Cd9^#NRN+?jgO5B`5OyZo4^17 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Pacific-New b/lib/pytz/zoneinfo/US/Pacific-New new file mode 100644 index 0000000000000000000000000000000000000000..1fa9149f9a9207a9b9838141088663ebe669f250 GIT binary patch literal 2845 zcmd_reN5F=9LMpCq6kW!Oq2-iq$YxjfTAdt`82>pRIVgu_>jOb4HZHyLt6A;%{b(I z=!w3PYq>IX%w|!9Zn;{s5}NZVB1>f|sc4Bx_jcar-~Qs*Kc?5=Y4;?3kvcQ zJpVY|>^EG_XZG^mx6D4O-cOZx>%xq@7$ZC1ykWlG6d{d+udixcGE^P&73$-Dc59yMJf5rn`Z`tl4y0)R2QkXCBU%T% z+)H&?*Hd?0JDf{vy-plap$(OC z$EP)__wJ9idZSX^xyk50>xO2QNRVh9q9r_s{rT4GlZ0qhhL5 zl&?*qL&{Wi^Y;>SVW}EkzfVRqm70-NTO_u2u^CnRl*DbBV&d~(*9k>K%;=P2Jtnie zNsP+UV-s4J>jks+A=v`pFufD$I)3t14R5>ajibn!-b>D6CBvXY5kN z{$MFdYA_|u7iC>|wOLnxMAmndo2RR43U!RgtnHbwvt`R2C^MQd^|BNOh6& zBK1WI?6eg|O6;^XMv9D787VVTXQa?brIAu2wML4KR2wNbQg5W-NX3zoJ8jL8q9avD z%8t|>DLhhnr1VJbk>VrON6L@XA6WpWT>)eXoOTV6MR3|xK$Zbn2V^0Tl|YsPSqo$_ zkkvqz16dDbL7a9)kR@^2H9;1|X;%eV7GzzJg+W#ZSsG+*ki|h(2U#9ueUJr0RtQ-l zr(Gjtk(_pwkY#e(bwU;jSt(?xkhMY<3t25>xsdfj77ST2WXYU%&5%WN+EqiA&1u&S xSvX|nkflS`4p}^8^^oQBxUKu&O<#yz#3Z|nBhp95Cd9^#NRN+?jgO5B`5OyZo4^17 literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/US/Samoa b/lib/pytz/zoneinfo/US/Samoa new file mode 100644 index 0000000000000000000000000000000000000000..1d7649ff71d07a158d69ab0d46a60f89c28683a3 GIT binary patch 
literal 272 zcmWHE%1kq2zyPd35fBCe79a+(1sZ_FMAqLNzb=JtkkU3VU}Rzj%5AJ*VEF$({s)Lm z4PamalN?}@$HzB>!7n(3!3l_iffxZo2-^#Dg86n=g0ln(2GlGSx2>7R^6VI`N3zh;KBpBy5pu?bG%PFn!04+-VxJ z#TsF?L|U}GqM{ePVq&ALmW2=gcE2b}h>TMGkKjFAh{DsRNPT~T`|@s6tPpt)u~di> zk11Y=$~sfB5Vkat`up=dhbjH%^AgXm&+}b>Z9vp=>h-DMt-eN|3VmvY&lk`~Kb*fz zKd$+|=WYW3?rNL7cA&``IC^k}G? zM7l)Ul0qC;JXc}v4tWhyHU1gU!H+6IwThI4->nmT?b9VN5cDD2G|NRBF zw>D*Z?s*A|B_4Ro9&PKi$96W@8R_ag5S~VM`r@nYlPmlj8@5rxuX7GEZ^(L{k(A6ln1 z$K$lNDM{=4^X>VP5EVDZ+WO>QJ$pS4{Ff79+YpIAfP6*VRvvpq#)YI^9rn&YMB z8%Nc$*s0d(UcEdY(B6-BsO`%Rd*yJE+DEJG)s{rPb|}X>DhsW%DcWAoNU$#dU-m}0 z$GX!a)qU%l^@J^`=bO*1_vW1Tog20NGZQ*+?2-b<#lcKoUVRK~h0-L6SkTLDE6;K@vhT zLQ+CTSl^j~E| Bn416q literal 0 HcmV?d00001 diff --git a/lib/pytz/zoneinfo/Zulu b/lib/pytz/zoneinfo/Zulu new file mode 100644 index 0000000000000000000000000000000000000000..c3b97f1a199421d6d9625b280316d99b85a4a4e8 GIT binary patch literal 127 ucmWHE%1kq2zyORu5fFv}5SstkJ=? z!Q9a_a=|1E(F`4%HgPS*S5s0GdzBW_I;Z#h-geP=(N%xve?Dg%818=GCn+U!T5r!k zp2qfnczG_{m+x&}v>!$5LS@CrKdJW?d1U3=U#eB{j*B;NN9u6QjyHo{+MdLuyyRuVz=}cJ;}*W9HM6Z*=*-GP8ThR$Z~? z9kVBEnckcCg83{lTklJoYCeyiq$|DiWPk7=eIPPb4%AOn2ZQ3|;PHX#i&u;s>iUZu zQa5zfoO9-I+$nt|xzZf%yjvfODK^JFEA@$x#pZ-wpuh92m+vdm^~vf2Ikn+sRb4(q zP8XypUF4NBnGdS7xzX}NLN|3TwUwNo7^Q3CBh8QfTj~p8!RBJyYx+`?tLD;ghxJc2 zRp#>19lEx%)LhwJrG73sBxXgay1Hb$T${gK)nygRFH`5LUlViWw;_+H-=kBczT30< zkKkCj-fXhIUO&m)xG-4%d3=!h>%bk_x3iP+ulH-ua-V6Ce?~WaR+~oRQvvEPX*^b| zCUK{wY0tf?>8tJKmX>SOEt{8_K(k0S*9)b^iB&qNB13L1%hSOd7MPZAP1CIk(#>si zAJVNe<4v2%-E~MpxM@4Eg}yz!xoOuWT(xgjYdSP+t~y)`l#XX=Ri|$+%N={ZR-s$I zk~>#!QJu3r=B}5PsxHZAP1orq`tF#0=AMyn=zBxfnXvA&beQim2@g!x;ni!U`=$Q6 zM|r+PR3)j%qD+a})=x#}j*^~B+o@g|8K(C$*HxeR1k-o?Nfi^;!}RN2uKG6(G6On( zrw7#hYzE%=L=UR`)(rl>NXM33k^6SNsPA9$jSP9`aUGYnRfguxR}UmElVNF(so~Mt zGGh2JHKMNA#79om@l}gWLeNm1ux+LpS=&{QdbdDEAB|Jqc{60pjxH*3idV)K2B>kd z(K3EcjhfJ@l_Vt}P)RrHf!UjW>RRSp0w|(nd~dpDQl|CBh`!bl)O^&X!%T? 
znzr0bEgGYhce^~6KSMnpStw6rcvV_Zj->ozrL!U%(GouMi>H9_XT=}`?E+~mJT0XO*zH~R_6OYt*7Fr~ zUy+SPb{5%MWN(qpMRph2USxlf4R+ccMz+{#_ZZn^WS5a`M)n!mXk@36tw#15*=%ID zk?ltI8`*HD-Em~gop#TWO-FX!X}2BOcVy#{okzAF*?VO3k==LN?ML<>X#mmzqyMS(m14ZNb8W^Aedx|s_=_=Azq_0S0ka;NP%(sZZoI?{He??~g3 z&LgcydXF?8=|0kar2ohb;IwxDatk=^J%HQ1axWk^19CSY zw*zuNAU6bZMZQ``|338(#cM5W=AomJ#vmkd1a=UnL`V#q{9xs9Rrirn)O@y~k SRPU&s5#C>> from tzlocal import get_localzone + >>> tz = get_localzone() + >>> tz + + +Create a local datetime: + + >>> from datetime import datetime + >>> dt = tz.localize(datetime.now()) + >>> dt + datetime.datetime(2012, 9, 11, 14, 43, 42, 518871, tzinfo=) + +Lookup another timezone with `pytz`: + + >>> import pytz + >>> eastern = pytz.timezone('US/Eastern') + +Convert the datetime: + + >>> dt.astimezone(eastern) + datetime.datetime(2012, 9, 11, 8, 43, 42, 518871, tzinfo=) + + +Maintainer +---------- + +* Lennart Regebro, regebro@gmail.com + +Contributors +------------ + +* Marc Van Olmen +* Benjamen Meyer +* Manuel Ebert +* Xiaokun Zhu + +(Sorry if I forgot someone) + +License +------- + +* CC0 1.0 Universal http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/lib/tzlocal/__init__.py b/lib/tzlocal/__init__.py new file mode 100644 index 00000000..df7a66b9 --- /dev/null +++ b/lib/tzlocal/__init__.py @@ -0,0 +1,7 @@ +import sys +if sys.platform == 'win32': + from tzlocal.win32 import get_localzone, reload_localzone +elif 'darwin' in sys.platform: + from tzlocal.darwin import get_localzone, reload_localzone +else: + from tzlocal.unix import get_localzone, reload_localzone diff --git a/lib/tzlocal/darwin.py b/lib/tzlocal/darwin.py new file mode 100644 index 00000000..86fd906f --- /dev/null +++ b/lib/tzlocal/darwin.py @@ -0,0 +1,27 @@ +from __future__ import with_statement +import os +import pytz + +_cache_tz = None + +def _get_localzone(): + tzname = os.popen("systemsetup -gettimezone").read().replace("Time Zone: ", "").strip() + if not tzname or tzname not in pytz.all_timezones_set: + # link will be something like /usr/share/zoneinfo/America/Los_Angeles. + link = os.readlink("/etc/localtime") + tzname = link[link.rfind("zoneinfo/") + 9:] + return pytz.timezone(tzname) + +def get_localzone(): + """Get the computers configured local timezone, if any.""" + global _cache_tz + if _cache_tz is None: + _cache_tz = _get_localzone() + return _cache_tz + +def reload_localzone(): + """Reload the cached localzone. You need to call this if the timezone has changed.""" + global _cache_tz + _cache_tz = _get_localzone() + return _cache_tz + diff --git a/lib/tzlocal/tests.py b/lib/tzlocal/tests.py new file mode 100644 index 00000000..49dd0aef --- /dev/null +++ b/lib/tzlocal/tests.py @@ -0,0 +1,64 @@ +import sys +import os +from datetime import datetime +import unittest +import pytz +import tzlocal.unix + +class TzLocalTests(unittest.TestCase): + + def test_env(self): + tz_harare = tzlocal.unix._tz_from_env(':Africa/Harare') + self.assertEqual(tz_harare.zone, 'Africa/Harare') + + # Some Unices allow this as well, so we must allow it: + tz_harare = tzlocal.unix._tz_from_env('Africa/Harare') + self.assertEqual(tz_harare.zone, 'Africa/Harare') + + local_path = os.path.split(__file__)[0] + tz_local = tzlocal.unix._tz_from_env(':' + os.path.join(local_path, 'test_data', 'Harare')) + self.assertEqual(tz_local.zone, 'local') + # Make sure the local timezone is the same as the Harare one above. + # We test this with a past date, so that we don't run into future changes + # of the Harare timezone. 
+ dt = datetime(2012, 1, 1, 5) + self.assertEqual(tz_harare.localize(dt), tz_local.localize(dt)) + + # Non-zoneinfo timezones are not supported in the TZ environment. + self.assertRaises(pytz.UnknownTimeZoneError, tzlocal.unix._tz_from_env, 'GMT+03:00') + + def test_timezone(self): + # Most versions of Ubuntu + local_path = os.path.split(__file__)[0] + tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'timezone')) + self.assertEqual(tz.zone, 'Africa/Harare') + + def test_zone_setting(self): + # A ZONE setting in /etc/sysconfig/clock, f ex CentOS + local_path = os.path.split(__file__)[0] + tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'zone_setting')) + self.assertEqual(tz.zone, 'Africa/Harare') + + def test_timezone_setting(self): + # A ZONE setting in /etc/conf.d/clock, f ex Gentoo + local_path = os.path.split(__file__)[0] + tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'timezone_setting')) + self.assertEqual(tz.zone, 'Africa/Harare') + + def test_only_localtime(self): + local_path = os.path.split(__file__)[0] + tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'localtime')) + self.assertEqual(tz.zone, 'local') + dt = datetime(2012, 1, 1, 5) + self.assertEqual(pytz.timezone('Africa/Harare').localize(dt), tz.localize(dt)) + +if sys.platform == 'win32': + + import tzlocal.win32 + class TzWin32Tests(unittest.TestCase): + + def test_win32(self): + tzlocal.win32.get_localzone() + +if __name__ == '__main__': + unittest.main() diff --git a/lib/tzlocal/unix.py b/lib/tzlocal/unix.py new file mode 100644 index 00000000..76c214dd --- /dev/null +++ b/lib/tzlocal/unix.py @@ -0,0 +1,115 @@ +from __future__ import with_statement +import os +import re +import pytz + +_cache_tz = None + +def _tz_from_env(tzenv): + if tzenv[0] == ':': + tzenv = tzenv[1:] + + # TZ specifies a file + if os.path.exists(tzenv): + with open(tzenv, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + # TZ specifies a zoneinfo zone. + try: + tz = pytz.timezone(tzenv) + # That worked, so we return this: + return tz + except pytz.UnknownTimeZoneError: + raise pytz.UnknownTimeZoneError( + "tzlocal() does not support non-zoneinfo timezones like %s. \n" + "Please use a timezone in the form of Continent/City") + +def _get_localzone(_root='/'): + """Tries to find the local timezone configuration. + + This method prefers finding the timezone name and passing that to pytz, + over passing in the localtime file, as in the later case the zoneinfo + name is unknown. + + The parameter _root makes the function look for files like /etc/localtime + beneath the _root directory. This is primarily used by the tests. + In normal usage you call the function without parameters.""" + + tzenv = os.environ.get('TZ') + if tzenv: + try: + return _tz_from_env(tzenv) + except pytz.UnknownTimeZoneError: + pass + + # Now look for distribution specific configuration files + # that contain the timezone name. + tzpath = os.path.join(_root, 'etc/timezone') + if os.path.exists(tzpath): + with open(tzpath, 'rb') as tzfile: + data = tzfile.read() + + # Issue #3 was that /etc/timezone was a zoneinfo file. 
+ # That's a misconfiguration, but we need to handle it gracefully: + if data[:5] != 'TZif2': + etctz = data.strip().decode() + # Get rid of host definitions and comments: + if ' ' in etctz: + etctz, dummy = etctz.split(' ', 1) + if '#' in etctz: + etctz, dummy = etctz.split('#', 1) + return pytz.timezone(etctz.replace(' ', '_')) + + # CentOS has a ZONE setting in /etc/sysconfig/clock, + # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and + # Gentoo has a TIMEZONE setting in /etc/conf.d/clock + # We look through these files for a timezone: + + zone_re = re.compile('\s*ZONE\s*=\s*\"') + timezone_re = re.compile('\s*TIMEZONE\s*=\s*\"') + end_re = re.compile('\"') + + for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'): + tzpath = os.path.join(_root, filename) + if not os.path.exists(tzpath): + continue + with open(tzpath, 'rt') as tzfile: + data = tzfile.readlines() + + for line in data: + # Look for the ZONE= setting. + match = zone_re.match(line) + if match is None: + # No ZONE= setting. Look for the TIMEZONE= setting. + match = timezone_re.match(line) + if match is not None: + # Some setting existed + line = line[match.end():] + etctz = line[:end_re.search(line).start()] + + # We found a timezone + return pytz.timezone(etctz.replace(' ', '_')) + + # No explicit setting existed. Use localtime + for filename in ('etc/localtime', 'usr/local/etc/localtime'): + tzpath = os.path.join(_root, filename) + + if not os.path.exists(tzpath): + continue + with open(tzpath, 'rb') as tzfile: + return pytz.tzfile.build_tzinfo('local', tzfile) + + raise pytz.UnknownTimeZoneError('Can not find any timezone configuration') + +def get_localzone(): + """Get the computers configured local timezone, if any.""" + global _cache_tz + if _cache_tz is None: + _cache_tz = _get_localzone() + return _cache_tz + +def reload_localzone(): + """Reload the cached localzone. You need to call this if the timezone has changed.""" + global _cache_tz + _cache_tz = _get_localzone() + return _cache_tz diff --git a/lib/tzlocal/win32.py b/lib/tzlocal/win32.py new file mode 100644 index 00000000..63445cd7 --- /dev/null +++ b/lib/tzlocal/win32.py @@ -0,0 +1,93 @@ +try: + import _winreg as winreg +except ImportError: + import winreg + +from tzlocal.windows_tz import win_tz +import pytz + +_cache_tz = None + +def valuestodict(key): + """Convert a registry key's values to a dictionary.""" + dict = {} + size = winreg.QueryInfoKey(key)[1] + for i in range(size): + data = winreg.EnumValue(key, i) + dict[data[0]] = data[1] + return dict + +def get_localzone_name(): + # Windows is special. It has unique time zone names (in several + # meanings of the word) available, but unfortunately, they can be + # translated to the language of the operating system, so we need to + # do a backwards lookup, by going through all time zones and see which + # one matches. + handle = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + + TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation" + localtz = winreg.OpenKey(handle, TZLOCALKEYNAME) + keyvalues = valuestodict(localtz) + localtz.Close() + if 'TimeZoneKeyName' in keyvalues: + # Windows 7 (and Vista?) + + # For some reason this returns a string with loads of NUL bytes at + # least on some systems. I don't know if this is a bug somewhere, I + # just work around it. 
+ tzkeyname = keyvalues['TimeZoneKeyName'].split('\x00', 1)[0] + else: + # Windows 2000 or XP + + # This is the localized name: + tzwin = keyvalues['StandardName'] + + # Open the list of timezones to look up the real name: + TZKEYNAME = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones" + tzkey = winreg.OpenKey(handle, TZKEYNAME) + + # Now, match this value to Time Zone information + tzkeyname = None + for i in range(winreg.QueryInfoKey(tzkey)[0]): + subkey = winreg.EnumKey(tzkey, i) + sub = winreg.OpenKey(tzkey, subkey) + data = valuestodict(sub) + sub.Close() + try: + if data['Std'] == tzwin: + tzkeyname = subkey + break + except KeyError: + # This timezone didn't have proper configuration. + # Ignore it. + pass + + tzkey.Close() + handle.Close() + + if tzkeyname is None: + raise LookupError('Can not find Windows timezone configuration') + + timezone = win_tz.get(tzkeyname) + if timezone is None: + # Nope, that didn't work. Try adding "Standard Time", + # it seems to work a lot of times: + timezone = win_tz.get(tzkeyname + " Standard Time") + + # Return what we have. + if timezone is None: + raise pytz.UnknownTimeZoneError('Can not find timezone ' + tzkeyname) + + return timezone + +def get_localzone(): + """Returns the zoneinfo-based tzinfo object that matches the Windows-configured timezone.""" + global _cache_tz + if _cache_tz is None: + _cache_tz = pytz.timezone(get_localzone_name()) + return _cache_tz + +def reload_localzone(): + """Reload the cached localzone. You need to call this if the timezone has changed.""" + global _cache_tz + _cache_tz = pytz.timezone(get_localzone_name()) diff --git a/lib/tzlocal/windows_tz.py b/lib/tzlocal/windows_tz.py new file mode 100644 index 00000000..0790bb48 --- /dev/null +++ b/lib/tzlocal/windows_tz.py @@ -0,0 +1,542 @@ +# This file is autogenerated by the get_windows_info.py script +# Do not edit. +win_tz = {'AUS Central Standard Time': 'Australia/Darwin', + 'AUS Eastern Standard Time': 'Australia/Sydney', + 'Afghanistan Standard Time': 'Asia/Kabul', + 'Alaskan Standard Time': 'America/Anchorage', + 'Arab Standard Time': 'Asia/Riyadh', + 'Arabian Standard Time': 'Asia/Dubai', + 'Arabic Standard Time': 'Asia/Baghdad', + 'Argentina Standard Time': 'America/Buenos_Aires', + 'Atlantic Standard Time': 'America/Halifax', + 'Azerbaijan Standard Time': 'Asia/Baku', + 'Azores Standard Time': 'Atlantic/Azores', + 'Bahia Standard Time': 'America/Bahia', + 'Bangladesh Standard Time': 'Asia/Dhaka', + 'Belarus Standard Time': 'Europe/Minsk', + 'Canada Central Standard Time': 'America/Regina', + 'Cape Verde Standard Time': 'Atlantic/Cape_Verde', + 'Caucasus Standard Time': 'Asia/Yerevan', + 'Cen. Australia Standard Time': 'Australia/Adelaide', + 'Central America Standard Time': 'America/Guatemala', + 'Central Asia Standard Time': 'Asia/Almaty', + 'Central Brazilian Standard Time': 'America/Cuiaba', + 'Central Europe Standard Time': 'Europe/Budapest', + 'Central European Standard Time': 'Europe/Warsaw', + 'Central Pacific Standard Time': 'Pacific/Guadalcanal', + 'Central Standard Time': 'America/Chicago', + 'Central Standard Time (Mexico)': 'America/Mexico_City', + 'China Standard Time': 'Asia/Shanghai', + 'Dateline Standard Time': 'Etc/GMT+12', + 'E. Africa Standard Time': 'Africa/Nairobi', + 'E. Australia Standard Time': 'Australia/Brisbane', + 'E. 
South America Standard Time': 'America/Sao_Paulo', + 'Eastern Standard Time': 'America/New_York', + 'Egypt Standard Time': 'Africa/Cairo', + 'Ekaterinburg Standard Time': 'Asia/Yekaterinburg', + 'FLE Standard Time': 'Europe/Kiev', + 'Fiji Standard Time': 'Pacific/Fiji', + 'GMT Standard Time': 'Europe/London', + 'GTB Standard Time': 'Europe/Bucharest', + 'Georgian Standard Time': 'Asia/Tbilisi', + 'Greenland Standard Time': 'America/Godthab', + 'Greenwich Standard Time': 'Atlantic/Reykjavik', + 'Hawaiian Standard Time': 'Pacific/Honolulu', + 'India Standard Time': 'Asia/Calcutta', + 'Iran Standard Time': 'Asia/Tehran', + 'Israel Standard Time': 'Asia/Jerusalem', + 'Jordan Standard Time': 'Asia/Amman', + 'Kaliningrad Standard Time': 'Europe/Kaliningrad', + 'Korea Standard Time': 'Asia/Seoul', + 'Libya Standard Time': 'Africa/Tripoli', + 'Line Islands Standard Time': 'Pacific/Kiritimati', + 'Magadan Standard Time': 'Asia/Magadan', + 'Mauritius Standard Time': 'Indian/Mauritius', + 'Middle East Standard Time': 'Asia/Beirut', + 'Montevideo Standard Time': 'America/Montevideo', + 'Morocco Standard Time': 'Africa/Casablanca', + 'Mountain Standard Time': 'America/Denver', + 'Mountain Standard Time (Mexico)': 'America/Chihuahua', + 'Myanmar Standard Time': 'Asia/Rangoon', + 'N. Central Asia Standard Time': 'Asia/Novosibirsk', + 'Namibia Standard Time': 'Africa/Windhoek', + 'Nepal Standard Time': 'Asia/Katmandu', + 'New Zealand Standard Time': 'Pacific/Auckland', + 'Newfoundland Standard Time': 'America/St_Johns', + 'North Asia East Standard Time': 'Asia/Irkutsk', + 'North Asia Standard Time': 'Asia/Krasnoyarsk', + 'Pacific SA Standard Time': 'America/Santiago', + 'Pacific Standard Time': 'America/Los_Angeles', + 'Pacific Standard Time (Mexico)': 'America/Santa_Isabel', + 'Pakistan Standard Time': 'Asia/Karachi', + 'Paraguay Standard Time': 'America/Asuncion', + 'Romance Standard Time': 'Europe/Paris', + 'Russia Time Zone 10': 'Asia/Srednekolymsk', + 'Russia Time Zone 11': 'Asia/Kamchatka', + 'Russia Time Zone 3': 'Europe/Samara', + 'Russian Standard Time': 'Europe/Moscow', + 'SA Eastern Standard Time': 'America/Cayenne', + 'SA Pacific Standard Time': 'America/Bogota', + 'SA Western Standard Time': 'America/La_Paz', + 'SE Asia Standard Time': 'Asia/Bangkok', + 'Samoa Standard Time': 'Pacific/Apia', + 'Singapore Standard Time': 'Asia/Singapore', + 'South Africa Standard Time': 'Africa/Johannesburg', + 'Sri Lanka Standard Time': 'Asia/Colombo', + 'Syria Standard Time': 'Asia/Damascus', + 'Taipei Standard Time': 'Asia/Taipei', + 'Tasmania Standard Time': 'Australia/Hobart', + 'Tokyo Standard Time': 'Asia/Tokyo', + 'Tonga Standard Time': 'Pacific/Tongatapu', + 'Turkey Standard Time': 'Europe/Istanbul', + 'US Eastern Standard Time': 'America/Indianapolis', + 'US Mountain Standard Time': 'America/Phoenix', + 'UTC': 'Etc/GMT', + 'UTC+12': 'Etc/GMT-12', + 'UTC-02': 'Etc/GMT+2', + 'UTC-11': 'Etc/GMT+11', + 'Ulaanbaatar Standard Time': 'Asia/Ulaanbaatar', + 'Venezuela Standard Time': 'America/Caracas', + 'Vladivostok Standard Time': 'Asia/Vladivostok', + 'W. Australia Standard Time': 'Australia/Perth', + 'W. Central Africa Standard Time': 'Africa/Lagos', + 'W. 
Europe Standard Time': 'Europe/Berlin', + 'West Asia Standard Time': 'Asia/Tashkent', + 'West Pacific Standard Time': 'Pacific/Port_Moresby', + 'Yakutsk Standard Time': 'Asia/Yakutsk'} + +# Old name for the win_tz variable: +tz_names = win_tz + +tz_win = {'Africa/Abidjan': 'Greenwich Standard Time', + 'Africa/Accra': 'Greenwich Standard Time', + 'Africa/Addis_Ababa': 'E. Africa Standard Time', + 'Africa/Algiers': 'W. Central Africa Standard Time', + 'Africa/Asmera': 'E. Africa Standard Time', + 'Africa/Bamako': 'Greenwich Standard Time', + 'Africa/Bangui': 'W. Central Africa Standard Time', + 'Africa/Banjul': 'Greenwich Standard Time', + 'Africa/Bissau': 'Greenwich Standard Time', + 'Africa/Blantyre': 'South Africa Standard Time', + 'Africa/Brazzaville': 'W. Central Africa Standard Time', + 'Africa/Bujumbura': 'South Africa Standard Time', + 'Africa/Cairo': 'Egypt Standard Time', + 'Africa/Casablanca': 'Morocco Standard Time', + 'Africa/Ceuta': 'Romance Standard Time', + 'Africa/Conakry': 'Greenwich Standard Time', + 'Africa/Dakar': 'Greenwich Standard Time', + 'Africa/Dar_es_Salaam': 'E. Africa Standard Time', + 'Africa/Djibouti': 'E. Africa Standard Time', + 'Africa/Douala': 'W. Central Africa Standard Time', + 'Africa/El_Aaiun': 'Morocco Standard Time', + 'Africa/Freetown': 'Greenwich Standard Time', + 'Africa/Gaborone': 'South Africa Standard Time', + 'Africa/Harare': 'South Africa Standard Time', + 'Africa/Johannesburg': 'South Africa Standard Time', + 'Africa/Juba': 'E. Africa Standard Time', + 'Africa/Kampala': 'E. Africa Standard Time', + 'Africa/Khartoum': 'E. Africa Standard Time', + 'Africa/Kigali': 'South Africa Standard Time', + 'Africa/Kinshasa': 'W. Central Africa Standard Time', + 'Africa/Lagos': 'W. Central Africa Standard Time', + 'Africa/Libreville': 'W. Central Africa Standard Time', + 'Africa/Lome': 'Greenwich Standard Time', + 'Africa/Luanda': 'W. Central Africa Standard Time', + 'Africa/Lubumbashi': 'South Africa Standard Time', + 'Africa/Lusaka': 'South Africa Standard Time', + 'Africa/Malabo': 'W. Central Africa Standard Time', + 'Africa/Maputo': 'South Africa Standard Time', + 'Africa/Maseru': 'South Africa Standard Time', + 'Africa/Mbabane': 'South Africa Standard Time', + 'Africa/Mogadishu': 'E. Africa Standard Time', + 'Africa/Monrovia': 'Greenwich Standard Time', + 'Africa/Nairobi': 'E. Africa Standard Time', + 'Africa/Ndjamena': 'W. Central Africa Standard Time', + 'Africa/Niamey': 'W. Central Africa Standard Time', + 'Africa/Nouakchott': 'Greenwich Standard Time', + 'Africa/Ouagadougou': 'Greenwich Standard Time', + 'Africa/Porto-Novo': 'W. Central Africa Standard Time', + 'Africa/Sao_Tome': 'Greenwich Standard Time', + 'Africa/Tripoli': 'Libya Standard Time', + 'Africa/Tunis': 'W. 
Central Africa Standard Time', + 'Africa/Windhoek': 'Namibia Standard Time', + 'America/Anchorage': 'Alaskan Standard Time', + 'America/Anguilla': 'SA Western Standard Time', + 'America/Antigua': 'SA Western Standard Time', + 'America/Araguaina': 'SA Eastern Standard Time', + 'America/Argentina/La_Rioja': 'Argentina Standard Time', + 'America/Argentina/Rio_Gallegos': 'Argentina Standard Time', + 'America/Argentina/Salta': 'Argentina Standard Time', + 'America/Argentina/San_Juan': 'Argentina Standard Time', + 'America/Argentina/San_Luis': 'Argentina Standard Time', + 'America/Argentina/Tucuman': 'Argentina Standard Time', + 'America/Argentina/Ushuaia': 'Argentina Standard Time', + 'America/Aruba': 'SA Western Standard Time', + 'America/Asuncion': 'Paraguay Standard Time', + 'America/Bahia': 'Bahia Standard Time', + 'America/Bahia_Banderas': 'Central Standard Time (Mexico)', + 'America/Barbados': 'SA Western Standard Time', + 'America/Belem': 'SA Eastern Standard Time', + 'America/Belize': 'Central America Standard Time', + 'America/Blanc-Sablon': 'SA Western Standard Time', + 'America/Boa_Vista': 'SA Western Standard Time', + 'America/Bogota': 'SA Pacific Standard Time', + 'America/Boise': 'Mountain Standard Time', + 'America/Buenos_Aires': 'Argentina Standard Time', + 'America/Cambridge_Bay': 'Mountain Standard Time', + 'America/Campo_Grande': 'Central Brazilian Standard Time', + 'America/Cancun': 'Central Standard Time (Mexico)', + 'America/Caracas': 'Venezuela Standard Time', + 'America/Catamarca': 'Argentina Standard Time', + 'America/Cayenne': 'SA Eastern Standard Time', + 'America/Cayman': 'SA Pacific Standard Time', + 'America/Chicago': 'Central Standard Time', + 'America/Chihuahua': 'Mountain Standard Time (Mexico)', + 'America/Coral_Harbour': 'SA Pacific Standard Time', + 'America/Cordoba': 'Argentina Standard Time', + 'America/Costa_Rica': 'Central America Standard Time', + 'America/Creston': 'US Mountain Standard Time', + 'America/Cuiaba': 'Central Brazilian Standard Time', + 'America/Curacao': 'SA Western Standard Time', + 'America/Danmarkshavn': 'UTC', + 'America/Dawson': 'Pacific Standard Time', + 'America/Dawson_Creek': 'US Mountain Standard Time', + 'America/Denver': 'Mountain Standard Time', + 'America/Detroit': 'Eastern Standard Time', + 'America/Dominica': 'SA Western Standard Time', + 'America/Edmonton': 'Mountain Standard Time', + 'America/Eirunepe': 'SA Pacific Standard Time', + 'America/El_Salvador': 'Central America Standard Time', + 'America/Fortaleza': 'SA Eastern Standard Time', + 'America/Glace_Bay': 'Atlantic Standard Time', + 'America/Godthab': 'Greenland Standard Time', + 'America/Goose_Bay': 'Atlantic Standard Time', + 'America/Grand_Turk': 'SA Western Standard Time', + 'America/Grenada': 'SA Western Standard Time', + 'America/Guadeloupe': 'SA Western Standard Time', + 'America/Guatemala': 'Central America Standard Time', + 'America/Guayaquil': 'SA Pacific Standard Time', + 'America/Guyana': 'SA Western Standard Time', + 'America/Halifax': 'Atlantic Standard Time', + 'America/Havana': 'Eastern Standard Time', + 'America/Hermosillo': 'US Mountain Standard Time', + 'America/Indiana/Knox': 'Central Standard Time', + 'America/Indiana/Marengo': 'US Eastern Standard Time', + 'America/Indiana/Petersburg': 'Eastern Standard Time', + 'America/Indiana/Tell_City': 'Central Standard Time', + 'America/Indiana/Vevay': 'US Eastern Standard Time', + 'America/Indiana/Vincennes': 'Eastern Standard Time', + 'America/Indiana/Winamac': 'Eastern Standard Time', + 
'America/Indianapolis': 'US Eastern Standard Time', + 'America/Inuvik': 'Mountain Standard Time', + 'America/Iqaluit': 'Eastern Standard Time', + 'America/Jamaica': 'SA Pacific Standard Time', + 'America/Jujuy': 'Argentina Standard Time', + 'America/Juneau': 'Alaskan Standard Time', + 'America/Kentucky/Monticello': 'Eastern Standard Time', + 'America/Kralendijk': 'SA Western Standard Time', + 'America/La_Paz': 'SA Western Standard Time', + 'America/Lima': 'SA Pacific Standard Time', + 'America/Los_Angeles': 'Pacific Standard Time', + 'America/Louisville': 'Eastern Standard Time', + 'America/Lower_Princes': 'SA Western Standard Time', + 'America/Maceio': 'SA Eastern Standard Time', + 'America/Managua': 'Central America Standard Time', + 'America/Manaus': 'SA Western Standard Time', + 'America/Marigot': 'SA Western Standard Time', + 'America/Martinique': 'SA Western Standard Time', + 'America/Matamoros': 'Central Standard Time', + 'America/Mazatlan': 'Mountain Standard Time (Mexico)', + 'America/Mendoza': 'Argentina Standard Time', + 'America/Menominee': 'Central Standard Time', + 'America/Merida': 'Central Standard Time (Mexico)', + 'America/Mexico_City': 'Central Standard Time (Mexico)', + 'America/Moncton': 'Atlantic Standard Time', + 'America/Monterrey': 'Central Standard Time (Mexico)', + 'America/Montevideo': 'Montevideo Standard Time', + 'America/Montreal': 'Eastern Standard Time', + 'America/Montserrat': 'SA Western Standard Time', + 'America/Nassau': 'Eastern Standard Time', + 'America/New_York': 'Eastern Standard Time', + 'America/Nipigon': 'Eastern Standard Time', + 'America/Nome': 'Alaskan Standard Time', + 'America/Noronha': 'UTC-02', + 'America/North_Dakota/Beulah': 'Central Standard Time', + 'America/North_Dakota/Center': 'Central Standard Time', + 'America/North_Dakota/New_Salem': 'Central Standard Time', + 'America/Ojinaga': 'Mountain Standard Time', + 'America/Panama': 'SA Pacific Standard Time', + 'America/Pangnirtung': 'Eastern Standard Time', + 'America/Paramaribo': 'SA Eastern Standard Time', + 'America/Phoenix': 'US Mountain Standard Time', + 'America/Port-au-Prince': 'Eastern Standard Time', + 'America/Port_of_Spain': 'SA Western Standard Time', + 'America/Porto_Velho': 'SA Western Standard Time', + 'America/Puerto_Rico': 'SA Western Standard Time', + 'America/Rainy_River': 'Central Standard Time', + 'America/Rankin_Inlet': 'Central Standard Time', + 'America/Recife': 'SA Eastern Standard Time', + 'America/Regina': 'Canada Central Standard Time', + 'America/Resolute': 'Central Standard Time', + 'America/Rio_Branco': 'SA Pacific Standard Time', + 'America/Santa_Isabel': 'Pacific Standard Time (Mexico)', + 'America/Santarem': 'SA Eastern Standard Time', + 'America/Santiago': 'Pacific SA Standard Time', + 'America/Santo_Domingo': 'SA Western Standard Time', + 'America/Sao_Paulo': 'E. 
South America Standard Time', + 'America/Scoresbysund': 'Azores Standard Time', + 'America/Sitka': 'Alaskan Standard Time', + 'America/St_Barthelemy': 'SA Western Standard Time', + 'America/St_Johns': 'Newfoundland Standard Time', + 'America/St_Kitts': 'SA Western Standard Time', + 'America/St_Lucia': 'SA Western Standard Time', + 'America/St_Thomas': 'SA Western Standard Time', + 'America/St_Vincent': 'SA Western Standard Time', + 'America/Swift_Current': 'Canada Central Standard Time', + 'America/Tegucigalpa': 'Central America Standard Time', + 'America/Thule': 'Atlantic Standard Time', + 'America/Thunder_Bay': 'Eastern Standard Time', + 'America/Tijuana': 'Pacific Standard Time', + 'America/Toronto': 'Eastern Standard Time', + 'America/Tortola': 'SA Western Standard Time', + 'America/Vancouver': 'Pacific Standard Time', + 'America/Whitehorse': 'Pacific Standard Time', + 'America/Winnipeg': 'Central Standard Time', + 'America/Yakutat': 'Alaskan Standard Time', + 'America/Yellowknife': 'Mountain Standard Time', + 'Antarctica/Casey': 'W. Australia Standard Time', + 'Antarctica/Davis': 'SE Asia Standard Time', + 'Antarctica/DumontDUrville': 'West Pacific Standard Time', + 'Antarctica/Macquarie': 'Central Pacific Standard Time', + 'Antarctica/Mawson': 'West Asia Standard Time', + 'Antarctica/McMurdo': 'New Zealand Standard Time', + 'Antarctica/Palmer': 'Pacific SA Standard Time', + 'Antarctica/Rothera': 'SA Eastern Standard Time', + 'Antarctica/Syowa': 'E. Africa Standard Time', + 'Antarctica/Vostok': 'Central Asia Standard Time', + 'Arctic/Longyearbyen': 'W. Europe Standard Time', + 'Asia/Aden': 'Arab Standard Time', + 'Asia/Almaty': 'Central Asia Standard Time', + 'Asia/Amman': 'Jordan Standard Time', + 'Asia/Anadyr': 'Russia Time Zone 11', + 'Asia/Aqtau': 'West Asia Standard Time', + 'Asia/Aqtobe': 'West Asia Standard Time', + 'Asia/Ashgabat': 'West Asia Standard Time', + 'Asia/Baghdad': 'Arabic Standard Time', + 'Asia/Bahrain': 'Arab Standard Time', + 'Asia/Baku': 'Azerbaijan Standard Time', + 'Asia/Bangkok': 'SE Asia Standard Time', + 'Asia/Beirut': 'Middle East Standard Time', + 'Asia/Bishkek': 'Central Asia Standard Time', + 'Asia/Brunei': 'Singapore Standard Time', + 'Asia/Calcutta': 'India Standard Time', + 'Asia/Chita': 'North Asia East Standard Time', + 'Asia/Choibalsan': 'Ulaanbaatar Standard Time', + 'Asia/Colombo': 'Sri Lanka Standard Time', + 'Asia/Damascus': 'Syria Standard Time', + 'Asia/Dhaka': 'Bangladesh Standard Time', + 'Asia/Dili': 'Tokyo Standard Time', + 'Asia/Dubai': 'Arabian Standard Time', + 'Asia/Dushanbe': 'West Asia Standard Time', + 'Asia/Hong_Kong': 'China Standard Time', + 'Asia/Hovd': 'SE Asia Standard Time', + 'Asia/Irkutsk': 'North Asia East Standard Time', + 'Asia/Jakarta': 'SE Asia Standard Time', + 'Asia/Jayapura': 'Tokyo Standard Time', + 'Asia/Jerusalem': 'Israel Standard Time', + 'Asia/Kabul': 'Afghanistan Standard Time', + 'Asia/Kamchatka': 'Russia Time Zone 11', + 'Asia/Karachi': 'Pakistan Standard Time', + 'Asia/Katmandu': 'Nepal Standard Time', + 'Asia/Khandyga': 'Yakutsk Standard Time', + 'Asia/Krasnoyarsk': 'North Asia Standard Time', + 'Asia/Kuala_Lumpur': 'Singapore Standard Time', + 'Asia/Kuching': 'Singapore Standard Time', + 'Asia/Kuwait': 'Arab Standard Time', + 'Asia/Macau': 'China Standard Time', + 'Asia/Magadan': 'Magadan Standard Time', + 'Asia/Makassar': 'Singapore Standard Time', + 'Asia/Manila': 'Singapore Standard Time', + 'Asia/Muscat': 'Arabian Standard Time', + 'Asia/Nicosia': 'GTB Standard Time', + 'Asia/Novokuznetsk': 'North 
Asia Standard Time', + 'Asia/Novosibirsk': 'N. Central Asia Standard Time', + 'Asia/Omsk': 'N. Central Asia Standard Time', + 'Asia/Oral': 'West Asia Standard Time', + 'Asia/Phnom_Penh': 'SE Asia Standard Time', + 'Asia/Pontianak': 'SE Asia Standard Time', + 'Asia/Pyongyang': 'Korea Standard Time', + 'Asia/Qatar': 'Arab Standard Time', + 'Asia/Qyzylorda': 'Central Asia Standard Time', + 'Asia/Rangoon': 'Myanmar Standard Time', + 'Asia/Riyadh': 'Arab Standard Time', + 'Asia/Saigon': 'SE Asia Standard Time', + 'Asia/Sakhalin': 'Vladivostok Standard Time', + 'Asia/Samarkand': 'West Asia Standard Time', + 'Asia/Seoul': 'Korea Standard Time', + 'Asia/Shanghai': 'China Standard Time', + 'Asia/Singapore': 'Singapore Standard Time', + 'Asia/Srednekolymsk': 'Russia Time Zone 10', + 'Asia/Taipei': 'Taipei Standard Time', + 'Asia/Tashkent': 'West Asia Standard Time', + 'Asia/Tbilisi': 'Georgian Standard Time', + 'Asia/Tehran': 'Iran Standard Time', + 'Asia/Thimphu': 'Bangladesh Standard Time', + 'Asia/Tokyo': 'Tokyo Standard Time', + 'Asia/Ulaanbaatar': 'Ulaanbaatar Standard Time', + 'Asia/Urumqi': 'Central Asia Standard Time', + 'Asia/Ust-Nera': 'Vladivostok Standard Time', + 'Asia/Vientiane': 'SE Asia Standard Time', + 'Asia/Vladivostok': 'Vladivostok Standard Time', + 'Asia/Yakutsk': 'Yakutsk Standard Time', + 'Asia/Yekaterinburg': 'Ekaterinburg Standard Time', + 'Asia/Yerevan': 'Caucasus Standard Time', + 'Atlantic/Azores': 'Azores Standard Time', + 'Atlantic/Bermuda': 'Atlantic Standard Time', + 'Atlantic/Canary': 'GMT Standard Time', + 'Atlantic/Cape_Verde': 'Cape Verde Standard Time', + 'Atlantic/Faeroe': 'GMT Standard Time', + 'Atlantic/Madeira': 'GMT Standard Time', + 'Atlantic/Reykjavik': 'Greenwich Standard Time', + 'Atlantic/South_Georgia': 'UTC-02', + 'Atlantic/St_Helena': 'Greenwich Standard Time', + 'Atlantic/Stanley': 'SA Eastern Standard Time', + 'Australia/Adelaide': 'Cen. Australia Standard Time', + 'Australia/Brisbane': 'E. Australia Standard Time', + 'Australia/Broken_Hill': 'Cen. Australia Standard Time', + 'Australia/Currie': 'Tasmania Standard Time', + 'Australia/Darwin': 'AUS Central Standard Time', + 'Australia/Hobart': 'Tasmania Standard Time', + 'Australia/Lindeman': 'E. Australia Standard Time', + 'Australia/Melbourne': 'AUS Eastern Standard Time', + 'Australia/Perth': 'W. Australia Standard Time', + 'Australia/Sydney': 'AUS Eastern Standard Time', + 'CST6CDT': 'Central Standard Time', + 'EST5EDT': 'Eastern Standard Time', + 'Etc/GMT': 'UTC', + 'Etc/GMT+1': 'Cape Verde Standard Time', + 'Etc/GMT+10': 'Hawaiian Standard Time', + 'Etc/GMT+11': 'UTC-11', + 'Etc/GMT+12': 'Dateline Standard Time', + 'Etc/GMT+2': 'UTC-02', + 'Etc/GMT+3': 'SA Eastern Standard Time', + 'Etc/GMT+4': 'SA Western Standard Time', + 'Etc/GMT+5': 'SA Pacific Standard Time', + 'Etc/GMT+6': 'Central America Standard Time', + 'Etc/GMT+7': 'US Mountain Standard Time', + 'Etc/GMT-1': 'W. Central Africa Standard Time', + 'Etc/GMT-10': 'West Pacific Standard Time', + 'Etc/GMT-11': 'Central Pacific Standard Time', + 'Etc/GMT-12': 'UTC+12', + 'Etc/GMT-13': 'Tonga Standard Time', + 'Etc/GMT-14': 'Line Islands Standard Time', + 'Etc/GMT-2': 'South Africa Standard Time', + 'Etc/GMT-3': 'E. Africa Standard Time', + 'Etc/GMT-4': 'Arabian Standard Time', + 'Etc/GMT-5': 'West Asia Standard Time', + 'Etc/GMT-6': 'Central Asia Standard Time', + 'Etc/GMT-7': 'SE Asia Standard Time', + 'Etc/GMT-8': 'Singapore Standard Time', + 'Etc/GMT-9': 'Tokyo Standard Time', + 'Etc/UTC': 'UTC', + 'Europe/Amsterdam': 'W. 
Europe Standard Time', + 'Europe/Andorra': 'W. Europe Standard Time', + 'Europe/Athens': 'GTB Standard Time', + 'Europe/Belgrade': 'Central Europe Standard Time', + 'Europe/Berlin': 'W. Europe Standard Time', + 'Europe/Bratislava': 'Central Europe Standard Time', + 'Europe/Brussels': 'Romance Standard Time', + 'Europe/Bucharest': 'GTB Standard Time', + 'Europe/Budapest': 'Central Europe Standard Time', + 'Europe/Busingen': 'W. Europe Standard Time', + 'Europe/Chisinau': 'GTB Standard Time', + 'Europe/Copenhagen': 'Romance Standard Time', + 'Europe/Dublin': 'GMT Standard Time', + 'Europe/Gibraltar': 'W. Europe Standard Time', + 'Europe/Guernsey': 'GMT Standard Time', + 'Europe/Helsinki': 'FLE Standard Time', + 'Europe/Isle_of_Man': 'GMT Standard Time', + 'Europe/Istanbul': 'Turkey Standard Time', + 'Europe/Jersey': 'GMT Standard Time', + 'Europe/Kaliningrad': 'Kaliningrad Standard Time', + 'Europe/Kiev': 'FLE Standard Time', + 'Europe/Lisbon': 'GMT Standard Time', + 'Europe/Ljubljana': 'Central Europe Standard Time', + 'Europe/London': 'GMT Standard Time', + 'Europe/Luxembourg': 'W. Europe Standard Time', + 'Europe/Madrid': 'Romance Standard Time', + 'Europe/Malta': 'W. Europe Standard Time', + 'Europe/Mariehamn': 'FLE Standard Time', + 'Europe/Minsk': 'Belarus Standard Time', + 'Europe/Monaco': 'W. Europe Standard Time', + 'Europe/Moscow': 'Russian Standard Time', + 'Europe/Oslo': 'W. Europe Standard Time', + 'Europe/Paris': 'Romance Standard Time', + 'Europe/Podgorica': 'Central Europe Standard Time', + 'Europe/Prague': 'Central Europe Standard Time', + 'Europe/Riga': 'FLE Standard Time', + 'Europe/Rome': 'W. Europe Standard Time', + 'Europe/Samara': 'Russia Time Zone 3', + 'Europe/San_Marino': 'W. Europe Standard Time', + 'Europe/Sarajevo': 'Central European Standard Time', + 'Europe/Simferopol': 'Russian Standard Time', + 'Europe/Skopje': 'Central European Standard Time', + 'Europe/Sofia': 'FLE Standard Time', + 'Europe/Stockholm': 'W. Europe Standard Time', + 'Europe/Tallinn': 'FLE Standard Time', + 'Europe/Tirane': 'Central Europe Standard Time', + 'Europe/Uzhgorod': 'FLE Standard Time', + 'Europe/Vaduz': 'W. Europe Standard Time', + 'Europe/Vatican': 'W. Europe Standard Time', + 'Europe/Vienna': 'W. Europe Standard Time', + 'Europe/Vilnius': 'FLE Standard Time', + 'Europe/Volgograd': 'Russian Standard Time', + 'Europe/Warsaw': 'Central European Standard Time', + 'Europe/Zagreb': 'Central European Standard Time', + 'Europe/Zaporozhye': 'FLE Standard Time', + 'Europe/Zurich': 'W. Europe Standard Time', + 'Indian/Antananarivo': 'E. Africa Standard Time', + 'Indian/Chagos': 'Central Asia Standard Time', + 'Indian/Christmas': 'SE Asia Standard Time', + 'Indian/Cocos': 'Myanmar Standard Time', + 'Indian/Comoro': 'E. Africa Standard Time', + 'Indian/Kerguelen': 'West Asia Standard Time', + 'Indian/Mahe': 'Mauritius Standard Time', + 'Indian/Maldives': 'West Asia Standard Time', + 'Indian/Mauritius': 'Mauritius Standard Time', + 'Indian/Mayotte': 'E. 
Africa Standard Time', + 'Indian/Reunion': 'Mauritius Standard Time', + 'MST7MDT': 'Mountain Standard Time', + 'PST8PDT': 'Pacific Standard Time', + 'Pacific/Apia': 'Samoa Standard Time', + 'Pacific/Auckland': 'New Zealand Standard Time', + 'Pacific/Efate': 'Central Pacific Standard Time', + 'Pacific/Enderbury': 'Tonga Standard Time', + 'Pacific/Fakaofo': 'Tonga Standard Time', + 'Pacific/Fiji': 'Fiji Standard Time', + 'Pacific/Funafuti': 'UTC+12', + 'Pacific/Galapagos': 'Central America Standard Time', + 'Pacific/Guadalcanal': 'Central Pacific Standard Time', + 'Pacific/Guam': 'West Pacific Standard Time', + 'Pacific/Honolulu': 'Hawaiian Standard Time', + 'Pacific/Johnston': 'Hawaiian Standard Time', + 'Pacific/Kiritimati': 'Line Islands Standard Time', + 'Pacific/Kosrae': 'Central Pacific Standard Time', + 'Pacific/Kwajalein': 'UTC+12', + 'Pacific/Majuro': 'UTC+12', + 'Pacific/Midway': 'UTC-11', + 'Pacific/Nauru': 'UTC+12', + 'Pacific/Niue': 'UTC-11', + 'Pacific/Noumea': 'Central Pacific Standard Time', + 'Pacific/Pago_Pago': 'UTC-11', + 'Pacific/Palau': 'Tokyo Standard Time', + 'Pacific/Ponape': 'Central Pacific Standard Time', + 'Pacific/Port_Moresby': 'West Pacific Standard Time', + 'Pacific/Rarotonga': 'Hawaiian Standard Time', + 'Pacific/Saipan': 'West Pacific Standard Time', + 'Pacific/Tahiti': 'Hawaiian Standard Time', + 'Pacific/Tarawa': 'UTC+12', + 'Pacific/Tongatapu': 'Tonga Standard Time', + 'Pacific/Truk': 'West Pacific Standard Time', + 'Pacific/Wake': 'UTC+12', + 'Pacific/Wallis': 'UTC+12'} diff --git a/mylar/PostProcessor.py b/mylar/PostProcessor.py index 9bc57312..a55c696f 100755 --- a/mylar/PostProcessor.py +++ b/mylar/PostProcessor.py @@ -68,6 +68,8 @@ class PostProcessor(object): self.fileop = shutil.move self.valreturn = [] + self.extensions = ('.cbr', '.cbz', '.pdf') + self.failed_files = 0 self.log = '' def _log(self, message, level=logger.message): #level=logger.MESSAGE): @@ -165,10 +167,10 @@ class PostProcessor(object): def duplicate_process(self, dupeinfo): #path to move 'should' be the entire path to the given file - path_to_move = dupeinfo[0]['to_dupe'] + path_to_move = dupeinfo['to_dupe'] file_to_move = os.path.split(path_to_move)[1] - if dupeinfo[0]['action'] == 'dupe_src' and mylar.FILE_OPTS == 'move': + if dupeinfo['action'] == 'dupe_src' and mylar.FILE_OPTS == 'move': logger.info('[DUPLICATE-CLEANUP] New File will be post-processed. Moving duplicate [' + path_to_move + '] to Duplicate Dump Folder for manual intervention.') else: if mylar.FILE_OPTS == 'move': @@ -204,7 +206,7 @@ class PostProcessor(object): orig_folder = sub_path else: orig_folder = self.nzb_folder - + #make sure we don't delete the directory passed via manual-pp and ajust for trailling slashes or not if orig_folder.endswith('/') or orig_folder.endswith('\\'): tmp_folder = orig_folder[:-1] @@ -305,6 +307,8 @@ class PostProcessor(object): logger.fdebug(module + ' NZBGET Download folder option enabled. Directory set to : ' + self.nzb_folder) myDB = db.DBConnection() + self.oneoffinlist = False + if self.nzb_name == 'Manual Run': logger.fdebug (module + ' Manual Run initiated') #Manual postprocessing on a folder. @@ -313,7 +317,7 @@ class PostProcessor(object): filelist = flc.listFiles() if filelist['comiccount'] == 0: # is None: logger.warn('There were no files located - check the debugging logs if you think this is in error.') - return + return logger.info('I have located ' + str(filelist['comiccount']) + ' files that I should be able to post-process. 
Continuing...') #load the hashes for torrents so continual post-processing of same issues don't occur. @@ -490,7 +494,7 @@ class PostProcessor(object): issyr = None #logger.fdebug(module + ' issuedate:' + str(issuechk['IssueDate'])) #logger.fdebug(module + ' issuechk: ' + str(issuechk['IssueDate'][5:7])) - + #logger.info(module + ' ReleaseDate: ' + str(issuechk['ReleaseDate'])) #logger.info(module + ' IssueDate: ' + str(issuechk['IssueDate'])) if issuechk['ReleaseDate'] is not None and issuechk['ReleaseDate'] != '0000-00-00': @@ -550,7 +554,7 @@ class PostProcessor(object): datematch = "False" elif len(watchvals) == 1 and int(tmp_watchlist_vol) == 1: logger.fdebug(module + '[ISSUE-VERIFY][Lone Volume MATCH] Volume label of ' + str(watch_values['ComicVersion']) + ' indicates only volume for this series on your watchlist.') - elif int(tmp_watchlist_vol) > 1: + elif int(tmp_watchlist_vol) > 1: logger.fdebug(module + '[ISSUE-VERIFY][Lone Volume FAILURE] Volume label of ' + str(watch_values['ComicVersion']) + ' indicates that there is more than one volume for this series, but the one on your watchlist has no volume label set.') datematch = "False" @@ -558,7 +562,7 @@ class PostProcessor(object): #now we see if the issue year matches exactly to what we have within Mylar. if int(watch_issueyear) == int(watchmatch['issue_year']): logger.fdebug(module + '[ISSUE-VERIFY][Issue Year MATCH] Issue Year of ' + str(watch_issueyear) + ' is a match to the year found in the filename of : ' + str(watchmatch['issue_year'])) - datematch = 'True' + datematch = 'True' else: logger.fdebug(module + '[ISSUE-VERIFY][Issue Year FAILURE] Issue Year of ' + str(watch_issueyear) + ' does NOT match the year found in the filename of : ' + str(watchmatch['issue_year'])) logger.fdebug(module + '[ISSUE-VERIFY] Checking against complete date to see if month published could allow for different publication year.') @@ -578,7 +582,8 @@ class PostProcessor(object): "ComicID": cs['ComicID'], "IssueID": issuechk['IssueID'], "IssueNumber": issuechk['Issue_Number'], - "ComicName": cs['ComicName']}) + "ComicName": cs['ComicName'], + "One-Off": False}) else: logger.fdebug(module + '[NON-MATCH: ' + cs['ComicName'] + '-' + cs['ComicID'] + '] Incorrect series - not populating..continuing post-processing') continue @@ -592,7 +597,7 @@ class PostProcessor(object): #we should setup for manual post-processing of story-arc issues here #we can also search by ComicID to just grab those particular arcs as an alternative as well (not done) - + #as_d = filechecker.FileChecker() #as_dinfo = as_d.dynamic_replace(helpers.conversion(fl['series_name'])) #mod_seriesname = as_dinfo['mod_seriesname'] @@ -801,7 +806,7 @@ class PostProcessor(object): if metaresponse == "fail": logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') - elif metaresponse == "unrar error": + elif any([metaresponse == "unrar error", metaresponse == "corrupt"]): logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') continue #launch failed download handling here. 
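For reference, the widened failure check in the hunk above (any([metaresponse == "unrar error", metaresponse == "corrupt"])) is equivalent to a tuple membership test. A minimal sketch; the response strings come from the hunk, the helper name is illustrative only:

def is_bad_archive(metaresponse):
    # Same result as: any([metaresponse == "unrar error", metaresponse == "corrupt"])
    return metaresponse in ("unrar error", "corrupt")

# usage sketch
for resp in ("fail", "unrar error", "corrupt"):
    print('%s -> %s' % (resp, is_bad_archive(resp)))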
@@ -843,10 +848,10 @@ class PostProcessor(object): if renamed_file: dfilename = renamed_file['nfilename'] logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) - + #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename if mylar.READ2FILENAME: - + logger.fdebug(module + ' readingorder#: ' + str(ml['ReadingOrder'])) if int(ml['ReadingOrder']) < 10: readord = "00" + str(ml['ReadingOrder']) elif int(ml['ReadingOrder']) >= 10 and int(ml['ReadingOrder']) <= 99: readord = "0" + str(ml['ReadingOrder']) @@ -879,7 +884,7 @@ class PostProcessor(object): #IssArcID = 'S' + str(ml['IssueArcID']) myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', ['S' + str(ml['IssueArcID']),ml['StoryArc']]) myDB.action('DELETE from nzblog WHERE IssueID=? AND SARC=?', [ml['IssueArcID'],ml['StoryArc']]) - + logger.fdebug(module + ' IssueArcID: ' + str(ml['IssueArcID'])) ctrlVal = {"IssueArcID": ml['IssueArcID']} newVal = {"Status": "Downloaded", @@ -889,315 +894,402 @@ class PostProcessor(object): logger.fdebug(module + ' [' + ml['StoryArc'] + '] Post-Processing completed for: ' + grab_dst) - else: - nzbname = self.nzb_name - #remove extensions from nzb_name if they somehow got through (Experimental most likely) - extensions = ('.cbr', '.cbz') - - if nzbname.lower().endswith(extensions): - fd, ext = os.path.splitext(nzbname) - self._log("Removed extension from nzb: " + ext) - nzbname = re.sub(str(ext), '', str(nzbname)) - - #replace spaces - # let's change all space to decimals for simplicity - logger.fdebug('[NZBNAME]: ' + nzbname) - #gotta replace & or escape it - nzbname = re.sub("\&", 'and', nzbname) - nzbname = re.sub('[\,\:\?\'\+]', '', nzbname) - nzbname = re.sub('[\(\)]', ' ', nzbname) - logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname) - nzbname = re.sub('.cbr', '', nzbname).strip() - nzbname = re.sub('.cbz', '', nzbname).strip() - nzbname = re.sub('[\.\_]', ' ', nzbname).strip() - nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces. - logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname) - nzbname = re.sub('\s', '.', nzbname) - - logger.fdebug(module + ' After conversions, nzbname is : ' + str(nzbname)) -# if mylar.USE_NZBGET==1: -# nzbname=self.nzb_name - self._log("nzbname: " + str(nzbname)) - - nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() - - self.oneoff = False - if nzbiss is None: - self._log("Failure - could not initially locate nzbfile in my database to rename.") - logger.fdebug(module + ' Failure - could not locate nzbfile initially') - # if failed on spaces, change it all to decimals and try again. - nzbname = re.sub('[\(\)]', '', str(nzbname)) - self._log("trying again with this nzbname: " + str(nzbname)) - logger.fdebug(module + ' Trying to locate nzbfile again with nzbname of : ' + str(nzbname)) - nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() - if nzbiss is None: - logger.error(module + ' Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') - #set it up to run manual post-processing on self.nzb_folder - self._log('Unable to locate downloaded file within items I have snatched. 
Attempting to parse the filename directly and process.') - self.valreturn.append({"self.log": self.log, - "mode": 'outside'}) - return self.queue.put(self.valreturn) + else: + #one-off manual pp'd of torrents + oneofflist = myDB.select("select s.Issue_Number, s.ComicName, s.IssueID, s.ComicID, s.Provider, w.PUBLISHER, w.weeknumber, w.year from snatched as s inner join nzblog as n on s.IssueID = n.IssueID and s.Hash is not NULL inner join weekly as w on s.IssueID = w.IssueID WHERE (s.Provider ='32P' or s.Provider='TPSE' or s.Provider='WWT' or s.Provider='DEM') AND n.OneOff == 1;") + if oneofflist is None: + logger.fdebug(module + ' No one-off\'s have ever been snatched using Mylar.') else: - self._log("I corrected and found the nzb as : " + str(nzbname)) - logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname)) - #issueid = nzbiss['IssueID'] + oneoffvals = [] + oneoff_issuelist = [] + nm = 0 + for ofl in oneofflist: + oneoffvals.append({"ComicName": ofl['ComicName'], + "ComicPublisher": ofl['PUBLISHER'], + "Issue_Number": ofl['Issue_Number'], + "AlternateSearch": None, + "ComicID": ofl['ComicID'], + "IssueID": ofl['IssueID'], + "WatchValues": {"SeriesYear": None, + "LatestDate": None, + "ComicVersion": None, + "Publisher": ofl['PUBLISHER'], + "Total": None, + "ComicID": ofl['ComicID'], + "IsArc": False}}) - issueid = nzbiss['IssueID'] - logger.fdebug(module + ' Issueid: ' + str(issueid)) - sarc = nzbiss['SARC'] - self.oneoff = nzbiss['OneOff'] - tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone() + for fl in filelist['comiclist']: + #logger.info('fl: %s' % fl) + for ofv in oneoffvals: + #logger.info('ofv: %s' % ofv) + wm = filechecker.FileChecker(watchcomic=ofv['ComicName'], Publisher=ofv['ComicPublisher'], AlternateSearch=None, manual=ofv['WatchValues']) + watchmatch = wm.matchIT(fl) + if watchmatch['process_status'] == 'fail': + nm+=1 + continue + else: + temploc= watchmatch['justthedigits'].replace('_', ' ') + temploc = re.sub('[\#\']', '', temploc) - comicid = None - comicname = None - issuenumber = None - if tmpiss is not None: - comicid = tmpiss['ComicID'] - comicname = tmpiss['ComicName'] - issuenumber = tmpiss['Issue_Number'] - elif all([self.oneoff is not None, mylar.ALT_PULL == 2]): - oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone() - if oneinfo is not None: - comicid = oneinfo['ComicID'] - comicname = oneinfo['COMIC'] - issuenumber = oneinfo['ISSUE'] - publisher = oneinfo['PUBLISHER'] - self.oneoff = True - logger.info(module + ' Discovered %s # %s by %s [comicid:%s][issueid:%s]' % (comicname, issuenumber, publisher, comicid, issueid)) - #use issueid to get publisher, series, year, issue number + logger.info('watchmatch: %s' % watchmatch) + if 'annual' in temploc.lower(): + biannchk = re.sub('-', '', temploc.lower()).strip() + if 'biannual' in biannchk: + logger.fdebug(module + ' Bi-Annual detected.') + fcdigit = helpers.issuedigits(re.sub('biannual', '', str(biannchk)).strip()) + else: + fcdigit = helpers.issuedigits(re.sub('annual', '', str(temploc.lower())).strip()) + logger.fdebug(module + ' Annual detected [' + str(fcdigit) +']. ComicID assigned as ' + str(ofv['ComicID'])) + annchk = "yes" + else: + fcdigit = helpers.issuedigits(temploc) - annchk = "no" -# if 'annual' in nzbname.lower(): -# logger.info(module + ' Annual detected.') -# annchk = "yes" -# issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? 
AND ComicName NOT NULL", [issueid]).fetchone() -# else: -# issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if fcdigit == helpers.issuedigits(ofv['Issue_Number']): + if watchmatch['sub']: + clocation = os.path.join(watchmatch['comiclocation'], watchmatch['sub'], helpers.conversion(watchmatch['comicfilename'])) + else: + clocation = os.path.join(watchmatch['comiclocation'],helpers.conversion(watchmatch['comicfilename'])) + oneoff_issuelist.append({"ComicLocation": clocation, + "ComicID": ofv['ComicID'], + "IssueID": ofv['IssueID'], + "IssueNumber": ofv['Issue_Number'], + "ComicName": ofv['ComicName'], + "One-Off": True}) + self.oneoffinlist = True + else: + logger.fdebug(module + ' No corresponding issue # in dB found for %s # %s' % (ofv['ComicName'],ofv['Issue_Number'])) + continue - issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() - if issuenzb is None: - logger.info(module + ' Could not detect as a standard issue - checking against annuals.') - issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() - if issuenzb is None: - logger.info(module + ' issuenzb not found.') - #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume - #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. - if 'S' in issueid: - sandwich = issueid - elif 'G' in issueid or '-' in issueid: - sandwich = 1 - elif any([self.oneoff is True, issueid >= '900000', issueid == '1']): - logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') - sandwich = None #arbitrarily set it to None just to force one-off downloading below. - else: - logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') + logger.fdebug(module + '[SUCCESSFUL MATCH: ' + ofv['ComicName'] + '-' + ofv['ComicID'] + '] Match verified for ' + helpers.conversion(fl['comicfilename'])) + break + + if any([self.nzb_name != 'Manual Run', self.oneoffinlist is True]): + ppinfo = [] + if self.oneoffinlist is False: + nzbname = self.nzb_name + #remove extensions from nzb_name if they somehow got through (Experimental most likely) + if nzbname.lower().endswith(self.extensions): + fd, ext = os.path.splitext(nzbname) + self._log("Removed extension from nzb: " + ext) + nzbname = re.sub(str(ext), '', str(nzbname)) + + #replace spaces + # let's change all space to decimals for simplicity + logger.fdebug('[NZBNAME]: ' + nzbname) + #gotta replace & or escape it + nzbname = re.sub("\&", 'and', nzbname) + nzbname = re.sub('[\,\:\?\'\+]', '', nzbname) + nzbname = re.sub('[\(\)]', ' ', nzbname) + logger.fdebug('[NZBNAME] nzbname (remove chars): ' + nzbname) + nzbname = re.sub('.cbr', '', nzbname).strip() + nzbname = re.sub('.cbz', '', nzbname).strip() + nzbname = re.sub('[\.\_]', ' ', nzbname).strip() + nzbname = re.sub('\s+', ' ', nzbname) #make sure we remove the extra spaces. + logger.fdebug('[NZBNAME] nzbname (remove extensions, double spaces, convert underscores to spaces): ' + nzbname) + nzbname = re.sub('\s', '.', nzbname) + + logger.fdebug(module + ' After conversions, nzbname is : ' + str(nzbname)) +# if mylar.USE_NZBGET==1: +# nzbname=self.nzb_name + self._log("nzbname: " + str(nzbname)) + + nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? 
or altnzbname=?", [nzbname, nzbname]).fetchone() + + self.oneoff = False + if nzbiss is None: + self._log("Failure - could not initially locate nzbfile in my database to rename.") + logger.fdebug(module + ' Failure - could not locate nzbfile initially') + # if failed on spaces, change it all to decimals and try again. + nzbname = re.sub('[\(\)]', '', str(nzbname)) + self._log("trying again with this nzbname: " + str(nzbname)) + logger.fdebug(module + ' Trying to locate nzbfile again with nzbname of : ' + str(nzbname)) + nzbiss = myDB.selectone("SELECT * from nzblog WHERE nzbname=? or altnzbname=?", [nzbname, nzbname]).fetchone() + if nzbiss is None: + logger.error(module + ' Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') + #set it up to run manual post-processing on self.nzb_folder self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') self.valreturn.append({"self.log": self.log, "mode": 'outside'}) return self.queue.put(self.valreturn) - else: - logger.info(module + ' Successfully located issue as an annual. Continuing.') - annchk = "yes" + else: + self._log("I corrected and found the nzb as : " + str(nzbname)) + logger.fdebug(module + ' Auto-corrected and found the nzb as : ' + str(nzbname)) + #issueid = nzbiss['IssueID'] - if issuenzb is not None: - logger.info(module + ' issuenzb found.') - if helpers.is_number(issueid): - sandwich = int(issuenzb['IssueID']) -# else: -# logger.info(module + ' issuenzb not found.') -# #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume -# #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. -# if 'S' in issueid: -# sandwich = issueid -# elif 'G' in issueid or '-' in issueid: -# sandwich = 1 - if sandwich is not None and helpers.is_number(sandwich): - if sandwich < 900000: - # if sandwich is less than 900000 it's a normal watchlist download. Bypass. - pass + issueid = nzbiss['IssueID'] + logger.fdebug(module + ' Issueid: ' + str(issueid)) + sarc = nzbiss['SARC'] + self.oneoff = nzbiss['OneOff'] + tmpiss = myDB.selectone('SELECT * FROM issues WHERE IssueID=?', [issueid]).fetchone() + + comicid = None + comicname = None + issuenumber = None + if tmpiss is not None: + ppinfo.append({'comicid': tmpiss['ComicID'], + 'issueid': issueid, + 'comicname': tmpiss['ComicName'], + 'issuenumber': tmpiss['Issue_Number'], + 'publisher': None, + 'sarc': sarc, + 'oneoff': self.oneoff}) + + elif all([self.oneoff is not None, mylar.ALT_PULL == 2]): + oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [issueid]).fetchone() + if oneinfo is not None: + ppinfo.append({'comicid': oneinfo['ComicID'], + 'comicname': oneinfo['COMIC'], + 'issuenumber': oneinfo['ISSUE'], + 'publisher': oneinfo['PUBLISHER'], + 'issueid': issueid, + 'sarc': None, + 'oneoff': True}) + + self.oneoff = True + #logger.info(module + ' Discovered %s # %s by %s [comicid:%s][issueid:%s]' % (comicname, issuenumber, publisher, comicid, issueid)) + #use issueid to get publisher, series, year, issue number else: - if any([self.oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: - # this has no issueID, therefore it's a one-off or a manual post-proc. - # At this point, let's just drop it into the Comic Location folder and forget about it.. 
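The block above replaces the old inline handling with an accumulate-and-dispatch pattern: every snatched nzb or one-off becomes one dict in ppinfo, and each dict is handed to the new nzb_or_oneoff_pp() entry point. A stripped-down sketch of that pattern, with dict keys mirroring the ppinfo entries in the hunk (build_ppinfo, process_all and the sample row are illustrative, not Mylar code):

def build_ppinfo(rows):
    # Collect one dict per downloaded item, using the same keys as the hunk above.
    ppinfo = []
    for row in rows:
        ppinfo.append({'comicid': row.get('ComicID'),
                       'issueid': row.get('IssueID'),
                       'comicname': row.get('ComicName'),
                       'issuenumber': row.get('Issue_Number'),
                       'publisher': row.get('PUBLISHER'),
                       'sarc': None,
                       'oneoff': bool(row.get('OneOff'))})
    return ppinfo

def process_all(ppinfo, handler):
    # In Mylar the handler is self.nzb_or_oneoff_pp(tinfo=pp).
    for pp in ppinfo:
        handler(pp)

# usage sketch
def demo_handler(pp):
    print('post-processing %s' % pp)

rows = [{'ComicID': '1', 'IssueID': '900001', 'ComicName': 'Example Series',
         'Issue_Number': '1', 'PUBLISHER': 'Example Comics', 'OneOff': 1}]
process_all(build_ppinfo(rows), demo_handler)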
- if sandwich is not None and 'S' in sandwich: - self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc)) - logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc)) - else: - self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") - logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') - self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) - grdst = mylar.GRABBAG_DIR + for x in oneoff_issuelist: + if x['One-Off'] is True: + oneinfo = myDB.selectone('SELECT * FROM weekly WHERE IssueID=?', [x['IssueID']]).fetchone() + if oneinfo is not None: + ppinfo.append({'comicid': oneinfo['ComicID'], + 'comicname': oneinfo['COMIC'], + 'issuenumber': oneinfo['ISSUE'], + 'publisher': oneinfo['PUBLISHER'], + 'issueid': x['IssueID'], + 'comiclocation': x['ComicLocation'], + 'sarc': None, + 'oneoff': x['One-Off']}) + self.oneoff = True - odir = None - ofilename = None - for root, dirnames, filenames in os.walk(self.nzb_folder): - for filename in filenames: - if filename.lower().endswith(extensions): - odir = root - ofilename = filename - path, ext = os.path.splitext(ofilename) + if len(ppinfo) > 0: + for pp in ppinfo: + logger.info('[PPINFO-POST-PROCESSING-ATTEMPT] %s' % pp) + self.nzb_or_oneoff_pp(tinfo=pp) - if ofilename is None: - logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') - self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) + #if len(manual_list) > 0: + #for ml in manual_list: + # logger.info('[MANUAL-POST-PROCESSING-ATTEMPT] %s' % ml) + #self.nzb_or_oneoff_pp(manual=manual_list) - if odir is None: - odir = self.nzb_folder - - if sandwich is not None and 'S' in sandwich: - issuearcid = re.sub('S', '', issueid) - logger.fdebug(module + ' issuearcid:' + str(issuearcid)) - arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() - if arcdata is None: - logger.warn(module + ' Unable to locate issue within Story Arcs. Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary') - self._log('Unable to locate issue within Story Arcs in orde to properly assign metadata. PostProcessing aborted.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) - - if arcdata['Publisher'] is None: - arcpub = arcdata['IssuePublisher'] - else: - arcpub = arcdata['Publisher'] - - grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) - - if comicid is None: - comicid = arcdata['ComicID'] - if comicname is None: - comicname = arcdata['ComicName'] - if issuenumber is None: - issuenumber = arcdata['IssueNumber'] - issueid = arcdata['IssueID'] - - #tag the meta. - metaresponse = None - - crcvalue = helpers.crc(os.path.join(self.nzb_folder, ofilename)) - - #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging. - #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. 
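Both the removed block above and its replacement locate comic files with a case-insensitive endswith() against an extension tuple, now kept in one place as self.extensions. A self-contained sketch of that scan (the function name and folder path are illustrative):

import os

EXTENSIONS = ('.cbr', '.cbz', '.pdf')   # mirrors the self.extensions tuple added to PostProcessor

def find_comic_files(folder):
    # str.endswith() accepts a tuple, so a single test covers every recognised extension.
    matches = []
    for root, dirnames, filenames in os.walk(folder):
        for filename in filenames:
            if filename.lower().endswith(EXTENSIONS):
                matches.append(os.path.join(root, filename))
    return matches

# usage sketch
# print(find_comic_files('/path/to/downloads'))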
- if all([mylar.ENABLE_META, issueid is not None]): - self._log("Metatagging enabled - proceeding...") - try: - import cmtagmylar - metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) - except ImportError: - logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') - metaresponse = "fail" - - if metaresponse == "fail": - logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') - elif metaresponse == "unrar error": - logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') - #launch failed download handling here. - elif metaresponse.startswith('file not found'): - filename_in_error = os.path.split(metaresponse, '||')[1] - self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...") - logger.error(module + ' The file cannot be found in the location provided for metagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...') - else: - odir = os.path.split(metaresponse)[0] - ofilename = os.path.split(metaresponse)[1] - ext = os.path.splitext(metaresponse)[1] - logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') - self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') - - dfilename = ofilename - - if metaresponse: - src_location = odir - else: - src_location = self.nzb_folder - - grab_src = os.path.join(src_location, ofilename) - self._log("Source Path : " + grab_src) - logger.info(module + ' Source Path : ' + grab_src) - - checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) - if not checkdirectory: - logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) - - #send to renamer here if valid. 
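One note on the metatagging error handling above: os.path.split() accepts only a single path argument, so os.path.split(metaresponse, '||')[1] would raise a TypeError if the 'file not found' branch were ever reached; a plain str.split() does the job. A minimal sketch, assuming the tagger reports the failure as 'file not found||<filename>' (inferred from the startswith() check above; the helper name is illustrative):

def filename_from_metaresponse(metaresponse):
    # Return the missing filename reported by the tagger, or None if this is
    # not a 'file not found' response.
    if metaresponse.startswith('file not found') and '||' in metaresponse:
        return metaresponse.split('||', 1)[1]
    return None

# usage sketch
print(filename_from_metaresponse('file not found||/downloads/Example Series 001 (2017).cbz'))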
- if mylar.RENAME_FILES: - renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) - if renamed_file: - dfilename = renamed_file['nfilename'] - logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) - - - if sandwich is not None and 'S' in sandwich: - #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename - if mylar.READ2FILENAME: - logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) - if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) - elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) - else: readord = str(arcdata['ReadingOrder']) - dfilename = str(readord) + "-" + dfilename - else: - dfilename = ofilename - grab_dst = os.path.join(grdst, dfilename) - else: - grab_dst = os.path.join(grdst, ofilename) - - self._log("Destination Path : " + grab_dst) - - logger.info(module + ' Destination Path : ' + grab_dst) - - logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst)) - - try: - fileoperation = helpers.file_ops(grab_src, grab_dst) - if not fileoperation: - raise OSError - except (OSError, IOError): - logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') - self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.") - return - - #tidyup old path - if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): - self.tidyup(src_location, True) - - #delete entry from nzblog table - myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) - - if sandwich is not None and 'S' in sandwich: - #issuearcid = re.sub('S', '', issueid) - logger.info(module + ' IssueArcID is : ' + str(issuearcid)) - ctrlVal = {"IssueArcID": issuearcid} - newVal = {"Status": "Downloaded", - "Location": grab_dst} - myDB.upsert("readinglist", newVal, ctrlVal) - logger.info(module + ' Updated status to Downloaded') - - logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) - self._log(u"Post Processing SUCCESSFUL! ") - elif self.oneoff is True: - logger.info(module + ' IssueID is : ' + str(issueid)) - ctrlVal = {"IssueID": issueid} - newVal = {"Status": "Downloaded"} - logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) - myDB.upsert("weekly", newVal, ctrlVal) - logger.info(module + ' Updated status to Downloaded') - myDB.upsert("oneoffhistory", newVal, ctrlVal) - logger.info(module + ' Updated history for one-off\'s for tracking purposes') - logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) - self._log(u"Post Processing SUCCESSFUL! ") - - try: - self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) - except: - pass - - self.valreturn.append({"self.log": self.log, - "mode": 'stop'}) - return self.queue.put(self.valreturn) +# annchk = "no" +# issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() +# if issuenzb is None: +# logger.info(module + ' Could not detect as a standard issue - checking against annuals.') +# issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? 
AND ComicName NOT NULL", [issueid]).fetchone() +# if issuenzb is None: +# logger.info(module + ' issuenzb not found.') +# #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume +# #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. +# if 'S' in issueid: +# sandwich = issueid +# elif 'G' in issueid or '-' in issueid: +# sandwich = 1 +# elif any([self.oneoff is True, issueid >= '900000', issueid == '1']): +# logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') +# sandwich = None #arbitrarily set it to None just to force one-off downloading below. +# else: +# logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') +# self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'outside'}) +# return self.queue.put(self.valreturn) +# else: +# logger.info(module + ' Successfully located issue as an annual. Continuing.') +# annchk = "yes" +# +# if issuenzb is not None: +# logger.info(module + ' issuenzb found.') +# if helpers.is_number(issueid): +# sandwich = int(issuenzb['IssueID']) +# if sandwich is not None and helpers.is_number(sandwich): +# if sandwich < 900000: +# # if sandwich is less than 900000 it's a normal watchlist download. Bypass. +# pass +# else: +# if any([self.oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: +# # this has no issueID, therefore it's a one-off or a manual post-proc. +# # At this point, let's just drop it into the Comic Location folder and forget about it.. +# if sandwich is not None and 'S' in sandwich: +# self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc)) +# logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + str(sarc)) +# else: +# self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") +# logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') +# self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) +# grdst = mylar.GRABBAG_DIR +# +# odir = None +# ofilename = None +# for root, dirnames, filenames in os.walk(self.nzb_folder): +# for filename in filenames: +# if filename.lower().endswith(self.extensions): +# odir = root +# ofilename = filename +# path, ext = os.path.splitext(ofilename) +# +# if ofilename is None: +# logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') +# self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# if odir is None: +# odir = self.nzb_folder +# +# if sandwich is not None and 'S' in sandwich: +# issuearcid = re.sub('S', '', issueid) +# logger.fdebug(module + ' issuearcid:' + str(issuearcid)) +# arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() +# if arcdata is None: +# logger.warn(module + ' Unable to locate issue within Story Arcs. 
Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary') +# self._log('Unable to locate issue within Story Arcs in orde to properly assign metadata. PostProcessing aborted.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# if arcdata['Publisher'] is None: +# arcpub = arcdata['IssuePublisher'] +# else: +# arcpub = arcdata['Publisher'] +# +# grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) +# +# if comicid is None: +# comicid = arcdata['ComicID'] +# if comicname is None: +# comicname = arcdata['ComicName'] +# if issuenumber is None: +# issuenumber = arcdata['IssueNumber'] +# issueid = arcdata['IssueID'] +# +# #tag the meta. +# metaresponse = None +# +# crcvalue = helpers.crc(os.path.join(self.nzb_folder, ofilename)) +# +# #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging. +# #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. +# if all([mylar.ENABLE_META, issueid is not None]): +# self._log("Metatagging enabled - proceeding...") +# try: +# import cmtagmylar +# metaresponse = cmtagmylar.run(self.nzb_folder, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) +# except ImportError: +# logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') +# metaresponse = "fail" +# +# if metaresponse == "fail": +# logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') +# elif metaresponse == "unrar error": +# logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') +# #launch failed download handling here. +# elif metaresponse.startswith('file not found'): +# filename_in_error = os.path.split(metaresponse, '||')[1] +# self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...") +# logger.error(module + ' The file cannot be found in the location provided for metagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. Attempting to continue without metatagging...') +# else: +# odir = os.path.split(metaresponse)[0] +# ofilename = os.path.split(metaresponse)[1] +# ext = os.path.splitext(metaresponse)[1] +# logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') +# self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') +# +# dfilename = ofilename +# +# if metaresponse: +# src_location = odir +# else: +# src_location = self.nzb_folder +# +# grab_src = os.path.join(src_location, ofilename) +# self._log("Source Path : " + grab_src) +# logger.info(module + ' Source Path : ' + grab_src) +# +# checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) +# if not checkdirectory: +# logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) +# +# #send to renamer here if valid. 
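The commented-out block above preserves the original 'sandwich' branching that decides how a download is routed; the same logic carries on in the new nzb_or_oneoff_pp() further below. A compact sketch of that classification (the function name is illustrative, and the id is normalised to an int here where the original compares strings):

def classify_issueid(issueid, oneoff=False):
    # 'S'-prefixed ids are story-arc issues, ids containing 'G' or '-' come from
    # GCD multi-volume data, and ids >= 900000 (or explicit one-offs) are treated
    # as one-off downloads; anything else is a normal watchlist item.
    issueid = str(issueid)
    if 'S' in issueid:
        return 'story-arc'
    if 'G' in issueid or '-' in issueid:
        return 'gcd-multi-volume'
    if oneoff or issueid == '1' or (issueid.isdigit() and int(issueid) >= 900000):
        return 'one-off'
    return 'watchlist'

# usage sketch
for iid in ('S1023', 'G55-2', '912345', '41285'):
    print('%s -> %s' % (iid, classify_issueid(iid)))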
+# if mylar.RENAME_FILES: +# renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) +# if renamed_file: +# dfilename = renamed_file['nfilename'] +# logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) +# +# +# if sandwich is not None and 'S' in sandwich: +# #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename +# if mylar.READ2FILENAME: +# logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) +# if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) +# elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) +# else: readord = str(arcdata['ReadingOrder']) +# dfilename = str(readord) + "-" + dfilename +# else: +# dfilename = ofilename +# grab_dst = os.path.join(grdst, dfilename) +# else: +# grab_dst = os.path.join(grdst, ofilename) +# +# self._log("Destination Path : " + grab_dst) +# +# logger.info(module + ' Destination Path : ' + grab_dst) +# +# logger.info(module + '[' + mylar.FILE_OPTS + '] ' + str(ofilename) + ' into directory : ' + str(grab_dst)) +# +# try: +# fileoperation = helpers.file_ops(grab_src, grab_dst) +# if not fileoperation: +# raise OSError +# except (OSError, IOError): +# logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') +# self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.") +# return +# +# #tidyup old path +# if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): +# self.tidyup(src_location, True) +# +# #delete entry from nzblog table +# myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) +# +# if sandwich is not None and 'S' in sandwich: +# #issuearcid = re.sub('S', '', issueid) +# logger.info(module + ' IssueArcID is : ' + str(issuearcid)) +# ctrlVal = {"IssueArcID": issuearcid} +# newVal = {"Status": "Downloaded", +# "Location": grab_dst} +# myDB.upsert("readinglist", newVal, ctrlVal) +# logger.info(module + ' Updated status to Downloaded') +# +# logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) +# self._log(u"Post Processing SUCCESSFUL! ") +# elif self.oneoff is True: +# logger.info(module + ' IssueID is : ' + str(issueid)) +# ctrlVal = {"IssueID": issueid} +# newVal = {"Status": "Downloaded"} +# logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) +# myDB.upsert("weekly", newVal, ctrlVal) +# logger.info(module + ' Updated status to Downloaded') +# myDB.upsert("oneoffhistory", newVal, ctrlVal) +# logger.info(module + ' Updated history for one-off\'s for tracking purposes') +# logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) +# self._log(u"Post Processing SUCCESSFUL! 
") +# +# try: +# self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) +# except: +# pass +# +# self.valreturn.append({"self.log": self.log, +# "mode": 'stop'}) +# return self.queue.put(self.valreturn) if self.nzb_name == 'Manual Run': @@ -1210,8 +1302,9 @@ class PostProcessor(object): return elif len(manual_arclist) > 0: logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') - + i = 0 + logger.info('manual list: %s' % manual_list) for ml in manual_list: i+=1 comicid = ml['ComicID'] @@ -1232,7 +1325,7 @@ class PostProcessor(object): break dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid) - if dupthis[0]['action'] == 'dupe_src' or dupthis[0]['action'] == 'dupe_file': + if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. #'dupe_file' - do not write new file as existing file is better quality #'dupe_src' - write new file, as existing file is a lesser quality (dupe) @@ -1242,50 +1335,362 @@ class PostProcessor(object): logger.warn('Unable to move duplicate file - skipping post-processing of this file.') continue - - if dupthis[0]['action'] == "write" or dupthis[0]['action'] == 'dupe_src': + if any([dupthis['action'] == "write", dupthis['action'] == 'dupe_src']): stat = ' [' + str(i) + '/' + str(len(manual_list)) + ']' self.Process_next(comicid, issueid, issuenumOG, ml, stat) dupthis = None - logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.') + if self.failed_files == 0: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.') + else: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues [FAILED: ' + str(self.failed_files) + ']') return else: - comicid = issuenzb['ComicID'] - issuenumOG = issuenzb['Issue_Number'] - #the self.nzb_folder should contain only the existing filename - dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid) - if dupthis[0]['action'] == 'dupe_src' or dupthis[0]['action'] == 'dupe_file': + pass + # comicid = issuenzb['ComicID'] + # issuenumOG = issuenzb['Issue_Number'] + # #the self.nzb_folder should contain only the existing filename + # dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid) + # if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': + # #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. 
+ # #'dupe_file' - do not write new file as existing file is better quality + # #'dupe_src' - write new file, as existing file is a lesser quality (dupe) + # if mylar.DUPLICATE_DUMP: + # if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): + # dupchkit = self.duplicate_process(dupthis) + # if dupchkit == False: + # logger.warn('Unable to move duplicate file - skipping post-processing of this file.') + # self.valreturn.append({"self.log": self.log, + # "mode": 'stop', + # "issueid": issueid, + # "comicid": comicid}) + # return self.queue.put(self.valreturn) + # + # if dupthis['action'] == "write" or dupthis['action'] == 'dupe_src': + # return self.Process_next(comicid, issueid, issuenumOG) + # else: + # self.valreturn.append({"self.log": self.log, + # "mode": 'stop', + # "issueid": issueid, + # "comicid": comicid}) + # return self.queue.put(self.valreturn) + + def nzb_or_oneoff_pp(self, tinfo=None, manual=None): + module = self.module + myDB = db.DBConnection() + if manual is None: + issueid = tinfo['issueid'] + comicid = tinfo['comicid'] + comicname = tinfo['comicname'] + issuenumber = tinfo['issuenumber'] + publisher = tinfo['publisher'] + sarc = tinfo['sarc'] + oneoff = tinfo['oneoff'] + if oneoff is True: + location = os.path.abspath(os.path.join(tinfo['comiclocation'], os.pardir)) + else: + location = self.nzb_folder + annchk = "no" + issuenzb = myDB.selectone("SELECT * from issues WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if issuenzb is None: + logger.info(module + ' Could not detect as a standard issue - checking against annuals.') + issuenzb = myDB.selectone("SELECT * from annuals WHERE IssueID=? AND ComicName NOT NULL", [issueid]).fetchone() + if issuenzb is None: + logger.info(module + ' issuenzb not found.') + #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume + #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. + if 'S' in issueid: + sandwich = issueid + elif 'G' in issueid or '-' in issueid: + sandwich = 1 + elif any([oneoff is True, issueid >= '900000', issueid == '1']): + logger.info(module + ' [ONE-OFF POST-PROCESSING] One-off download detected. Post-processing as a non-watchlist item.') + sandwich = None #arbitrarily set it to None just to force one-off downloading below. + else: + logger.error(module + ' Unable to locate downloaded file as being initiated via Mylar. Attempting to parse the filename directly and process.') + self._log('Unable to locate downloaded file within items I have snatched. Attempting to parse the filename directly and process.') + self.valreturn.append({"self.log": self.log, + "mode": 'outside'}) + return self.queue.put(self.valreturn) + else: + logger.info(module + ' Successfully located issue as an annual. Continuing.') + annchk = "yes" + + if issuenzb is not None: + logger.info(module + ' issuenzb found.') + if helpers.is_number(issueid): + sandwich = int(issuenzb['IssueID']) + if sandwich is not None and helpers.is_number(sandwich): + if sandwich < 900000: + # if sandwich is less than 900000 it's a normal watchlist download. Bypass. + pass + else: + if any([oneoff is True, issuenzb is None]) or all([sandwich is not None, 'S' in sandwich]) or int(sandwich) >= 900000: + # this has no issueID, therefore it's a one-off or a manual post-proc. + # At this point, let's just drop it into the Comic Location folder and forget about it.. 
+ if sandwich is not None and 'S' in sandwich: + self._log("One-off STORYARC mode enabled for Post-Processing for " + sarc) + logger.info(module + ' One-off STORYARC mode enabled for Post-Processing for ' + sarc) + else: + self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.") + logger.info(module + ' One-off mode enabled for Post-Processing. Will move into Grab-bag directory.') + self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR) + grdst = mylar.GRABBAG_DIR + + odir = location + ofilename = tinfo['comiclocation'] + path, ext = os.path.splitext(ofilename) + + if ofilename is None: + logger.error(module + ' Unable to post-process file as it is not in a valid cbr/cbz format. PostProcessing aborted.') + self._log('Unable to locate downloaded file to rename. PostProcessing aborted.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + if odir is None: + odir = self.nzb_folder + + if sandwich is not None and 'S' in sandwich: + issuearcid = re.sub('S', '', issueid) + logger.fdebug(module + ' issuearcid:' + str(issuearcid)) + arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() + if arcdata is None: + logger.warn(module + ' Unable to locate issue within Story Arcs. Cannot post-process at this time - try to Refresh the Arc and manual post-process if necessary.') + self._log('Unable to locate issue within Story Arcs in orde to properly assign metadata. PostProcessing aborted.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + if arcdata['Publisher'] is None: + arcpub = arcdata['IssuePublisher'] + else: + arcpub = arcdata['Publisher'] + + grdst = helpers.arcformat(arcdata['StoryArc'], helpers.spantheyears(arcdata['StoryArcID']), arcpub) + + if comicid is None: + comicid = arcdata['ComicID'] + if comicname is None: + comicname = arcdata['ComicName'] + if issuenumber is None: + issuenumber = arcdata['IssueNumber'] + issueid = arcdata['IssueID'] + + #tag the meta. + metaresponse = None + crcvalue = helpers.crc(os.path.join(location, ofilename)) + + #if a one-off download from the pull-list, will not have an issueid associated with it, and will fail to due conversion/tagging. + #if altpull/2 method is being used, issueid may already be present so conversion/tagging is possible with some additional fixes. + if all([mylar.ENABLE_META, issueid is not None]): + self._log("Metatagging enabled - proceeding...") + try: + import cmtagmylar + metaresponse = cmtagmylar.run(location, issueid=issueid, filename=os.path.join(self.nzb_folder, ofilename)) + except ImportError: + logger.warn(module + ' comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/') + metaresponse = "fail" + + if metaresponse == "fail": + logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without metatagging...') + elif any([metaresponse == "unrar error", metaresponse == "corrupt"]): + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying it.') + #launch failed download handling here. + elif metaresponse.startswith('file not found'): + filename_in_error = os.path.split(metaresponse, '||')[1] + self._log("The file cannot be found in the location provided for metatagging [" + filename_in_error + "]. 
Please verify it exists, and re-run if necessary.") + logger.error(module + ' The file cannot be found in the location provided for metagging [' + filename_in_error + ']. Please verify it exists, and re-run if necessary.') + else: + odir = os.path.split(metaresponse)[0] + ofilename = os.path.split(metaresponse)[1] + ext = os.path.splitext(metaresponse)[1] + logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') + self._log('Sucessfully wrote metadata to .cbz (' + ofilename + ') - proceeding...') + + dfilename = ofilename + if metaresponse: + src_location = odir + else: + src_location = location + + grab_src = os.path.join(src_location, ofilename) + self._log("Source Path : " + grab_src) + logger.info(module + ' Source Path : ' + grab_src) + + checkdirectory = filechecker.validateAndCreateDirectory(grdst, True, module=module) + if not checkdirectory: + logger.warn(module + ' Error trying to validate/create directory. Aborting this process at this time.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + return self.queue.put(self.valreturn) + + #send to renamer here if valid. + if mylar.RENAME_FILES: + renamed_file = helpers.rename_param(comicid, comicname, issuenumber, dfilename, issueid=issueid, arc=sarc) + if renamed_file: + dfilename = renamed_file['nfilename'] + logger.fdebug(module + ' Renaming file to conform to configuration: ' + ofilename) + + if sandwich is not None and 'S' in sandwich: + #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename + if mylar.READ2FILENAME: + logger.fdebug(module + ' readingorder#: ' + str(arcdata['ReadingOrder'])) + if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) + elif int(arcdata['ReadingOrder']) >= 10 and int(arcdata['ReadingOrder']) <= 99: readord = "0" + str(arcdata['ReadingOrder']) + else: readord = str(arcdata['ReadingOrder']) + dfilename = str(readord) + "-" + dfilename + else: + dfilename = ofilename + grab_dst = os.path.join(grdst, dfilename) + else: + grab_dst = os.path.join(grdst, ofilename) + + self._log("Destination Path : " + grab_dst) + + logger.info(module + ' Destination Path : ' + grab_dst) + logger.info(module + '[' + mylar.FILE_OPTS + '] ' + ofilename + ' into directory : ' + grab_dst) + + try: + fileoperation = helpers.file_ops(grab_src, grab_dst) + if not fileoperation: + raise OSError + except (OSError, IOError): + logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') + self._log("Failed to " + mylar.FILE_OPTS + " " + src + " - check directories and manually re-run.") + return + + #tidyup old path + if any([mylar.FILE_OPTS == 'move', mylar.FILE_OPTS == 'copy']): + self.tidyup(src_location, True) + + #delete entry from nzblog table + myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) + + if sandwich is not None and 'S' in sandwich: + logger.info(module + ' IssueArcID is : ' + str(issuearcid)) + ctrlVal = {"IssueArcID": issuearcid} + newVal = {"Status": "Downloaded", + "Location": grab_dst} + myDB.upsert("readinglist", newVal, ctrlVal) + logger.info(module + ' Updated status to Downloaded') + + logger.info(module + ' Post-Processing completed for: [' + sarc + '] ' + grab_dst) + self._log(u"Post Processing SUCCESSFUL! 
") + elif oneoff is True: + logger.info(module + ' IssueID is : ' + str(issueid)) + ctrlVal = {"IssueID": issueid} + newVal = {"Status": "Downloaded"} + logger.info(module + ' Writing to db: ' + str(newVal) + ' -- ' + str(ctrlVal)) + myDB.upsert("weekly", newVal, ctrlVal) + logger.info(module + ' Updated status to Downloaded') + myDB.upsert("oneoffhistory", newVal, ctrlVal) + logger.info(module + ' Updated history for one-off\'s for tracking purposes') + logger.info(module + ' Post-Processing completed for: [ %s #%s ] %s' % (comicname, issuenumber, grab_dst)) + self._log(u"Post Processing SUCCESSFUL! ") + + try: + self.sendnotify(comicname, issueyear=None, issuenumOG=issuenumber, annchk=annchk, module=module) + except: + pass + + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + + return self.queue.put(self.valreturn) + + else: + manual_list = manual + + if self.nzb_name == 'Manual Run': + #loop through the hits here. + if len(manual_list) == 0 and len(manual_arclist) == 0: + logger.info(module + ' No matches for Manual Run ... exiting.') + return + elif len(manual_arclist) > 0 and len(manual_list) == 0: + logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') + return + elif len(manual_arclist) > 0: + logger.info(module + ' Manual post-processing completed for ' + str(len(manual_arclist)) + ' story-arc issues.') + i = 0 + logger.info('manual list: %s' % manual_list) + for ml in manual_list: + i+=1 + comicid = ml['ComicID'] + issueid = ml['IssueID'] + issuenumOG = ml['IssueNumber'] + #check to see if file is still being written to. + while True: + waiting = False + try: + ctime = max(os.path.getctime(ml['ComicLocation']), os.path.getmtime(ml['ComicLocation'])) + if time.time() > ctime > time.time() - 10: + time.sleep(max(time.time() - ctime, 0)) + waiting = True + else: + break + except: + #file is no longer present in location / can't be accessed. + break + + dupthis = helpers.duplicate_filecheck(ml['ComicLocation'], ComicID=comicid, IssueID=issueid) + if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': + #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. + #'dupe_file' - do not write new file as existing file is better quality #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. 
#'dupe_file' - do not write new file as existing file is better quality #'dupe_src' - write new file, as existing file is a lesser quality (dupe) - if mylar.DUPLICATE_DUMP: - if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): - dupchkit = self.duplicate_process(dupthis) - if dupchkit == False: - logger.warn('Unable to move duplicate file - skipping post-processing of this file.') - self.valreturn.append({"self.log": self.log, - "mode": 'stop', - "issueid": issueid, - "comicid": comicid}) + if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): #DUPLICATE_DUMP + dupchkit = self.duplicate_process(dupthis) + if dupchkit == False: + logger.warn('Unable to move duplicate file - skipping post-processing of this file.') + continue - return self.queue.put(self.valreturn) - - if dupthis[0]['action'] == "write" or dupthis[0]['action'] == 'dupe_src': - return self.Process_next(comicid, issueid, issuenumOG) - else: - self.valreturn.append({"self.log": self.log, - "mode": 'stop', - "issueid": issueid, - "comicid": comicid}) + if any([dupthis['action'] == "write", dupthis['action'] == 'dupe_src']): + stat = ' [' + str(i) + '/' + str(len(manual_list)) + ']' + self.Process_next(comicid, issueid, issuenumOG, ml, stat) + dupthis = None + + if self.failed_files == 0: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues.') + else: + logger.info(module + ' Manual post-processing completed for ' + str(i) + ' issues [FAILED: ' + str(self.failed_files) + ']') + return + + else: + comicid = issuenzb['ComicID'] + issuenumOG = issuenzb['Issue_Number'] + #the self.nzb_folder should contain only the existing filename + dupthis = helpers.duplicate_filecheck(self.nzb_folder, ComicID=comicid, IssueID=issueid) + if dupthis['action'] == 'dupe_src' or dupthis['action'] == 'dupe_file': + #check if duplicate dump folder is enabled and if so move duplicate file in there for manual intervention. + #'dupe_file' - do not write new file as existing file is better quality + #'dupe_src' - write new file, as existing file is a lesser quality (dupe) + if mylar.DUPLICATE_DUMP: + if mylar.DDUMP and not all([mylar.DUPLICATE_DUMP is None, mylar.DUPLICATE_DUMP == '']): + dupchkit = self.duplicate_process(dupthis) + if dupchkit == False: + logger.warn('Unable to move duplicate file - skipping post-processing of this file.') + self.valreturn.append({"self.log": self.log, + "mode": 'stop', + "issueid": issueid, + "comicid": comicid}) + return self.queue.put(self.valreturn) + + if dupthis['action'] == "write" or dupthis['action'] == 'dupe_src': + return self.Process_next(comicid, issueid, issuenumOG) + else: + self.valreturn.append({"self.log": self.log, + "mode": 'stop', + "issueid": issueid, + "comicid": comicid}) + return self.queue.put(self.valreturn) - return self.queue.put(self.valreturn) def Process_next(self, comicid, issueid, issuenumOG, ml=None, stat=None): if stat is None: stat = ' [1/1]' module = self.module annchk = "no" - extensions = ('.cbr', '.cbz', '.pdf') snatchedtorrent = False myDB = db.DBConnection() comicnzb = myDB.selectone("SELECT * from comics WHERE comicid=?", [comicid]).fetchone() @@ -1297,7 +1702,6 @@ class PostProcessor(object): else: logger.fdebug(module + ' Was downloaded from ' + snatchnzb['Provider'] + '. Enabling torrent manual post-processing completion notification.') snatchedtorrent = True - if issuenzb is None: issuenzb = myDB.selectone("SELECT * from annuals WHERE issueid=? 
and comicid=?", [issueid, comicid]).fetchone() annchk = "yes" @@ -1577,7 +1981,7 @@ class PostProcessor(object): ofilename = None for root, dirnames, filenames in os.walk(self.nzb_folder, followlinks=True): for filename in filenames: - if filename.lower().endswith(extensions): + if filename.lower().endswith(self.extensions): odir = root logger.fdebug(module + ' odir (root): ' + odir) ofilename = filename @@ -1594,6 +1998,7 @@ class PostProcessor(object): if ofilename is None: self._log("Unable to locate a valid cbr/cbz file. Aborting post-processing for this filename.") logger.error(module + ' unable to locate a valid cbr/cbz file. Aborting post-processing for this filename.') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1641,23 +2046,32 @@ class PostProcessor(object): if pcheck == "fail": self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...") logger.fdebug(module + ' Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...') + self.failed_files +=1 #we need to set this to the cbz file since not doing it will result in nothing getting moved. #not sure how to do this atm - elif pcheck == "unrar error": - self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.") - logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying a different copy.') - self.valreturn.append({"self.log": self.log, - "mode": 'fail', - "issueid": issueid, - "comicid": comicid, - "comicname": comicnzb['ComicName'], - "issuenumber": issuenzb['Issue_Number'], - "annchk": annchk}) + elif any([pcheck == "unrar error", pcheck == "corrupt"]): + if ml is not None: + self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and not post-processing.") + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and not post-processing.') + self.failed_files +=1 + self.valreturn.append({"self.log": self.log, + "mode": 'stop'}) + else: + self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.") + logger.error(module + ' This is a corrupt archive - whether CRC errors or it is incomplete. Marking as BAD, and retrying a different copy.') + self.valreturn.append({"self.log": self.log, + "mode": 'fail', + "issueid": issueid, + "comicid": comicid, + "comicname": comicnzb['ComicName'], + "issuenumber": issuenzb['Issue_Number'], + "annchk": annchk}) return self.queue.put(self.valreturn) elif pcheck.startswith('file not found'): filename_in_error = os.path.split(pcheck, '||')[1] self._log("The file cannot be found in the location provided [" + filename_in_error + "]. Please verify it exists, and re-run if necessary. Aborting.") logger.error(module + ' The file cannot be found in the location provided [' + filename_in_error + ']. Please verify it exists, and re-run if necessary. 
Aborting') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1669,7 +2083,7 @@ class PostProcessor(object): ext = os.path.splitext(ofilename)[1] self._log("Sucessfully wrote metadata to .cbz - Continuing..") logger.info(module + ' Sucessfully wrote metadata to .cbz (' + ofilename + ') - Continuing..') - #if this is successful, and we're copying to dst then set the file op to move this cbz so we + #if this is successful, and we're copying to dst then set the file op to move this cbz so we #don't leave a cbr/cbz in the origianl directory. #self.fileop = shutil.move #Run Pre-script @@ -1715,7 +2129,7 @@ class PostProcessor(object): # ofilename = None # for root, dirnames, filenames in os.walk(self.nzb_folder, followlinks=True): # for filename in filenames: -# if filename.lower().endswith(extensions): +# if filename.lower().endswith(self.extensions): # odir = root # logger.fdebug(module + ' odir (root): ' + odir) # ofilename = filename @@ -1752,6 +2166,7 @@ class PostProcessor(object): if any([ofilename == odir, ofilename == odir[:-1], ofilename == '']): self._log("There was a problem deciphering the filename/directory - please verify that the filename : [" + ofilename + "] exists in location [" + odir + "]. Aborting.") logger.error(module + ' There was a problem deciphering the filename/directory - please verify that the filename : [' + ofilename + '] exists in location [' + odir + ']. Aborting.') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1762,6 +2177,7 @@ class PostProcessor(object): if ofilename is None or ofilename == '': logger.error(module + ' Aborting PostProcessing - the filename does not exist in the location given. Make sure that ' + self.nzb_folder + ' exists and is the correct location.') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1774,7 +2190,7 @@ class PostProcessor(object): self._log("Rename Files isn't enabled...keeping original filename.") logger.fdebug(module + ' Rename Files is not enabled - keeping original filename.') #check if extension is in nzb_name - will screw up otherwise - if ofilename.lower().endswith(extensions): + if ofilename.lower().endswith(self.extensions): nfilename = ofilename[:-4] else: nfilename = ofilename @@ -1793,6 +2209,7 @@ class PostProcessor(object): checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True, module=module) if not checkdirectory: logger.warn(module + ' Error trying to validate/create directory. 
Aborting this process at this time.') + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1867,7 +2284,7 @@ class PostProcessor(object): except (OSError, IOError): logger.fdebug(module + ' Failed to ' + mylar.FILE_OPTS + ' ' + src + ' - check directories and manually re-run.') logger.fdebug(module + ' Post-Processing ABORTED.') - + self.failed_files +=1 self.valreturn.append({"self.log": self.log, "mode": 'stop'}) return self.queue.put(self.valreturn) @@ -1948,7 +2365,7 @@ class PostProcessor(object): logger.fdebug(module + ' Destination Path : ' + grab_dst) grab_src = dst - logger.fdebug(module + ' Source Path : ' + grab_src) + logger.fdebug(module + ' Source Path : ' + grab_src) logger.info(module + '[' + mylar.ARC_FILEOPS.upper() + '] ' + str(dst) + ' into directory : ' + str(grab_dst)) try: @@ -2032,7 +2449,7 @@ class PostProcessor(object): def sendnotify(self, series, issueyear, issuenumOG, annchk, module): - + if annchk == "no": if issueyear is None: prline = series + ' - issue #' + issuenumOG @@ -2076,7 +2493,7 @@ class PostProcessor(object): if mylar.TELEGRAM_ENABLED: telegram = notifiers.TELEGRAM() telegram.notify(prline, prline2) - + if mylar.SLACK_ENABLED: slack = notifiers.SLACK() slack.notify("Download and Postprocessing completed", prline, module=module) @@ -2098,7 +2515,11 @@ class FolderCheck(): return #monitor a selected folder for 'snatched' files that haven't been processed #junk the queue as it's not needed for folder monitoring, but needed for post-processing to run without error. + helpers.job_management(write=True, job='Folder Monitor', current_run=helpers.utctimestamp(), status='Running') + mylar.MONITOR_STATUS = 'Running' logger.info(self.module + ' Checking folder ' + mylar.CHECK_FOLDER + ' for newly snatched downloads') PostProcess = PostProcessor('Manual Run', mylar.CHECK_FOLDER, queue=self.queue) result = PostProcess.Process() logger.info(self.module + ' Finished checking for newly snatched downloads') + helpers.job_management(write=True, job='Folder Monitor', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.MONITOR_STATUS = 'Waiting' diff --git a/mylar/__init__.py b/mylar/__init__.py index 92cfd8bd..3b357b4c 100644 --- a/mylar/__init__.py +++ b/mylar/__init__.py @@ -33,12 +33,14 @@ import locale import re from threading import Lock, Thread -from apscheduler.scheduler import Scheduler +from apscheduler.schedulers.background import BackgroundScheduler +from apscheduler.triggers.interval import IntervalTrigger + from configobj import ConfigObj import cherrypy -from mylar import logger, versioncheckit, rsscheckit, searchit, weeklypullit, dbupdater, PostProcessor, helpers, scheduler #versioncheck, rsscheck, search, PostProcessor, weeklypull, helpers, scheduler +from mylar import logger, versioncheckit, rsscheckit, searchit, weeklypullit, PostProcessor, updater, helpers FULL_PATH = None PROG_DIR = None @@ -65,9 +67,15 @@ IMPORT_PARSED_COUNT = 0 IMPORT_FAILURE_COUNT = 0 CHECKENABLED = False -SCHED = Scheduler() +SCHED = BackgroundScheduler({ + 'apscheduler.executors.default': { + 'class': 'apscheduler.executors.pool:ThreadPoolExecutor', + 'max_workers': '20' + }, + 'apscheduler.job_defaults.coalesce': 'false', + 'apscheduler.job_defaults.max_instances': '3', + 'apscheduler.timezone': 'UTC'}) -INIT_LOCK = threading.Lock() __INITIALIZED__ = False started = False WRITELOCK = False @@ -76,6 +84,20 @@ IMPORTLOCK = False ## for use with updated scheduler (not working atm) 
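The SCHED instance defined above uses the stock dictionary-style configuration of the bundled lib/apscheduler (APScheduler 3.x layout). A minimal, self-contained sketch of the same setup outside of Mylar — the demo job and the one-minute runtime are illustrative only, not part of the patch:

    import time
    from datetime import datetime
    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.interval import IntervalTrigger

    # Same executor/job defaults as the SCHED instance above: a 20-worker
    # thread pool, no coalescing of missed runs, at most 3 concurrent
    # instances of any one job, and all times handled in UTC.
    sched = BackgroundScheduler({
        'apscheduler.executors.default': {
            'class': 'apscheduler.executors.pool:ThreadPoolExecutor',
            'max_workers': '20'
        },
        'apscheduler.job_defaults.coalesce': 'false',
        'apscheduler.job_defaults.max_instances': '3',
        'apscheduler.timezone': 'UTC'})

    def demo_job():
        # stand-in for a real job body such as ss.run or updater.dbUpdate
        print('job fired at %s (UTC)' % datetime.utcnow())

    sched.add_job(func=demo_job, id='demo', name='Demo Job',
                  trigger=IntervalTrigger(minutes=5, timezone='UTC'))
    sched.start()

    try:
        time.sleep(60)      # the scheduler runs its jobs in daemon threads
    finally:
        sched.shutdown()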
INIT_LOCK = Lock() +SCHED_DBUPDATE_LAST = None +SCHED_RSS_LAST = None +SCHED_SEARCH_LAST = None +SCHED_WEEKLY_LAST = None +SCHED_VERSION_LAST = None +SCHED_MONITOR_LAST = None + +MONITOR_STATUS = 'Waiting' +SEARCH_STATUS = 'Waiting' +RSS_STATUS = 'Waiting' +WEEKLY_STATUS = 'Waiting' +VERSION_STATUS = 'Waiting' +UPDATER_STATUS = 'Waiting' + dbUpdateScheduler = None searchScheduler = None RSSScheduler = None @@ -366,7 +388,6 @@ UPCOMING_SNATCHED = 1 ENABLE_RSS = 0 RSS_CHECKINTERVAL = 20 -RSS_LASTRUN = None #these are used to set the comparison against the post-processing scripts STATIC_COMICRN_VERSION = "1.01" @@ -404,6 +425,7 @@ TPSE_VERIFY = True ENABLE_32P = 0 SEARCH_32P = 0 #0 = use WS to grab torrent groupings, #1 = use 32P to grab torrent groupings +DEEP_SEARCH_32P = 0 #0 = do not take multiple search series results & use ref32p if available, #1= search each search series result for valid issue & posting date MODE_32P = None #0 = legacymode, #1 = authmode KEYS_32P = None RSSFEED_32P = None @@ -411,6 +433,7 @@ PASSKEY_32P = None USERNAME_32P = None PASSWORD_32P = None AUTHKEY_32P = None +INKDROPS_32P = None FEEDINFO_32P = None VERIFY_32P = 1 SNATCHEDTORRENT_NOTIFY = 0 @@ -516,9 +539,11 @@ def initialize(): USE_UTORRENT, UTORRENT_HOST, UTORRENT_USERNAME, UTORRENT_PASSWORD, UTORRENT_LABEL, USE_TRANSMISSION, TRANSMISSION_HOST, TRANSMISSION_USERNAME, TRANSMISSION_PASSWORD, TRANSMISSION_DIRECTORY, USE_DELUGE, DELUGE_HOST, DELUGE_USERNAME, DELUGE_PASSWORD, DELUGE_LABEL, \ USE_QBITTORRENT, QBITTORRENT_HOST, QBITTORRENT_USERNAME, QBITTORRENT_PASSWORD, QBITTORRENT_LABEL, QBITTORRENT_FOLDER, QBITTORRENT_STARTONLOAD, \ ENABLE_META, CMTAGGER_PATH, CBR2CBZ_ONLY, CT_TAG_CR, CT_TAG_CBL, CT_CBZ_OVERWRITE, UNRAR_CMD, CT_SETTINGSPATH, CMTAG_VOLUME, CMTAG_START_YEAR_AS_VOLUME, UPDATE_ENDED, INDIE_PUB, BIGGIE_PUB, IGNORE_HAVETOTAL, SNATCHED_HAVETOTAL, PROVIDER_ORDER, TMP_PROV, \ - dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \ + SCHED, dbUpdateScheduler, searchScheduler, RSSScheduler, WeeklyScheduler, VersionScheduler, FolderMonitorScheduler, \ + SCHED_DBUPDATE_LAST, SCHED_RSS_LAST, SCHED_SEARCH_LAST, SCHED_WEEKLY_LAST, SCHED_VERSION_LAST, SCHED_MONITOR_LAST, \ + MONITOR_STATUS, SEARCH_STATUS, UPDATER_STATUS, VERSION_STATUS, WEEKLY_STATUS, RSS_STATUS, \ ALLOW_PACKS, ENABLE_TORRENTS, TORRENT_DOWNLOADER, MINSEEDS, USE_WATCHDIR, TORRENT_LOCAL, LOCAL_WATCHDIR, TORRENT_SEEDBOX, SEEDBOX_HOST, SEEDBOX_PORT, SEEDBOX_USER, SEEDBOX_PASS, SEEDBOX_WATCHDIR, \ - ENABLE_RSS, RSS_CHECKINTERVAL, RSS_LASTRUN, FAILED_DOWNLOAD_HANDLING, FAILED_AUTO, ENABLE_TORRENT_SEARCH, ENABLE_TPSE, WWTURL, DEMURL, TPSEURL, TPSE_PROXY, TPSE_VERIFY, ENABLE_32P, SEARCH_32P, MODE_32P, KEYS_32P, RSSFEED_32P, USERNAME_32P, PASSWORD_32P, AUTHKEY_32P, PASSKEY_32P, FEEDINFO_32P, VERIFY_32P, SNATCHEDTORRENT_NOTIFY, \ + ENABLE_RSS, RSS_CHECKINTERVAL, FAILED_DOWNLOAD_HANDLING, FAILED_AUTO, ENABLE_TORRENT_SEARCH, ENABLE_TPSE, WWTURL, DEMURL, TPSEURL, TPSE_PROXY, TPSE_VERIFY, ENABLE_32P, SEARCH_32P, DEEP_SEARCH_32P, MODE_32P, KEYS_32P, RSSFEED_32P, USERNAME_32P, PASSWORD_32P, AUTHKEY_32P, INKDROPS_32P, PASSKEY_32P, FEEDINFO_32P, VERIFY_32P, SNATCHEDTORRENT_NOTIFY, \ PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, BOXCAR_ENABLED, BOXCAR_ONSNATCH, BOXCAR_TOKEN, \ PUSHBULLET_ENABLED, PUSHBULLET_APIKEY, PUSHBULLET_DEVICEID, 
PUSHBULLET_CHANNEL_TAG, PUSHBULLET_ONSNATCH, LOCMOVE, NEWCOM_DIR, FFTONEWCOM_DIR, \ PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, \ @@ -764,7 +789,6 @@ def initialize(): ENABLE_RSS = bool(check_setting_int(CFG, 'General', 'enable_rss', 1)) RSS_CHECKINTERVAL = check_setting_str(CFG, 'General', 'rss_checkinterval', '20') - RSS_LASTRUN = check_setting_str(CFG, 'General', 'rss_lastrun', '') FAILED_DOWNLOAD_HANDLING = bool(check_setting_int(CFG, 'General', 'failed_download_handling', 0)) FAILED_AUTO = bool(check_setting_int(CFG, 'General', 'failed_auto', 0)) @@ -794,6 +818,7 @@ def initialize(): else: ENABLE_32P = bool(check_setting_int(CFG, 'Torrents', 'enable_32p', 0)) SEARCH_32P = bool(check_setting_int(CFG, 'Torrents', 'search_32p', 0)) + DEEP_SEARCH_32P = bool(check_setting_int(CFG, 'Torrents', 'deep_search_32p', 0)) MODE_32P = check_setting_int(CFG, 'Torrents', 'mode_32p', 0) #legacy support of older config - reload into old values for consistency. @@ -1291,49 +1316,49 @@ def initialize(): COMICSORT = helpers.ComicSort(sequence='startup') #initialize the scheduler threads here. - dbUpdateScheduler = scheduler.Scheduler(action=dbupdater.dbUpdate(), - cycleTime=datetime.timedelta(hours=48), - runImmediately=False, - threadName="DBUPDATE") + #dbUpdateScheduler = scheduler.Scheduler(action=dbupdater.dbUpdate(), + # cycleTime=datetime.timedelta(minutes=5), + # runImmediately=False, + # threadName="DBUPDATE") - if NZB_STARTUP_SEARCH: - searchrunmode = True - else: - searchrunmode = False + #if NZB_STARTUP_SEARCH: + # searchrunmode = True + #else: + # searchrunmode = False - searchScheduler = scheduler.Scheduler(searchit.CurrentSearcher(), - cycleTime=datetime.timedelta(minutes=SEARCH_INTERVAL), - threadName="SEARCH", - runImmediately=searchrunmode) - - RSSScheduler = scheduler.Scheduler(rsscheckit.tehMain(), - cycleTime=datetime.timedelta(minutes=int(RSS_CHECKINTERVAL)), - threadName="RSSCHECK", - runImmediately=True, - delay=30) - - if ALT_PULL == 2: - weektimer = 4 - else: - weektimer = 24 - - WeeklyScheduler = scheduler.Scheduler(weeklypullit.Weekly(), - cycleTime=datetime.timedelta(hours=weektimer), - threadName="WEEKLYCHECK", - runImmediately=True, - delay=10) - - VersionScheduler = scheduler.Scheduler(versioncheckit.CheckVersion(), - cycleTime=datetime.timedelta(minutes=CHECK_GITHUB_INTERVAL), - threadName="VERSIONCHECK", - runImmediately=False) + #searchScheduler = scheduler.Scheduler(searchit.CurrentSearcher(), + # cycleTime=datetime.timedelta(minutes=SEARCH_INTERVAL), + # threadName="SEARCH", + # runImmediately=searchrunmode) - FolderMonitorScheduler = scheduler.Scheduler(PostProcessor.FolderCheck(), - cycleTime=datetime.timedelta(minutes=int(DOWNLOAD_SCAN_INTERVAL)), - threadName="FOLDERMONITOR", - runImmediately=True, - delay=60) + #RSSScheduler = scheduler.Scheduler(rsscheckit.tehMain(), + # cycleTime=datetime.timedelta(minutes=int(RSS_CHECKINTERVAL)), + # threadName="RSSCHECK", + # runImmediately=True, + # delay=30) + + #if ALT_PULL == 2: + # weektimer = 4 + #else: + # weektimer = 24 + + #WeeklyScheduler = scheduler.Scheduler(weeklypullit.Weekly(), + # cycleTime=datetime.timedelta(hours=weektimer), + # threadName="WEEKLYCHECK", + # runImmediately=True, + # delay=10) + + #VersionScheduler = scheduler.Scheduler(versioncheckit.CheckVersion(), + # cycleTime=datetime.timedelta(minutes=CHECK_GITHUB_INTERVAL), + # threadName="VERSIONCHECK", + # runImmediately=False) + + #FolderMonitorScheduler = 
scheduler.Scheduler(PostProcessor.FolderCheck(), + # cycleTime=datetime.timedelta(minutes=int(DOWNLOAD_SCAN_INTERVAL)), + # threadName="FOLDERMONITOR", + # runImmediately=True, + # delay=60) # Store the original umask UMASK = os.umask(0) @@ -1558,7 +1583,6 @@ def config_write(): new_config['General']['upcoming_snatched'] = int(UPCOMING_SNATCHED) new_config['General']['enable_rss'] = int(ENABLE_RSS) new_config['General']['rss_checkinterval'] = RSS_CHECKINTERVAL - new_config['General']['rss_lastrun'] = RSS_LASTRUN new_config['General']['failed_download_handling'] = int(FAILED_DOWNLOAD_HANDLING) new_config['General']['failed_auto'] = int(FAILED_AUTO) @@ -1598,6 +1622,7 @@ def config_write(): new_config['Torrents']['tpse_verify'] = TPSE_VERIFY new_config['Torrents']['enable_32p'] = int(ENABLE_32P) new_config['Torrents']['search_32p'] = int(SEARCH_32P) + new_config['Torrents']['deep_search_32p'] = int(DEEP_SEARCH_32P) new_config['Torrents']['mode_32p'] = int(MODE_32P) new_config['Torrents']['passkey_32p'] = PASSKEY_32P new_config['Torrents']['rssfeed_32p'] = RSSFEED_32P @@ -1734,7 +1759,7 @@ def config_write(): new_config['TELEGRAM']['telegram_token'] = TELEGRAM_TOKEN new_config['TELEGRAM']['telegram_userid'] = TELEGRAM_USERID new_config['TELEGRAM']['telegram_onsnatch'] = int(TELEGRAM_ONSNATCH) - + new_config['SLACK'] = {} new_config['SLACK']['slack_enabled'] = int(SLACK_ENABLED) new_config['SLACK']['slack_webhook_url'] = SLACK_WEBHOOK_URL @@ -1744,69 +1769,119 @@ def config_write(): def start(): - global __INITIALIZED__, started, \ - dbUpdateScheduler, searchScheduler, RSSScheduler, \ - WeeklyScheduler, VersionScheduler, FolderMonitorScheduler + global __INITIALIZED__, started with INIT_LOCK: if __INITIALIZED__: + #load up the previous runs from the job sql table so we know stuff... + helpers.job_management() + # Start our scheduled background tasks - #from mylar import updater, search, PostProcessor + SCHED.add_job(func=updater.dbUpdate, id='dbupdater', name='DB Updater', args=[None,None,True], trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC')) + #let's do a run at the Wanted issues here (on startup) if enabled. 
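Jobs registered this way (such as 'dbupdater' above) can be forced or paused from the new Manage/Activity tab; with APScheduler 3.x that maps onto standard methods on the scheduler object. A hedged sketch of those operations — the helper names below are illustrative, not Mylar's actual webserve handlers:

    import datetime
    import mylar

    def pause_scheduled_job(job_id):
        # suspend future runs without removing the job or its trigger
        mylar.SCHED.pause_job(job_id)

    def resume_scheduled_job(job_id):
        mylar.SCHED.resume_job(job_id)

    def force_scheduled_job(job_id):
        # pull the next run time forward so the executor fires the job
        # almost immediately; the interval trigger itself stays in place.
        mylar.SCHED.modify_job(job_id, next_run_time=datetime.datetime.now())

    # e.g. force_scheduled_job('dbupdater') or pause_scheduled_job('search')

The Wanted-issues search job set up next can be controlled the same way through its 'search' id.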
+ ss = searchit.CurrentSearcher() + if NZB_STARTUP_SEARCH: + SCHED.add_job(func=ss.run, id='search', next_run_time=datetime.datetime.now(), name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC')) + else: + if SCHED_SEARCH_LAST is not None: + search_timestamp = float(SCHED_SEARCH_LAST) + logger.fdebug('[AUTO-SEARCH] Search last run @ %s' % datetime.datetime.utcfromtimestamp(search_timestamp)) + else: + search_timestamp = helpers.utctimestamp() + (int(SEARCH_INTERVAL) *60) - #SCHED.add_interval_job(updater.dbUpdate, hours=48) - #SCHED.add_interval_job(search.searchforissue, minutes=SEARCH_INTERVAL) + duration_diff = (helpers.utctimestamp() - search_timestamp)/60 + logger.fdebug('[AUTO-SEARCH] duration_diff : %s' % duration_diff) + if duration_diff >= int(SEARCH_INTERVAL): + logger.fdebug('[AUTO-SEARCH]Auto-Search set to a delay of one minute before initialization as it has been %s minutes since the last run' % duration_diff) + SCHED.add_job(func=ss.run, id='search', name='Auto-Search', trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC')) + else: + search_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + ((int(SEARCH_INTERVAL) * 60) - (duration_diff*60))) + logger.fdebug('[AUTO-SEARCH] Scheduling next run @ %s every %s minutes' % (search_diff, SEARCH_INTERVAL)) + SCHED.add_job(func=ss.run, id='search', name='Auto-Search', next_run_time=search_diff, trigger=IntervalTrigger(hours=0, minutes=SEARCH_INTERVAL, timezone='UTC')) if all([ENABLE_TORRENTS, AUTO_SNATCH, OS_DETECT != 'Windows']) and any([TORRENT_DOWNLOADER == 2, TORRENT_DOWNLOADER == 4]): - logger.info('Auto-Snatch of comleted torrents enabled & attempting to backgroun load....') + logger.info('[AUTO-SNATCHER] Auto-Snatch of completed torrents enabled & attempting to backgroun load....') SNPOOL = threading.Thread(target=helpers.worker_main, args=(SNATCHED_QUEUE,), name="AUTO-SNATCHER") SNPOOL.start() - logger.info('Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....') - - #start the db updater scheduler - logger.info('Initializing the DB Updater.') - dbUpdateScheduler.thread.start() - - #start the search scheduler - searchScheduler.thread.start() + logger.info('[AUTO-SNATCHER] Succesfully started Auto-Snatch add-on - will now monitor for completed torrents on client....') helpers.latestdate_fix() #initiate startup rss feeds for torrents/nzbs here... 
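The RSS and Weekly Pullist jobs that follow reuse the same catch-up calculation applied to Auto-Search above: compare the persisted last-run timestamp against the configured interval, fire on startup if the interval has already elapsed, and otherwise wait out only the remaining portion of the interval. A condensed, generic sketch of that calculation (the function name is illustrative):

    import time
    import datetime

    def next_run_for(last_run_ts, interval_minutes):
        """Return the UTC datetime a job should next fire, given the unix
        timestamp of its last completed run (or None if it has never run)."""
        now_ts = time.time()
        if last_run_ts is None:
            # never run before: wait a full interval from now
            return datetime.datetime.utcfromtimestamp(now_ts + interval_minutes * 60)
        overdue_minutes = (now_ts - float(last_run_ts)) / 60
        if overdue_minutes >= interval_minutes:
            # the interval already elapsed while Mylar was down - fire right away
            return datetime.datetime.utcnow()
        # otherwise only the remainder of the interval is left to wait
        remaining_seconds = (interval_minutes - overdue_minutes) * 60
        return datetime.datetime.utcfromtimestamp(now_ts + remaining_seconds)

    # e.g. SCHED.add_job(func=rs.run, id='rss', next_run_time=next_run_for(last, 20),
    #                    trigger=IntervalTrigger(minutes=20, timezone='UTC'))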
if ENABLE_RSS: - #SCHED.add_interval_job(rsscheck.tehMain, minutes=int(RSS_CHECKINTERVAL)) - RSSScheduler.thread.start() - logger.info('Initiating startup-RSS feed checks.') - #rsscheck.tehMain() + logger.info('[RSS-FEEDS] Initiating startup-RSS feed checks.') + if SCHED_RSS_LAST is not None: + rss_timestamp = float(SCHED_RSS_LAST) + logger.info('[RSS-FEEDS] RSS last run @ %s' % datetime.datetime.utcfromtimestamp(rss_timestamp)) + else: + rss_timestamp = helpers.utctimestamp() + (int(RSS_CHECKINTERVAL) *60) + rs = rsscheckit.tehMain() + logger.fdebug('[RSS-FEEDS] rss_timestamp: %s' % rss_timestamp) + logger.fdebug('[RSS-FEEDS] utcfromtimestamp: %s' % helpers.utctimestamp()) + logger.fdebug('[RSS-FEEDS] rss_checkinterval: %s' % (int(RSS_CHECKINTERVAL) * 60)) + logger.fdebug('[RSS-FEEDS] today: %s' % datetime.datetime.utcfromtimestamp(helpers.utctimestamp())) + duration_diff = (helpers.utctimestamp() - rss_timestamp)/60 + logger.fdebug('[RSS-FEEDS] duration_diff (mins): %s' % str(duration_diff)) + if duration_diff >= int(RSS_CHECKINTERVAL): + SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=datetime.datetime.now(), trigger=IntervalTrigger(hours=0, minutes=int(RSS_CHECKINTERVAL), timezone='UTC')) + else: + rss_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (int(RSS_CHECKINTERVAL) * 60) - (duration_diff * 60)) + logger.fdebug('[RSS-FEEDS] Scheduling next run for @ %s every %s minutes' % (rss_diff, RSS_CHECKINTERVAL)) + SCHED.add_job(func=rs.run, id='rss', name='RSS Feeds', args=[True], next_run_time=rss_diff, trigger=IntervalTrigger(hours=0, minutes=int(RSS_CHECKINTERVAL), timezone='UTC')) + + if ALT_PULL == 2: + weektimer = 4 + else: + weektimer = 24 #weekly pull list gets messed up if it's not populated first, so let's populate it then set the scheduler. - logger.info('Checking for existance of Weekly Comic listing...') - #PULLNEW = 'no' #reset the indicator here. - #threading.Thread(target=weeklypull.pullit).start() - #now the scheduler (check every 24 hours) - #SCHED.add_interval_job(weeklypull.pullit, hours=24) - if not NOWEEKLY: - WeeklyScheduler.thread.start() + logger.info('[WEEKLY] Checking for existance of Weekly Comic listing...') - #let's do a run at the Wanted issues here (on startup) if enabled. 
- #if NZB_STARTUP_SEARCH: - # threading.Thread(target=search.searchforissue).start() + #now the scheduler (check every 24 hours) + weekly_interval = weektimer * 60 * 60 + if SCHED_WEEKLY_LAST is not None: + weekly_timestamp = float(SCHED_WEEKLY_LAST) + else: + weekly_timestamp = helpers.utctimestamp() + weekly_interval + + ws = weeklypullit.Weekly() + duration_diff = (helpers.utctimestamp() - weekly_timestamp)/60 + + if duration_diff >= weekly_interval/60: + logger.info('[WEEKLY] Weekly Pull-Update initializing immediately as it has been %s hours since the last run' % (duration_diff/60)) + SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=datetime.datetime.now(), trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC')) + else: + weekly_diff = datetime.datetime.utcfromtimestamp(helpers.utctimestamp() + (weekly_interval - (duration_diff * 60))) + logger.fdebug('[WEEKLY] Scheduling next run for @ %s every %s hours' % (weekly_diff, weektimer)) + SCHED.add_job(func=ws.run, id='weekly', name='Weekly Pullist', next_run_time=weekly_diff, trigger=IntervalTrigger(hours=weektimer, minutes=0, timezone='UTC')) if CHECK_GITHUB: - VersionScheduler.thread.start() - #SCHED.add_interval_job(versioncheck.checkGithub, minutes=CHECK_GITHUB_INTERVAL) + vs = versioncheckit.CheckVersion() + SCHED.add_job(func=vs.run, id='version', name='Check Version', trigger=IntervalTrigger(hours=0, minutes=CHECK_GITHUB_INTERVAL, timezone='UTC')) - #run checkFolder every X minutes (basically Manual Run Post-Processing) + ##run checkFolder every X minutes (basically Manual Run Post-Processing) if ENABLE_CHECK_FOLDER: if DOWNLOAD_SCAN_INTERVAL >0: - logger.info('Enabling folder monitor for : ' + str(CHECK_FOLDER) + ' every ' + str(DOWNLOAD_SCAN_INTERVAL) + ' minutes.') - FolderMonitorScheduler.thread.start() - #SCHED.add_interval_job(helpers.checkFolder, minutes=int(DOWNLOAD_SCAN_INTERVAL)) + logger.info('[FOLDER MONITOR] Enabling folder monitor for : ' + str(CHECK_FOLDER) + ' every ' + str(DOWNLOAD_SCAN_INTERVAL) + ' minutes.') + fm = PostProcessor.FolderCheck() + SCHED.add_job(func=fm.run, id='monitor', name='Folder Monitor', trigger=IntervalTrigger(hours=0, minutes=int(DOWNLOAD_SCAN_INTERVAL), timezone='UTC')) else: - logger.error('You need to specify a monitoring time for the check folder option to work') - SCHED.start() + logger.error('[FOLDER MONITOR] You need to specify a monitoring time for the check folder option to work') + + logger.info('Firing up the Background Schedulers now....') + try: + SCHED.print_jobs() + SCHED.start() + #update the job db here + logger.info('Background Schedulers successfully started...') + helpers.job_management(write=True) #, status='Waiting') + except Exception as e: + logger.info(e) + # Debug + SCHED.print_jobs() started = True @@ -1838,6 +1913,7 @@ def dbcheck(): c.execute('CREATE TABLE IF NOT EXISTS searchresults (SRID TEXT, results Numeric, Series TEXT, publisher TEXT, haveit TEXT, name TEXT, deck TEXT, url TEXT, description TEXT, comicid TEXT, comicimage TEXT, issues TEXT, comicyear TEXT, ogcname TEXT)') c.execute('CREATE TABLE IF NOT EXISTS ref32p (ComicID TEXT UNIQUE, ID TEXT, Series TEXT, Updated TEXT)') c.execute('CREATE TABLE IF NOT EXISTS oneoffhistory (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, Status TEXT, weeknumber TEXT, year TEXT)') + c.execute('CREATE TABLE IF NOT EXISTS jobhistory (JobName TEXT, prev_run_datetime timestamp, prev_run_timestamp REAL, next_run_datetime timestamp, next_run_timestamp REAL, 
last_run_completed TEXT, successful_completions TEXT, failed_completions TEXT, status TEXT)') conn.commit c.close #new @@ -2322,13 +2398,19 @@ def dbcheck(): except sqlite3.OperationalError: c.execute('ALTER TABLE Failed ADD COLUMN DateFailed TEXT') - ## -- Failed Table -- + ## -- Ref32p Table -- try: c.execute('SELECT Updated from ref32p') except sqlite3.OperationalError: c.execute('ALTER TABLE ref32p ADD COLUMN Updated TEXT') + ## -- Jobhistory Table -- + try: + c.execute('SELECT status from jobhistory') + except sqlite3.OperationalError: + c.execute('ALTER TABLE jobhistory ADD COLUMN status TEXT') + #if it's prior to Wednesday, the issue counts will be inflated by one as the online db's everywhere #prepare for the next 'new' release of a series. It's caught in updater.py, so let's just store the #value in the sql so we can display it in the details screen for everyone to wonder at. @@ -2365,6 +2447,12 @@ def dbcheck(): logger.info('Correcting Null entries that make the main page break on startup.') c.execute("UPDATE Comics SET LatestDate='Unknown' WHERE LatestDate='None' or LatestDate is NULL") + job_listing = c.execute('SELECT * FROM jobhistory') + job_history = [] + for jh in job_listing: + job_history.append(jh) + + logger.info('job_history loaded: %s' % job_history) conn.commit() c.close() @@ -2438,51 +2526,55 @@ def halt(): if __INITIALIZED__: - logger.info(u"Aborting all threads") + logger.info(u"Trying to gracefully shutdown the background schedulers...") + try: + SCHED.shutdown() + except: + SCHED.shutdown(wait=False) # abort all the threads - dbUpdateScheduler.abort = True - logger.info(u"Waiting for the DB UPDATE thread to exit") - try: - dbUpdateScheduler.thread.join(10) - except: - pass + #dbUpdateScheduler.abort = True + #logger.info(u"Waiting for the DB UPDATE thread to exit") + #try: + # dbUpdateScheduler.thread.join(10) + #except: + # pass - searchScheduler.abort = True - logger.info(u"Waiting for the SEARCH thread to exit") - try: - searchScheduler.thread.join(10) - except: - pass + #searchScheduler.abort = True + #logger.info(u"Waiting for the SEARCH thread to exit") + #try: + # searchScheduler.thread.join(10) + #except: + # pass - RSSScheduler.abort = True - logger.info(u"Waiting for the RSS CHECK thread to exit") - try: - RSSScheduler.thread.join(10) - except: - pass + #RSSScheduler.abort = True + #logger.info(u"Waiting for the RSS CHECK thread to exit") + #try: + # RSSScheduler.thread.join(10) + #except: + # pass - WeeklyScheduler.abort = True - logger.info(u"Waiting for the WEEKLY CHECK thread to exit") - try: - WeeklyScheduler.thread.join(10) - except: - pass + #WeeklyScheduler.abort = True + #logger.info(u"Waiting for the WEEKLY CHECK thread to exit") + #try: + # WeeklyScheduler.thread.join(10) + #except: + # pass - VersionScheduler.abort = True - logger.info(u"Waiting for the VERSION CHECK thread to exit") - try: - VersionScheduler.thread.join(10) - except: - pass + #VersionScheduler.abort = True + #logger.info(u"Waiting for the VERSION CHECK thread to exit") + #try: + # VersionScheduler.thread.join(10) + #except: + # pass - FolderMonitorScheduler.abort = True - logger.info(u"Waiting for the FOLDER MONITOR thread to exit") - try: - FolderMonitorScheduler.thread.join(10) - except: - pass + #FolderMonitorScheduler.abort = True + #logger.info(u"Waiting for the FOLDER MONITOR thread to exit") + #try: + # FolderMonitorScheduler.thread.join(10) + #except: + # pass if SNPOOL is not None: logger.info('Terminating the auto-snatch thread.') @@ -2503,7 +2595,7 @@ def 
shutdown(restart=False, update=False): cherrypy.engine.exit() - SCHED.shutdown(wait=False) + #SCHED.shutdown(wait=False) config_write() diff --git a/mylar/auth32p.py b/mylar/auth32p.py index 1f25deb9..96453abc 100644 --- a/mylar/auth32p.py +++ b/mylar/auth32p.py @@ -1,6 +1,7 @@ import urllib2 import re import time +import math import datetime import os import requests @@ -48,6 +49,10 @@ class info32p(object): self.authkey = lses.authkey self.passkey = lses.passkey self.uid = lses.uid + try: + mylar.INKDROPS_32P = int(math.floor(float(lses.inkdrops['results'][0]['inkdrops']))) + except: + mylar.INKDROPS_32P = lses.inkdrops['results'][0]['inkdrops'] self.reauthenticate = reauthenticate self.searchterm = searchterm @@ -56,7 +61,7 @@ class info32p(object): def authenticate(self): if self.test: - return True + return {'status': True, 'inkdrops': mylar.INKDROPS_32P} feedinfo = [] @@ -80,7 +85,6 @@ class info32p(object): requests.packages.urllib3.disable_warnings(InsecureRequestWarning) # post to the login form - r = s.post(self.url, verify=verify, allow_redirects=True) #logger.debug(self.module + " Content session reply" + r.text) @@ -154,7 +158,7 @@ class info32p(object): except NameError: logger.warn('Unable to retrieve information from 32Pages - either it is not responding/is down or something else is happening that is stopping me.') return - + if self.reauthenticate: return else: @@ -177,13 +181,13 @@ class info32p(object): for x in spl: publisher_search = re.sub(x, '', publisher_search).strip() logger.info('publisher search set to : ' + publisher_search) - + chk_id = None # lookup the ComicID in the 32p sqlite3 table to pull the series_id to use. if comic_id: chk_id = helpers.checkthe_id(comic_id) - - if not chk_id: + + if any([not chk_id, mylar.DEEP_SEARCH_32P is True]): #generate the dynamic name of the series here so we can match it up as_d = filechecker.FileChecker() as_dinfo = as_d.dynamic_replace(series_search) @@ -234,7 +238,7 @@ class info32p(object): pdata = [] pubmatch = False - if not chk_id: + if any([not chk_id, mylar.DEEP_SEARCH_32P is True]): if mylar.SEARCH_32P: url = 'https://32pag.es/torrents.php' #?action=serieslist&filter=' + series_search #&filter=F params = {'action': 'serieslist', 'filter': series_search} @@ -283,21 +287,14 @@ class info32p(object): if all([len(data) == 0, len(pdata) == 0]): return "no results" - - if len(pdata) == 1: - logger.info(str(len(pdata)) + ' series match the title being search for') - dataset = pdata - searchid = pdata[0]['id'] - elif len(data) == 1: - logger.info(str(len(data)) + ' series match the title being search for') - dataset = data - searchid = data[0]['id'] else: dataset = [] if len(data) > 0: dataset += data if len(pdata) > 0: dataset += pdata + logger.info('dataset: %s' % dataset) + logger.info(str(len(dataset)) + ' series match the tile being searched for on 32P...') if chk_id is None and any([len(data) == 1, len(pdata) == 1]): #update the 32p_reference so we avoid doing a url lookup next time @@ -326,8 +323,6 @@ class info32p(object): #logger.debug(self.module + ' Reply from AJAX: \n %s', d.text) except Exception as e: logger.info(self.module + ' Could not POST URL %s', url) - - try: searchResults = d.json() @@ -337,7 +332,6 @@ class info32p(object): return False #logger.debug(self.module + " Search Result: %s", searchResults) - if searchResults['status'] == 'success' and searchResults['count'] > 0: logger.info('successfully retrieved ' + str(searchResults['count']) + ' search results.') for a in searchResults['details']: @@ 
-392,6 +386,7 @@ class info32p(object): self.authkey = None self.passkey = None self.uid = None + self.inkdrops = None def cookie_exists(self, name): ''' @@ -459,6 +454,19 @@ class info32p(object): self.uid = j['response']['id'] self.authkey = j['response']['authkey'] self.passkey = pk = j['response']['passkey'] + + try: + d = self.ses.get('https://32pag.es/ajax.php', params={'action': 'user_inkdrops'}, verify=True, allow_redirects=True) + except Exception as e: + logger.error('Unable to retreive Inkdrop total : %s' % e) + else: + try: + self.inkdrops = d.json() + except: + logger.error('Inkdrop result did not return valid JSON, unable to verify response') + else: + logger.info('inkdrops: %s' % self.inkdrops) + return True def valid_login_attempt(self, un, pw): diff --git a/mylar/cmtagmylar.py b/mylar/cmtagmylar.py index 0613eb78..da70f76f 100644 --- a/mylar/cmtagmylar.py +++ b/mylar/cmtagmylar.py @@ -158,6 +158,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen original_tagoptions = tagoptions og_tagtype = None initial_ctrun = True + error_remove = False while (i <= tagcnt): if initial_ctrun: @@ -207,7 +208,12 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen if initial_ctrun and 'exported successfully' in out: logger.fdebug(module + '[COMIC-TAGGER] : ' +str(out)) #Archive exported successfully to: X-Men v4 008 (2014) (Digital) (Nahga-Empire).cbz (Original deleted) - tmpfilename = re.sub('Archive exported successfully to: ', '', out.rstrip()) + if 'Error deleting' in filepath: + tf1 = out.find('exported successfully to: ') + tmpfilename = out[tf1 + len('exported successfully to: '):].strip() + error_remove = True + else: + tmpfilename = re.sub('Archive exported successfully to: ', '', out.rstrip()) if mylar.FILE_OPTS == 'move': tmpfilename = re.sub('\(Original deleted\)', '', tmpfilename).strip() tmpf = tmpfilename.decode('utf-8') @@ -232,7 +238,7 @@ def run(dirName, nzbName=None, issueid=None, comversion=None, manual=None, filen if 'file is not expected size' in out: logger.fdebug('%s Output: %s' % (module,out)) tidyup(og_filepath, new_filepath, new_folder, manualmeta) - return 'fail' #'corrupt' + return 'corrupt' else: logger.warn(module + '[COMIC-TAGGER][CBR-TO-CBZ] Failed to convert cbr to cbz - check permissions on folder : ' + mylar.CACHE_DIR + ' and/or the location where Mylar is trying to tag the files from.') tidyup(og_filepath, new_filepath, new_folder, manualmeta) diff --git a/mylar/dbupdater.py b/mylar/dbupdater.py index 137b78d4..5f7b5638 100644 --- a/mylar/dbupdater.py +++ b/mylar/dbupdater.py @@ -17,15 +17,17 @@ from __future__ import with_statement import mylar -from mylar import logger +from mylar import logger, helpers #import threading class dbUpdate(): - def __init__(self): + def __init__(self, sched): + self.sched = sched pass def run(self): logger.info('[DBUpdate] Updating Database.') - mylar.updater.dbUpdate() - return + helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running') + mylar.updater.dbUpdate(sched=self.sched) + helpers.job_management(write=True, job='DB Updater', last_run_completed=helpers.utctimestamp(), status='Waiting') diff --git a/mylar/filechecker.py b/mylar/filechecker.py index fedac0ed..e3ec5664 100755 --- a/mylar/filechecker.py +++ b/mylar/filechecker.py @@ -47,6 +47,7 @@ class FileChecker(object): #watchcomic = unicode name of series that is being searched against self.og_watchcomic = watchcomic self.watchcomic = re.sub('\?', '', 
watchcomic).strip() #strip the ? sepearte since it affects the regex. + self.watchcomic = re.sub(u'\u2014', ' - ', watchcomic).strip() #replace the \u2014 with a normal - because this world is f'd up enough to have something like that. self.watchcomic = unicodedata.normalize('NFKD', self.watchcomic).encode('ASCII', 'ignore') else: self.watchcomic = None @@ -91,7 +92,7 @@ class FileChecker(object): self.failed_files = [] - self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')'] + self.dynamic_handlers = ['/','-',':','\'',',','&','?','!','+','(',')','\u2014'] self.dynamic_replacements = ['and','the'] self.rippers = ['-empire','-empire-hd','minutemen-','-dcp'] @@ -502,6 +503,8 @@ class FileChecker(object): volume_found['position'] = split_file.index(volumeprior_label, current_pos -1) #if this passes, then we're ok, otherwise will try exception logger.fdebug('volume_found: ' + str(volume_found['position'])) except: + volumeprior = False + volumeprior_label = None sep_volume = False continue else: @@ -577,6 +580,9 @@ class FileChecker(object): else: raise ValueError except ValueError, e: + volumeprior = False + volumeprior_label = None + sep_volume = False pass #logger.fdebug('Error detecting issue # - ignoring this result : ' + str(sf)) @@ -855,7 +861,7 @@ class FileChecker(object): mod_series_decoded = self.dynamic_replace(series_info['series_name_decoded']) mod_seriesname_decoded = mod_dynamicinfo['mod_seriesname'] mod_watch_decoded = self.dynamic_replace(self.og_watchcomic) - mod_watchname_decoded = mod_dynamicinfo['mod_seriesname'] + mod_watchname_decoded = mod_dynamicinfo['mod_watchcomic'] #remove the spaces... nspace_seriesname = re.sub(' ', '', mod_seriesname) @@ -1037,6 +1043,7 @@ class FileChecker(object): spacer+='|' mod_watchcomic = mod_watchcomic[:wd] + spacer + mod_watchcomic[wd+len(wdrm):] + series_name = re.sub(u'\u2014', ' - ', series_name) seriesdynamic_handlers_match = [x for x in self.dynamic_handlers if x.lower() in series_name.lower()] #logger.fdebug('series dynamic handlers recognized : ' + str(seriesdynamic_handlers_match)) seriesdynamic_replacements_match = [x for x in self.dynamic_replacements if x.lower() in series_name.lower()] diff --git a/mylar/findcomicfeed.py b/mylar/findcomicfeed.py index 616e6d57..9eee2799 100755 --- a/mylar/findcomicfeed.py +++ b/mylar/findcomicfeed.py @@ -2,6 +2,7 @@ import os import sys +import time import feedparser import re import logger @@ -54,9 +55,13 @@ def Startit(searchName, searchIssue, searchYear, ComicVersion, IssDateFix): max_age = "&age=" + str(mylar.USENET_RETENTION) feeds = [] + feed1 = "http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch feeds.append(feedparser.parse("http://nzbindex.nl/rss/alt.binaries.comics.dcp/?sort=agedesc&" + str(size_constraints) + str(max_age) + "&dq=%s&max=50&more=1" %joinSearch)) + time.sleep(3) if mylar.ALTEXPERIMENTAL: + feed2 = "http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch feeds.append(feedparser.parse("http://nzbindex.nl/rss/?dq=%s&g[]=41&g[]=510&sort=agedesc&hidespam=0&max=&more=1" %joinSearch)) + time.sleep(3) entries = [] mres = {} diff --git a/mylar/helpers.py b/mylar/helpers.py index 803a6873..cc9fdb85 100755 --- a/mylar/helpers.py +++ b/mylar/helpers.py @@ -24,9 +24,11 @@ import json import re import sys import platform +import calendar import itertools import shutil import os, errno +from apscheduler.triggers.interval import IntervalTrigger 
import mylar import logger @@ -128,6 +130,9 @@ def now(): now = datetime.datetime.now() return now.strftime("%Y-%m-%d %H:%M:%S") +def utctimestamp(): + return time.time() + def bytes_to_mb(bytes): mb = int(bytes) /1048576 @@ -649,9 +654,9 @@ def rename_param(comicid, comicname, issue, ofilename, comicyear=None, issueid=N logger.fdebug('Destination: ' + dst) rename_this = {"destination_dir": dst, - "nfilename": nfilename, - "issueid": issueid, - "comicid": comicid} + "nfilename": nfilename, + "issueid": issueid, + "comicid": comicid} return rename_this @@ -1383,6 +1388,8 @@ def havetotals(refreshit=None): def filesafe(comic): import unicodedata + if u'\u2014' in comic: + comic = re.sub(u'\u2014', ' - ', comic) try: u_comic = unicodedata.normalize('NFKD', comic).encode('ASCII', 'ignore').strip() except TypeError: @@ -1969,25 +1976,44 @@ def listIssues(weeknumber, year): library = [] myDB = db.DBConnection() # Get individual issues - list = myDB.select("SELECT issues.Status, issues.ComicID, issues.IssueID, issues.ComicName, weekly.publisher, issues.Issue_Number from weekly, issues where weekly.IssueID = issues.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year]) + list = myDB.select("SELECT issues.Status, issues.ComicID, issues.IssueID, issues.ComicName, issues.IssueDate, issues.ReleaseDate, weekly.publisher, issues.Issue_Number from weekly, issues where weekly.IssueID = issues.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year]) for row in list: + if row['ReleaseDate'] is None: + tmpdate = row['IssueDate'] + else: + tmpdate = row['ReleaseDate'] library.append({'ComicID': row['ComicID'], - 'Status': row['Status'], - 'IssueID': row['IssueID'], - 'ComicName': row['ComicName'], - 'Publisher': row['publisher'], - 'Issue_Number': row['Issue_Number']}) + 'Status': row['Status'], + 'IssueID': row['IssueID'], + 'ComicName': row['ComicName'], + 'Publisher': row['publisher'], + 'Issue_Number': row['Issue_Number'], + 'IssueYear': tmpdate}) + # Add the annuals if mylar.ANNUALS_ON: - list = myDB.select("SELECT annuals.Status, annuals.ComicID, annuals.ReleaseComicID, annuals.IssueID, annuals.ComicName, weekly.publisher, annuals.Issue_Number from weekly, annuals where weekly.IssueID = annuals.IssueID and weeknumber = ? and year = ?", [int(weeknumber), year]) + list = myDB.select("SELECT annuals.Status, annuals.ComicID, annuals.ReleaseComicID, annuals.IssueID, annuals.ComicName, annuals.ReleaseDate, annuals.IssueDate, weekly.publisher, annuals.Issue_Number from weekly, annuals where weekly.IssueID = annuals.IssueID and weeknumber = ? 
and year = ?", [int(weeknumber), year]) for row in list: + if row['ReleaseDate'] is None: + tmpdate = row['IssueDate'] + else: + tmpdate = row['ReleaseDate'] library.append({'ComicID': row['ComicID'], 'Status': row['Status'], 'IssueID': row['IssueID'], 'ComicName': row['ComicName'], 'Publisher': row['publisher'], - 'Issue_Number': row['Issue_Number']}) + 'Issue_Number': row['Issue_Number'], + 'IssueYear': tmpdate}) + #tmplist = library + #librarylist = [] + #for liblist in tmplist: + # lb = myDB.select('SELECT ComicVersion, Type, ComicYear, ComicID from comics WHERE ComicID=?', [liblist['ComicID']]) + # librarylist.append(liblist) + # librarylist.update({'Comic_Volume': lb['ComicVersion'], + # 'ComicYear': lb['ComicYear'], + # 'ComicType': lb['Type']}) return library def incr_snatched(ComicID): @@ -2027,13 +2053,13 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): #'write' - write new file #'dupe_file' - do not write new file as existing file is better quality #'dupe_src' - write new file, as existing file is a lesser quality (dupe) - rtnval = [] + if dupchk['Status'] == 'Downloaded' or dupchk['Status'] == 'Archived': try: dupsize = dupchk['ComicSize'] except: logger.info('[DUPECHECK] Duplication detection returned no hits as this is a new Snatch. This is not a duplicate.') - rtnval.append({'action': "write"}) + rtnval = {'action': "write"} logger.info('[DUPECHECK] Existing Status already set to ' + dupchk['Status']) cid = [] @@ -2051,11 +2077,11 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): else: #file is Archived, but no entry exists in the db for the location. Assume Archived, and don't post-process. logger.fdebug('[DUPECHECK] File is Archived but no file can be located within the db at the specified location. Assuming this was a manual archival and will not post-process this issue.') - rtnval.append({'action': "dont_dupe"}) + rtnval = {'action': "dont_dupe"} else: - rtnval.append({'action': "dupe_file", - 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}) + rtnval = {'action': "dupe_file", + 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])} else: logger.info('[DUPECHECK] Existing file within db :' + dupchk['Location'] + ' has a filesize of : ' + str(dupsize) + ' bytes.') @@ -2066,8 +2092,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): logger.info('[DUPECHECK] Existing filesize is 0 as I cannot locate the original entry.') if dupchk['Status'] == 'Archived': logger.info('[DUPECHECK] Assuming issue is Archived.') - rtnval.append({'action': "dupe_file", - 'to_dupe': filename}) + rtnval = {'action': "dupe_file", + 'to_dupe': filename} return rtnval else: logger.info('[DUPECHECK] Assuming 0-byte file - this one is gonna get hammered.') @@ -2086,8 +2112,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): else: #keep filename logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in file : ' + filename) - rtnval.append({'action': "dupe_src", - 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}) + rtnval = {'action': "dupe_src", + 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])} else: if dupchk['Location'].endswith('.cbz'): logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] BOTH files are in cbz format. 
Retaining the larger filesize of the two.') @@ -2095,8 +2121,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): else: #keep filename logger.info('[DUPECHECK-CBR PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in file : ' + dupchk['Location']) - rtnval.append({'action': "dupe_file", - 'to_dupe': filename}) + rtnval = {'action': "dupe_file", + 'to_dupe': filename} elif 'cbz' in mylar.DUPECONSTRAINT: if filename.endswith('.cbr'): @@ -2106,8 +2132,8 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): else: #keep filename logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location']) - rtnval.append({'action': "dupe_file", - 'to_dupe': filename}) + rtnval = {'action': "dupe_file", + 'to_dupe': filename} else: if dupchk['Location'].endswith('.cbz'): logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] BOTH files are in cbz format. Retaining the larger filesize of the two.') @@ -2115,22 +2141,22 @@ def duplicate_filecheck(filename, ComicID=None, IssueID=None, StoryArcID=None): else: #keep filename logger.info('[DUPECHECK-CBZ PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + filename) - rtnval.append({'action': "dupe_src", - 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}) + rtnval = {'action': "dupe_src", + 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])} if mylar.DUPECONSTRAINT == 'filesize' or tmp_dupeconstraint == 'filesize': if filesz <= int(dupsize) and int(dupsize) != 0: logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining currently scanned in filename : ' + dupchk['Location']) - rtnval.append({'action': "dupe_file", - 'to_dupe': filename}) + rtnval = {'action': "dupe_file", + 'to_dupe': filename} else: logger.info('[DUPECHECK-FILESIZE PRIORITY] [#' + dupchk['Issue_Number'] + '] Retaining newly scanned in filename : ' + filename) - rtnval.append({'action': "dupe_src", - 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])}) + rtnval = {'action': "dupe_src", + 'to_dupe': os.path.join(series['ComicLocation'], dupchk['Location'])} else: logger.info('[DUPECHECK] Duplication detection returned no hits. 
This is not a duplicate of anything that I have scanned in as of yet.') - rtnval.append({'action': "write"}) + rtnval = {'action': "write"} return rtnval def create_https_certificates(ssl_cert, ssl_key): @@ -2676,17 +2702,21 @@ def torrentinfo(issueid=None, torrent_hash=None, download=False, monitor=False): curScriptName = shell_cmd + ' ' + str(mylar.AUTO_SNATCH_SCRIPT).decode("string_escape") if torrent_files > 1: - downlocation = torrent_folder + downlocation = torrent_folder.encode('utf-8') else: - downlocation = torrent_info['files'][0] #os.path.join(torrent_folder, torrent_info['name']) + if mylar.USE_DELUGE: + downlocation = os.path.join(torrent_folder, torrent_info['files'][0]['path']) + else: + downlocation = torrent_info['files'][0].encode('utf-8') - downlocation = re.sub("'", "\\'", downlocation) - downlocation = re.sub("&", "\\&", downlocation) + os.environ['downlocation'] = re.sub("'", "\\'",downlocation) + #downlocation = re.sub("\'", "\\'", downlocation) + #downlocation = re.sub("&", "\&", downlocation) - script_cmd = shlex.split(curScriptName, posix=False) + [downlocation] + script_cmd = shlex.split(curScriptName, posix=False) # + [downlocation] logger.fdebug(u"Executing command " +str(script_cmd)) try: - p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=mylar.PROG_DIR) + p = subprocess.Popen(script_cmd, env=dict(os.environ), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=mylar.PROG_DIR) out, err = p.communicate() logger.fdebug(u"Script result: " + out) except OSError, e: @@ -2947,6 +2977,169 @@ def get_the_hash(filepath): logger.info('Hash of file : ' + thehash) return {'hash': thehash} +def date_conversion(originaldate): + c_obj_date = datetime.datetime.strptime(originaldate, "%Y-%m-%d %H:%M:%S") + n_date = datetime.datetime.now() + absdiff = abs(n_date - c_obj_date) + hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0 + return hours + +def job_management(write=False, job=None, last_run_completed=None, current_run=None, status=None): + jobresults = [] + + import db + myDB = db.DBConnection() + + if job is None: + dbupdate_newstatus = 'Waiting' + dbupdate_nextrun = None + rss_newstatus = 'Waiting' + rss_nextrun = None + weekly_newstatus = 'Waiting' + weekly_nextrun = None + search_newstatus = 'Waiting' + search_nextrun = None + version_newstatus = 'Waiting' + version_nextrun = None + monitor_newstatus = 'Waiting' + monitor_nextrun = None + + job_info = myDB.select('select * from jobhistory') + #set default values if nothing has been ran yet + for ji in job_info: + if 'update' in ji['JobName'].lower(): + if mylar.SCHED_DBUPDATE_LAST is None: + mylar.SCHED_DBUPDATE_LAST = ji['prev_run_timestamp'] + dbupdate_newstatus = ji['status'] + dbupdate_nextrun = ji['next_run_timestamp'] + elif 'search' in ji['JobName'].lower(): + if mylar.SCHED_SEARCH_LAST is None: + mylar.SCHED_SEARCH_LAST = ji['prev_run_timestamp'] + search_newstatus = ji['status'] + search_nextrun = ji['next_run_timestamp'] + elif 'rss' in ji['JobName'].lower(): + if mylar.SCHED_RSS_LAST is None: + mylar.SCHED_RSS_LAST = ji['prev_run_timestamp'] + rss_newstatus = ji['status'] + rss_nextrun = ji['next_run_timestamp'] + elif 'weekly' in ji['JobName'].lower(): + if mylar.SCHED_WEEKLY_LAST is None: + mylar.SCHED_WEEKLY_LAST = ji['prev_run_timestamp'] + weekly_newstatus = ji['status'] + weekly_nextrun = ji['next_run_timestamp'] + elif 'version' in ji['JobName'].lower(): + if mylar.SCHED_VERSION_LAST is None: + mylar.SCHED_VERSION_LAST = ji['prev_run_timestamp'] + 
version_newstatus = ji['status'] + version_nextrun = ji['next_run_timestamp'] + elif 'monitor' in ji['JobName'].lower(): + if mylar.SCHED_MONITOR_LAST is None: + mylar.SCHED_MONITOR_LAST = ji['prev_run_timestamp'] + monitor_newstatus = ji['status'] + monitor_nextrun = ji['next_run_timestamp'] + + #this is for initial startup + for jb in mylar.SCHED.get_jobs(): + #logger.fdebug('jb: %s' % jb) + jobinfo = str(jb) + if 'update' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_DBUPDATE_LAST + newstatus = dbupdate_newstatus + elif 'search' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_SEARCH_LAST + newstatus = search_newstatus + elif 'rss' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_RSS_LAST + newstatus = rss_newstatus + elif 'weekly' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_WEEKLY_LAST + newstatus = weekly_newstatus + elif 'version' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_VERSION_LAST + newstatus = version_newstatus + elif 'monitor' in jobinfo.lower(): + prev_run_timestamp = mylar.SCHED_MONITOR_LAST + newstatus = monitor_newstatus + jobname = jobinfo[:jobinfo.find('(')-1].strip() + #logger.fdebug('jobinfo: %s' % jobinfo) + jobtimetmp = jobinfo.split('at: ')[1].split('.')[0].strip() + #logger.fdebug('jobtimetmp: %s' % jobtimetmp) + jobtime = float(calendar.timegm(datetime.datetime.strptime(jobtimetmp[:-1], '%Y-%m-%d %H:%M:%S %Z').timetuple())) + #logger.fdebug('jobtime: %s' % jobtime) + + if prev_run_timestamp is not None: + prev_run_time_utc = datetime.datetime.utcfromtimestamp(float(prev_run_timestamp)) + else: + prev_run_time_utc = None + #logger.fdebug('prev_run_time: %s' % prev_run_timestamp) + #logger.fdebug('prev_run_time type: %s' % type(prev_run_timestamp)) + jobresults.append({'jobname': jobname, + 'next_run_datetime': datetime.datetime.utcfromtimestamp(jobtime), + 'prev_run_datetime': prev_run_time_utc, + 'next_run_timestamp': jobtime, + 'prev_run_timestamp': prev_run_timestamp, + 'status': newstatus}) + + if not write: + #logger.info('jobresults: %s' % jobresults) + return jobresults + else: + if job is None: + for x in jobresults: + updateCtrl = {'JobName': x['jobname']} + updateVals = {'next_run_timestamp': x['next_run_timestamp'], + 'prev_run_timestamp': x['prev_run_timestamp'], + 'next_run_datetime': x['next_run_datetime'], + 'prev_run_datetime': x['prev_run_datetime'], + 'status': x['status']} + + myDB.upsert('jobhistory', updateVals, updateCtrl) + else: + #logger.fdebug('Updating info - job: %s' % job) + #logger.fdebug('Updating info - last run: %s' % last_run_completed) + #logger.fdebug('Updating info - status: %s' % status) + updateCtrl = {'JobName': job} + if current_run is not None: + updateVals = {'prev_run_timestamp': current_run, + 'prev_run_datetime': datetime.datetime.utcfromtimestamp(current_run), + 'status': status} + #logger.info('updateVals: %s' % updateVals) + elif last_run_completed is not None: + if job == 'DB Updater': + mylar.SCHED.reschedule_job('dbupdater', trigger=IntervalTrigger(hours=0, minutes=5, timezone='UTC')) + nextrun_stamp = utctimestamp() + (5 * 60) + elif job == 'Auto-Search': + mylar.SCHED.reschedule_job('search', trigger=IntervalTrigger(hours=0, minutes=mylar.SEARCH_INTERVAL, timezone='UTC')) + nextrun_stamp = utctimestamp() + (mylar.SEARCH_INTERVAL * 60) + elif job == 'RSS Feeds': + mylar.SCHED.reschedule_job('rss', trigger=IntervalTrigger(hours=0, minutes=int(mylar.RSS_CHECKINTERVAL), timezone='UTC')) + nextrun_stamp = utctimestamp() + (int(mylar.RSS_CHECKINTERVAL) * 60) + elif job == 
'Weekly Pullist': + if mylar.ALT_PULL == 2: + wkt = 4 + else: + wkt = 24 + mylar.SCHED.reschedule_job('weekly', trigger=IntervalTrigger(hours=wkt, minutes=mylar.SEARCH_INTERVAL, timezone='UTC')) + nextrun_stamp = utctimestamp() + (wkt * 60 * 60) + elif job == 'Check Version': + mylar.SCHED.reschedule_job('version', trigger=IntervalTrigger(hours=0, minutes=mylar.CHECK_GITHUB_INTERVAL, timezone='UTC')) + nextrun_stamp = utctimestamp() + (mylar.CHECK_GITHUB_INTERVAL * 60) + elif job == 'Folder Monitor': + mylar.SCHED.reschedule_job('monitor', trigger=IntervalTrigger(hours=0, minutes=int(mylar.DOWNLOAD_SCAN_INTERVAL), timezone='UTC')) + nextrun_stamp = utctimestamp() + (int(mylar.DOWNLOAD_SCAN_INTERVAL) * 60) + + nextrun_date = datetime.datetime.utcfromtimestamp(nextrun_stamp) + logger.fdebug('ReScheduled job: %s to %s' % (job, nextrun_date)) + #if it's completed, then update the last run time to the ending time of the job + updateVals = {'prev_run_timestamp': last_run_completed, + 'prev_run_datetime': datetime.datetime.utcfromtimestamp(last_run_completed), + 'last_run_completed': 'True', + 'next_run_timestamp': nextrun_stamp, + 'next_run_datetime': nextrun_date, + 'status': status} + + #logger.fdebug('Job update for %s: %s' % (updateCtrl, updateVals)) + myDB.upsert('jobhistory', updateVals, updateCtrl) def file_ops(path,dst,arc=False,one_off=False): # # path = source path + filename diff --git a/mylar/rsscheck.py b/mylar/rsscheck.py index 66110859..f21383dd 100755 --- a/mylar/rsscheck.py +++ b/mylar/rsscheck.py @@ -831,7 +831,7 @@ def nzbdbsearch(seriesname, issue, comicid=None, nzbprov=None, searchYear=None, nzbinfo['entries'] = nzbtheinfo return nzbinfo -def torsend2client(seriesname, issue, seriesyear, linkit, site): +def torsend2client(seriesname, issue, seriesyear, linkit, site, pubhash=None): logger.info('matched on ' + seriesname) filename = helpers.filesafe(seriesname) filename = re.sub(' ', '_', filename) @@ -1119,8 +1119,8 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): try: dc = deluge.TorrentClient() if not dc.connect(mylar.DELUGE_HOST, mylar.DELUGE_USERNAME, mylar.DELUGE_PASSWORD): - return "fail" logger.info('Not connected to Deluge!') + return "fail" else: logger.info('Connected to Deluge! Will try to add torrent now!') torrent_info = dc.load_torrent(filepath) @@ -1159,8 +1159,11 @@ def torsend2client(seriesname, issue, seriesyear, linkit, site): elif mylar.USE_WATCHDIR: if mylar.TORRENT_LOCAL: - #get the hash so it doesn't mess up... - torrent_info = helpers.get_the_hash(filepath) + if nzbprov == 'TPSE': + torrent_info = {'hash': pubhash} + else: + #get the hash so it doesn't mess up... + torrent_info = helpers.get_the_hash(filepath) torrent_info['clientmode'] = 'watchdir' torrent_info['link'] = linkit torrent_info['filepath'] = filepath diff --git a/mylar/rsscheckit.py b/mylar/rsscheckit.py index 28ad0ef9..94580be5 100755 --- a/mylar/rsscheckit.py +++ b/mylar/rsscheckit.py @@ -24,54 +24,51 @@ rss_lock = threading.Lock() class tehMain(): - def __init__(self, forcerss=None): - - self.forcerss = forcerss - - def run(self): + def __init__(self): + pass + def run(self, forcerss=None): + logger.info('forcerss is : %s' % forcerss) with rss_lock: - logger.info('RSS Feed Check was last run at : ' + str(mylar.RSS_LASTRUN)) + logger.info('[RSS-FEEDS] RSS Feed Check was last run at : ' + str(mylar.SCHED_RSS_LAST)) firstrun = "no" #check the last run of rss to make sure it's not hammering. 
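A note on the reschedule-and-record pattern that job_management() uses above: the standalone sketch below shows the same idea in isolation, assuming APScheduler 3.x and a bare sqlite3 jobhistory table. The helper names (utcnow_stamp, reschedule_and_record) and the 20-minute interval are illustrative choices for this sketch, not Mylar's API.

# Sketch only: reschedule a named job and persist its next/previous run, in the
# spirit of job_management() above.  Assumes APScheduler 3.x and a bare sqlite3
# 'jobhistory' table; reschedule_and_record() is illustrative, not Mylar's API.
import datetime
import sqlite3
import time

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger


def utcnow_stamp():
    # same idea as helpers.utctimestamp(): seconds since the epoch, in UTC
    return int(time.time())


def reschedule_and_record(sched, conn, job_id, job_name, minutes, status='Waiting'):
    # push the job out by `minutes` and record both timestamps for the Activity page
    sched.reschedule_job(job_id, trigger=IntervalTrigger(minutes=minutes, timezone='UTC'))
    now = utcnow_stamp()
    nextrun = now + minutes * 60
    conn.execute("UPDATE jobhistory SET prev_run_timestamp=?, next_run_timestamp=?, "
                 "next_run_datetime=?, status=? WHERE JobName=?",
                 (now, nextrun, str(datetime.datetime.utcfromtimestamp(nextrun)), status, job_name))
    conn.commit()


if __name__ == '__main__':
    sched = BackgroundScheduler(timezone='UTC')
    sched.add_job(lambda: None, IntervalTrigger(minutes=60), id='rss')
    sched.start()
    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE jobhistory (JobName TEXT, prev_run_timestamp, "
                 "next_run_timestamp, next_run_datetime, status TEXT)")
    conn.execute("INSERT INTO jobhistory (JobName) VALUES ('RSS Feeds')")
    reschedule_and_record(sched, conn, 'rss', 'RSS Feeds', minutes=20)
    sched.shutdown(wait=False)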
- if mylar.RSS_LASTRUN is None or mylar.RSS_LASTRUN == '' or mylar.RSS_LASTRUN == '0' or self.forcerss == True: - logger.info('RSS Feed Check First Ever Run.') + if mylar.SCHED_RSS_LAST is None or mylar.SCHED_RSS_LAST == '' or mylar.SCHED_RSS_LAST == '0' or forcerss == True: + logger.info('[RSS-FEEDS] RSS Feed Check Initializing....') firstrun = "yes" - mins = 0 + duration_diff = 0 else: - c_obj_date = datetime.datetime.strptime(mylar.RSS_LASTRUN, "%Y-%m-%d %H:%M:%S") - n_date = datetime.datetime.now() - absdiff = abs(n_date - c_obj_date) - mins = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 60.0 #3600 is for hours. - - if firstrun == "no" and mins < int(mylar.RSS_CHECKINTERVAL): - logger.fdebug('RSS Check has taken place less than the threshold - not initiating at this time.') + tstamp = float(mylar.SCHED_RSS_LAST) + duration_diff = abs(helpers.utctimestamp() - tstamp)/60 + logger.fdebug('[RSS-FEEDS] Duration diff: %s' % duration_diff) + if firstrun == "no" and duration_diff < int(mylar.RSS_CHECKINTERVAL): + logger.fdebug('[RSS-FEEDS] RSS Check has taken place less than the threshold - not initiating at this time.') return - mylar.RSS_LASTRUN = helpers.now() - logger.fdebug('Updating RSS Run time to : ' + str(mylar.RSS_LASTRUN)) - mylar.config_write() + helpers.job_management(write=True, job='RSS Feeds', current_run=helpers.utctimestamp(), status='Running') + mylar.RSS_STATUS = 'Running' + logger.fdebug('[RSS-FEEDS] Updated RSS Run time to : ' + str(mylar.SCHED_RSS_LAST)) #function for looping through nzbs/torrent feeds if mylar.ENABLE_TORRENT_SEARCH: - logger.info('[RSS] Initiating Torrent RSS Check.') + logger.info('[RSS-FEEDS] Initiating Torrent RSS Check.') if mylar.ENABLE_TPSE: - logger.info('[RSS] Initiating Torrent RSS Feed Check on TorrentProject.') + logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on TorrentProject.') #rsscheck.torrents(pickfeed='3') #TP.SE RSS Check (has to be page-parsed) rsscheck.torrents(pickfeed='TPSE') #TPSE = DEM RSS Check + WWT RSS Check if mylar.ENABLE_32P: - logger.info('[RSS] Initiating Torrent RSS Feed Check on 32P.') + logger.info('[RSS-FEEDS] Initiating Torrent RSS Feed Check on 32P.') if mylar.MODE_32P == 0: - logger.fdebug('[RSS] 32P mode set to Legacy mode. Monitoring New Releases feed only.') + logger.fdebug('[RSS-FEEDS] 32P mode set to Legacy mode. Monitoring New Releases feed only.') if any([mylar.PASSKEY_32P is None, mylar.PASSKEY_32P == '', mylar.RSSFEED_32P is None, mylar.RSSFEED_32P == '']): - logger.error('[RSS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.') + logger.error('[RSS-FEEDS] Unable to validate information from provided RSS Feed. Verify that the feed provided is a current one.') else: rsscheck.torrents(pickfeed='1', feedinfo=mylar.KEYS_32P) else: - logger.fdebug('[RSS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed') + logger.fdebug('[RSS-FEEDS] 32P mode set to Auth mode. Monitoring all personal notification feeds & New Releases feed') if any([mylar.USERNAME_32P is None, mylar.USERNAME_32P == '', mylar.PASSWORD_32P is None]): - logger.error('[RSS] Unable to sign-on to 32P to validate settings. Please enter/check your username password in the configuration.') + logger.error('[RSS-FEEDS] Unable to sign-on to 32P to validate settings.
Please enter/check your username password in the configuration.') else: if mylar.KEYS_32P is None: feed32p = auth32p.info32p() @@ -83,7 +80,7 @@ class tehMain(): feedinfo = mylar.FEEDINFO_32P if feedinfo is None or len(feedinfo) == 0 or feedinfo == "disable": - logger.error('[RSS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.') + logger.error('[RSS-FEEDS] Unable to retrieve any information from 32P for RSS Feeds. Skipping for now.') else: rsscheck.torrents(pickfeed='1', feedinfo=feedinfo[0]) x = 0 @@ -93,12 +90,14 @@ class tehMain(): pfeed_32p = str(7 + x) rsscheck.torrents(pickfeed=pfeed_32p, feedinfo=fi) - logger.info('[RSS] Initiating RSS Feed Check for NZB Providers.') - rsscheck.nzbs(forcerss=self.forcerss) - logger.info('[RSS] RSS Feed Check/Update Complete') - logger.info('[RSS] Watchlist Check for new Releases') + logger.info('[RSS-FEEDS] Initiating RSS Feed Check for NZB Providers.') + rsscheck.nzbs(forcerss=forcerss) + logger.info('[RSS-FEEDS] RSS Feed Check/Update Complete') + logger.info('[RSS-FEEDS] Watchlist Check for new Releases') mylar.search.searchforissue(rsscheck='yes') - logger.info('[RSS] Watchlist Check complete.') - if self.forcerss: - logger.info('[RSS] Successfully ran a forced RSS Check.') - return + logger.info('[RSS-FEEDS] Watchlist Check complete.') + if forcerss: + logger.info('[RSS-FEEDS] Successfully ran a forced RSS Check.') + helpers.job_management(write=True, job='RSS Feeds', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.RSS_STATUS = 'Waiting' + return True diff --git a/mylar/search.py b/mylar/search.py index 6cf641fe..6894ccd0 100755 --- a/mylar/search.py +++ b/mylar/search.py @@ -2263,7 +2263,7 @@ def searcher(nzbprov, nzbname, comicinfo, link, IssueID, ComicID, tmpprov, direc logger.fdebug("link:" + link) logger.fdebug("Torrent Provider:" + nzbprov) - rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov) + rcheck = rsscheck.torsend2client(ComicName, IssueNumber, comyear, link, nzbprov, nzbid) #nzbid = hash for usage with public torrents if rcheck == "fail": if mylar.FAILED_DOWNLOAD_HANDLING: logger.error('Unable to send torrent to client. 
Assuming incomplete link - sending to Failed Handler and continuing search.') diff --git a/mylar/searchit.py b/mylar/searchit.py index eb49cf61..60d1b294 100755 --- a/mylar/searchit.py +++ b/mylar/searchit.py @@ -17,7 +17,7 @@ from __future__ import with_statement import mylar -from mylar import logger +from mylar import logger, helpers class CurrentSearcher(): def __init__(self, **kwargs): @@ -26,4 +26,9 @@ class CurrentSearcher(): def run(self): logger.info('[SEARCH] Running Search for Wanted.') + helpers.job_management(write=True, job='Auto-Search', current_run=helpers.utctimestamp(), status='Running') + mylar.SEARCH_STATUS = 'Running' mylar.search.searchforissue() + helpers.job_management(write=True, job='Auto-Search', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.SEARCH_STATUS = 'Waiting' + #mylar.SCHED_SEARCH_LAST = helpers.now() diff --git a/mylar/torrent/clients/deluge.py b/mylar/torrent/clients/deluge.py index 2c7a8187..51f65b3e 100644 --- a/mylar/torrent/clients/deluge.py +++ b/mylar/torrent/clients/deluge.py @@ -146,7 +146,7 @@ class TorrentClient(object): else: logger.info('Torrent successfully added!') return {'hash': torrent_info['hash'], - 'label': torrent_info['label'], + 'label': mylar.DELUGE_LABEL, 'folder': torrent_info['save_path'], 'total_filesize': torrent_info['total_size'], 'name': torrent_info['name'], diff --git a/mylar/torrent/clients/rtorrent.py b/mylar/torrent/clients/rtorrent.py index a5de44fc..7f788703 100755 --- a/mylar/torrent/clients/rtorrent.py +++ b/mylar/torrent/clients/rtorrent.py @@ -45,18 +45,26 @@ class TorrentClient(object): logger.info(url) if username and password: + logger.info('username: %s / password: %s' % (username, 'redacted')) try: self.conn = RTorrent( url,(auth, username, password), verify_server=True, verify_ssl=self.getVerifySsl() ) - except: + except Exception as err: + logger.error('Failed to connect to rTorrent: %s', err) return False else: + logger.info('NO username %s / NO password %s' % (username, password)) try: - self.conn = RTorrent(host) - except: + self.conn = RTorrent( + url, (auth, username, password), + verify_server=True, + verify_ssl=self.getVerifySsl() + ) + except Exception as err: + logger.error('Failed to connect to rTorrent: %s', err) return False return self.conn diff --git a/mylar/updater.py b/mylar/updater.py index afd9b1cd..3184cd13 100755 --- a/mylar/updater.py +++ b/mylar/updater.py @@ -17,6 +17,7 @@ import datetime from xml.dom.minidom import parseString import urllib2 import shlex +import operator import re import os import itertools @@ -24,17 +25,16 @@ import itertools import mylar from mylar import db, logger, helpers, filechecker -def dbUpdate(ComicIDList=None, calledfrom=None): +def dbUpdate(ComicIDList=None, calledfrom=None, sched=False): if mylar.IMPORTLOCK: logger.info('Import is currently running - deferring this until the next scheduled run sequence.') return myDB = db.DBConnection() - #print "comicidlist:" + str(ComicIDList) if ComicIDList is None: if mylar.UPDATE_ENDED: logger.info('Updating only Continuing Series (option enabled) - this might cause problems with the pull-list matching for rebooted series') comiclist = [] - completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LatestDate DESC, LastUpdated ASC') + completelist = myDB.select('SELECT LatestDate, ComicPublished, ForceContinuing, NewPublish, 
LastUpdated, ComicID, ComicName, Corrected_SeriesYear, ComicYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, LatestDate ASC') for comlist in completelist: if comlist['LatestDate'] is None: recentstatus = 'Loading' @@ -66,19 +66,22 @@ def dbUpdate(ComicIDList=None, calledfrom=None): "Corrected_SeriesYear": comlist['Corrected_SeriesYear']}) else: - comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE Status="Active" or Status="Loading" order by LatestDate DESC, LastUpdated ASC') + comiclist = myDB.select('SELECT LatestDate, LastUpdated, ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE Status="Active" or Status="Loading" order by LastUpdated DESC, latestDate ASC') else: comiclist = [] comiclisting = ComicIDList for cl in comiclisting: - comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear from comics WHERE ComicID=?', [cl]) + comiclist += myDB.select('SELECT ComicID, ComicName, ComicYear, Corrected_SeriesYear, LastUpdated from comics WHERE ComicID=? order by LastUpdated DESC, LatestDate ASC', [cl]) - if calledfrom is None: + if all([sched is False, calledfrom is None]): logger.info('Starting update for %i active comics' % len(comiclist)) cnt = 1 - for comic in comiclist: + for comic in sorted(comiclist, key=operator.itemgetter('LastUpdated'), reverse=True): + if sched is True: + # since this runs every 5 minutes, take the 1st entry only... + logger.info('[UPDATER] Starting update for %s [%s] - last updated: %s' % (comiclist[0]['ComicName'], comiclist[0]['ComicYear'], comiclist[0]['LastUpdated'])) dspyear = comic['ComicYear'] csyear = None @@ -100,14 +103,14 @@ def dbUpdate(ComicIDList=None, calledfrom=None): absdiff = abs(n_date - c_obj_date) hours = (absdiff.days * 24 * 60 * 60 + absdiff.seconds) / 3600.0 if hours < 5: - logger.info(ComicName + '[' + str(ComicID) + '] Was refreshed less than 5 hours ago. Skipping Refresh at this time.') + logger.fdebug(ComicName + '[' + str(ComicID) + '] Was refreshed less than 5 hours ago. 
Skipping Refresh at this time.') cnt +=1 continue logger.info('[' + str(cnt) + '/' + str(len(comiclist)) + '] Refreshing :' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']') else: ComicID = comic['ComicID'] ComicName = comic['ComicName'] - + logger.fdebug('Refreshing: ' + ComicName + ' (' + str(dspyear) + ') [' + str(ComicID) + ']') mismatch = "no" @@ -124,6 +127,9 @@ def dbUpdate(ComicIDList=None, calledfrom=None): cchk = importer.addComictoDB(ComicID, mismatch) else: if mylar.CV_ONETIMER == 1: + if sched is True: + helpers.job_management(write=True, job='DB Updater', current_run=helpers.utctimestamp(), status='Running') + mylar.UPDATER_STATUS = 'Running' logger.fdebug("CV_OneTimer option enabled...") #in order to update to JUST CV_ONLY, we need to delete the issues for a given series so it's a clea$ logger.fdebug("Gathering the status of all issues for the series.") @@ -289,7 +295,12 @@ def dbUpdate(ComicIDList=None, calledfrom=None): cchk = mylar.importer.addComictoDB(ComicID, mismatch) cnt +=1 - time.sleep(15) #pause for 15 secs so dont hammer CV and get 500 error + if sched is False: + time.sleep(15) #pause for 15 secs so dont hammer CV and get 500 error + else: + helpers.job_management(write=True, job='DB Updater', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.UPDATER_STATUS = 'Waiting' + break logger.info('Update complete') @@ -363,7 +374,6 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, return else: issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Issue_Number=?", [ComicID, IssueNumber]).fetchone() - if issuechk is None and altissuenumber is not None: logger.info('altissuenumber is : ' + str(altissuenumber)) issuechk = myDB.selectone("SELECT * FROM issues WHERE ComicID=? AND Int_IssueNumber=?", [ComicID, helpers.issuedigits(altissuenumber)]).fetchone() @@ -396,17 +406,16 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, cchk = mylar.importer.updateissuedata(ComicID, ComicName, calledfrom='weeklycheck')#mylar.importer.addComictoDB(ComicID,mismatch,pullupd) else: logger.fdebug('It has not been longer than 5 hours since we last did this...we will wait so we do not hammer things.') - else: logger.fdebug('[WEEKLY-PULL] Walksoftly has been enabled. ComicID/IssueID control given to the ninja to monitor.') - #logger.fdebug('hours: ' + str(hours) + ' -- forcecheck: ' + str(forcecheck)) + logger.fdebug('hours: ' + str(hours) + ' -- forcecheck: ' + str(forcecheck)) if hours > 2 or forcecheck == 'yes': logger.fdebug('weekinfo:' + str(weekinfo)) mylar.PULL_REFRESH = datetime.datetime.today() #update the PULL_REFRESH mylar.config_write() chkitout = mylar.locg.locg(weeknumber=str(weekinfo['weeknumber']),year=str(weekinfo['year'])) - + logger.fdebug('linking ComicID to Pull-list to reflect status.') downstats = {"ComicID": ComicID, "IssueID": None, @@ -424,7 +433,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, if issuechk['Issue_Number'] == IssueNumber or issuechk['Issue_Number'] == altissuenumber: og_status = issuechk['Status'] #check for 'out-of-whack' series here. 
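For readers following the sched=True path of dbUpdate above (a pass every 5 minutes that refreshes at most one series and skips anything touched within the last 5 hours), here is that selection logic reduced to a standalone sketch. Table and column names mirror the queries above, but the oldest-first ORDER BY is an assumption of this sketch; the patch itself orders by LastUpdated DESC and walks the list.

# Sketch only: pick the series whose LastUpdated is the oldest, unless it was
# refreshed within the last 5 hours (the same guard dbUpdate applies above).
import datetime
import sqlite3

FRESH_HOURS = 5

def pick_series_to_refresh(conn):
    row = conn.execute(
        "SELECT ComicID, ComicName, LastUpdated FROM comics "
        "WHERE Status IN ('Active', 'Loading') "
        "ORDER BY LastUpdated ASC LIMIT 1").fetchone()
    if row is None:
        return None
    last = datetime.datetime.strptime(row[2], '%Y-%m-%d %H:%M:%S')
    age_hours = (datetime.datetime.now() - last).total_seconds() / 3600.0
    if age_hours < FRESH_HOURS:
        return None  # everything is fresh enough; wait for the next 5-minute pass
    return row[0]

if __name__ == '__main__':
    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE comics (ComicID, ComicName, LastUpdated, Status)")
    conn.execute("INSERT INTO comics VALUES ('1', 'Example', '2017-01-01 00:00:00', 'Active')")
    print(pick_series_to_refresh(conn))   # '1' once the entry is older than 5 hours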
- whackness = dbUpdate([ComicID], calledfrom='weekly') + whackness = dbUpdate([ComicID], calledfrom='weekly', sched=False) if whackness == True: if any([issuechk['Status'] == 'Downloaded', issuechk['Status'] == 'Archived', issuechk['Status'] == 'Snatched']): logger.fdebug('Forcibly maintaining status of : ' + og_status + ' for #' + issuechk['Issue_Number'] + ' to ensure integrity.') @@ -473,7 +482,7 @@ def upcoming_update(ComicID, ComicName, IssueNumber, IssueDate, forcecheck=None, newValue['Status'] = "Skipped" #was in wrong place :( else: - logger.fdebug('Issues do not match for some reason...weekly new issue: ' + str(IssueNumber)) + logger.fdebug('Issues do not match for some reason...weekly new issue: %s' % IssueNumber) return if mylar.AUTOWANT_UPCOMING: diff --git a/mylar/versioncheckit.py b/mylar/versioncheckit.py index 8ae19053..eae76e66 100644 --- a/mylar/versioncheckit.py +++ b/mylar/versioncheckit.py @@ -17,14 +17,18 @@ from __future__ import with_statement import mylar -from mylar import logger +from mylar import logger, helpers, versioncheck -#import threading class CheckVersion(): def __init__(self): pass def run(self): logger.info('[VersionCheck] Checking for new release on Github.') - mylar.versioncheck.checkGithub() + helpers.job_management(write=True, job='Check Version', current_run=helpers.utctimestamp(), status='Running') + mylar.VERSION_STATUS = 'Running' + versioncheck.checkGithub() + helpers.job_management(write=True, job='Check Version', last_run_completed=helpers.utctimestamp(), status='Waiting') + mylar.VERSION_STATUS = 'Waiting' + logger.info('updated') return diff --git a/mylar/webserve.py b/mylar/webserve.py index ddaf069c..eda35987 100644 --- a/mylar/webserve.py +++ b/mylar/webserve.py @@ -37,7 +37,7 @@ import shutil import mylar -from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers #,rsscheck +from mylar import logger, db, importer, mb, search, filechecker, helpers, updater, parseit, weeklypull, PostProcessor, librarysync, moveit, Failed, readinglist, notifiers import simplejson as simplejson @@ -1033,9 +1033,8 @@ class WebInterface(object): #forcerss = True #threading.Thread(target=mylar.rsscheck.tehMain, args=[True]).start() #this is for use with the new scheduler not in place yet. - forcethis = mylar.rsscheckit.tehMain(forcerss=True) - threading.Thread(target=forcethis.run).start() - return + forcethis = mylar.rsscheckit.tehMain() + threading.Thread(target=forcethis.run, args=[True]).start() force_rss.exposed = True def markannuals(self, ann_action=None, **args): @@ -1234,7 +1233,6 @@ class WebInterface(object): else: if mylar.ENABLE_SNATCH_SCRIPT: #packs not supported on retry atm - Volume and Issuedate also not included due to limitations... 
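The job classes touched above (searchit.py, versioncheckit.py, and weeklypullit.py further down) all bracket their work the same way: job_management(..., status='Running') plus a module-level *_STATUS flag before the work, then status='Waiting' with a completion timestamp afterwards. A generic sketch of that bracket follows; record_status is a stand-in for helpers.job_management, and the try/finally guard is this sketch's addition, not something the patch's run() methods do.

# Sketch of the Running/Waiting bracketing used by the scheduled job classes above.
import time
import traceback

def record_status(job, status, stamp):
    # stand-in for helpers.job_management(write=True, job=..., status=..., ...)
    print('%s -> %s @ %s' % (job, status, stamp))

class ScheduledJob(object):
    def __init__(self, name, work):
        self.name = name
        self.work = work

    def run(self):
        record_status(self.name, 'Running', int(time.time()))
        try:
            self.work()
        except Exception:
            traceback.print_exc()
        finally:
            # record completion so the next-run bookkeeping can reschedule from it
            record_status(self.name, 'Waiting', int(time.time()))

if __name__ == '__main__':
    ScheduledJob('Check Version', lambda: None).run()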
- snatch_vars = {'comicinfo': {'comicname': ComicName, 'issuenumber': IssueNumber, 'seriesyear': ComicYear, @@ -1257,9 +1255,9 @@ class WebInterface(object): logger.info('Successfully retried issue.') break else: - ckthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN annuals as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() - if ckthis is None: - ckthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.Volume, a.ComicYear, b.IssueID, b.IssueNumber, b.IssueDate FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() + chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.Issue_Number, b.IssueDate FROM comics as a INNER JOIN annuals as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() + if chkthis is None: + chkthis = myDB.selectone('SELECT a.ComicID, a.ComicName, a.ComicVersion, a.ComicYear, b.IssueID, b.Issue_Number, b.IssueDate FROM comics as a INNER JOIN issues as b ON a.ComicID = b.ComicID WHERE IssueID=?', [IssueID]).fetchone() modcomicname = chkthis['ComicName'] else: modcomicname = chkthis['ComicName'] + ' Annual' @@ -1267,10 +1265,12 @@ class WebInterface(object): comicinfo = [] comicinfo.append({"ComicName": chkthis['ComicName'], "ComicVolume": chkthis['ComicVersion'], - "IssueNumber": chkthis['IssueNumber'], + "IssueNumber": chkthis['Issue_Number'], "comyear": chkthis['ComicYear'], "IssueDate": chkthis['IssueDate'], - "modcomicname": modcomicname}) + "pack": False, + "modcomicname": modcomicname, + "oneoff": False}) newznabinfo = None @@ -1518,7 +1518,7 @@ class WebInterface(object): myDB.upsert("annuals", newValueDict, controlValueDict) else: myDB.upsert("issues", newValueDict, controlValueDict) - raise cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) + #cherrypy.HTTPRedirect("comicDetails?ComicID=%s" % ComicID) else: #if ComicName is not None, then it's from the FuturePull list that we're 'unwanting' an issue. #ComicID may be present if it's a watch from the Watchlist, otherwise it won't exist. @@ -1610,17 +1610,18 @@ class WebInterface(object): return pullSearch.exposed = True - def pullist(self, week=None, year=None): + def pullist(self, week=None, year=None, generateonly=False): myDB = db.DBConnection() - autowants = myDB.select("SELECT * FROM futureupcoming WHERE Status='Wanted'") autowant = [] - if autowants: - for aw in autowants: - autowant.append({"ComicName": aw['ComicName'], - "IssueNumber": aw['IssueNumber'], - "Publisher": aw['Publisher'], - "Status": aw['Status'], - "DisplayComicName": aw['DisplayComicName']}) + if generateonly is False: + autowants = myDB.select("SELECT * FROM futureupcoming WHERE Status='Wanted'") + if autowants: + for aw in autowants: + autowant.append({"ComicName": aw['ComicName'], + "IssueNumber": aw['IssueNumber'], + "Publisher": aw['Publisher'], + "Status": aw['Status'], + "DisplayComicName": aw['DisplayComicName']}) weeklyresults = [] wantedcount = 0 @@ -1644,7 +1645,7 @@ class WebInterface(object): else: logger.warn('Unable to populate the pull-list. 
Not continuing at this time (will try again in abit)') - if w_results is None: + if all([w_results is None, generateonly is False]): return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=0, weekinfo=weekinfo) watchlibrary = helpers.listLibrary() @@ -1676,8 +1677,11 @@ class WebInterface(object): haveit = "No" linkit = None - if all([weekly['ComicID'] is not None, weekly['ComicID'] != '']) and haveit == 'No': + if all([weekly['ComicID'] is not None, weekly['ComicID'] != '', haveit == 'No']) or haveit == 'OneOff': linkit = 'http://comicvine.gamespot.com/volume/4050-' + str(weekly['ComicID']) + else: + #setting it here will force it to set the link to the right comicid regardless of annuals or not + linkit = haveit x = None try: @@ -1731,10 +1735,14 @@ class WebInterface(object): weeklyresults = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False) else: self.manualpull() - if week: - return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + + if generateonly is True: + return weeklyresults, weekinfo else: - return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + if week: + return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) + else: + return serve_template(templatename="weeklypull.html", title="Weekly Pull", weeklyresults=weeklyresults, pullfilter=True, weekfold=weekinfo['week_folder'], wantedcount=wantedcount, weekinfo=weekinfo) pullist.exposed = True def removeautowant(self, comicname, release): @@ -2113,55 +2121,81 @@ class WebInterface(object): annualDelete.exposed = True + def previewRename(self, comicidlist): + myDB = db.DBConnection() + resultlist = [] + for comicid in comicidlist: + comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone() + comicdir = comic['ComicLocation'] + comicname = comic['ComicName'] + issue = myDB.selectone("SELECT * FROM issues WHERE ComicID=? 
AND Location is not None ORDER BY ReleaseDate", [comicid]).fetchone() + if 'annual' in issue['Location'].lower(): + annualize = 'yes' + else: + annualize = None + renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], issue['Location'], comicyear=None, issueid=issue['IssueID'], annualize=annualize) + resultlist.append({'original': issue['Location'], + 'new': renameiss['nfilename']}) + + + previewRename.exposed = True + def manualRename(self, comicid): if mylar.FILE_FORMAT == '': logger.error("You haven't specified a File Format in Configuration/Advanced") logger.error("Cannot rename files.") return + if len(comicid) > 1: + comiclist = comicid + else: + comiclist = [] + comiclist.append(comicid) myDB = db.DBConnection() - comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone() - comicdir = comic['ComicLocation'] - comicname = comic['ComicName'] - extensions = ('.cbr', '.cbz', '.cb7') - issues = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comicid]) - if mylar.ANNUALS_ON: - issues += myDB.select("SELECT * FROM annuals WHERE ComicID=?", [comicid]) - comfiles = [] filefind = 0 - if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) != comicdir: - logger.fdebug('multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS) - logger.fdebug('dir: ' + comicdir) - logger.fdebug('os.path.basename: ' + os.path.basename(comicdir)) - pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) + for comicid in comiclist: + comic = myDB.selectone("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone() + comicdir = comic['ComicLocation'] + comicname = comic['ComicName'] + extensions = ('.cbr', '.cbz', '.cb7') + issues = myDB.select("SELECT * FROM issues WHERE ComicID=?", [comicid]) + if mylar.ANNUALS_ON: + issues += myDB.select("SELECT * FROM annuals WHERE ComicID=?", [comicid]) + if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) != comicdir: + logger.fdebug('multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS) + logger.fdebug('dir: ' + comicdir) + logger.fdebug('os.path.basename: ' + os.path.basename(comicdir)) + pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicdir)) - for root, dirnames, filenames in os.walk(comicdir): - for filename in filenames: - if filename.lower().endswith(extensions): - #logger.info("filename being checked is : " + str(filename)) - for issue in issues: - if issue['Location'] == filename: - #logger.error("matched " + str(filename) + " to DB file " + str(issue['Location'])) - if 'annual' in issue['Location'].lower(): - annualize = 'yes' - else: - annualize = None - renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=issue['IssueID'], annualize=annualize) - nfilename = renameiss['nfilename'] - srciss = os.path.join(comicdir, filename) - if filename != nfilename: - logger.info('Renaming ' + filename + ' ... to ... 
' + renameiss['nfilename']) - try: - shutil.move(srciss, renameiss['destination_dir']) - except (OSError, IOError): - logger.error('Failed to move files - check directories and manually re-run.') - return - filefind+=1 - else: - logger.info('Not renaming ' + filename + ' as it is in desired format already.') - #continue + for root, dirnames, filenames in os.walk(comicdir): + for filename in filenames: + if filename.lower().endswith(extensions): + #logger.info("filename being checked is : " + str(filename)) + for issue in issues: + if issue['Location'] == filename: + #logger.error("matched " + str(filename) + " to DB file " + str(issue['Location'])) + if 'annual' in issue['Location'].lower(): + annualize = 'yes' + else: + annualize = None + renameiss = helpers.rename_param(comicid, comicname, issue['Issue_Number'], filename, comicyear=None, issueid=issue['IssueID'], annualize=annualize) + nfilename = renameiss['nfilename'] + srciss = os.path.join(comicdir, filename) + if filename != nfilename: + logger.info('Renaming ' + filename + ' ... to ... ' + renameiss['nfilename']) + try: + shutil.move(srciss, renameiss['destination_dir']) + except (OSError, IOError): + logger.error('Failed to move files - check directories and manually re-run.') + return + filefind+=1 + else: + logger.info('Not renaming ' + filename + ' as it is in desired format already.') + #continue logger.info('I have renamed ' + str(filefind) + ' issues of ' + comicname) updater.forceRescan(comicid) + if len(comiclist) > 1: + logger.info('[RENAMER] %s series have been renamed.' % len(comiclist)) manualRename.exposed = True def searchScan(self, name): @@ -2170,9 +2204,108 @@ class WebInterface(object): def manage(self): mylarRoot = mylar.DESTINATION_DIR - return serve_template(templatename="manage.html", title="Manage", mylarRoot=mylarRoot) + import db + myDB = db.DBConnection() + jobresults = myDB.select('SELECT * FROM jobhistory') + if jobresults is not None: + tmp = [] + for jb in jobresults: + if jb['prev_run_datetime'] is not None: + try: + pr = (datetime.datetime.strptime(jb['prev_run_datetime'][:19], '%Y-%m-%d %H:%M:%S') - datetime.datetime.utcfromtimestamp(0)).total_seconds() + except ValueError: + pr = (datetime.datetime.strptime(jb['prev_run_datetime'], '%Y-%m-%d %H:%M:%S.%f') - datetime.datetime.utcfromtimestamp(0)).total_seconds() + prev_run = datetime.datetime.fromtimestamp(pr) + else: + prev_run = None + if jb['next_run_datetime'] is not None: + try: + nr = (datetime.datetime.strptime(jb['next_run_datetime'][:19], '%Y-%m-%d %H:%M:%S') - datetime.datetime.utcfromtimestamp(0)).total_seconds() + except ValueError: + nr = (datetime.datetime.strptime(jb['next_run_datetime'], '%Y-%m-%d %H:%M:%S.%f') - datetime.datetime.utcfromtimestamp(0)).total_seconds() + next_run = datetime.datetime.fromtimestamp(nr) + else: + next_run = None + if 'rss' in jb['JobName'].lower(): + status = mylar.RSS_STATUS + interval = str(mylar.RSS_CHECKINTERVAL) + ' mins' + if 'weekly' in jb['JobName'].lower(): + status = mylar.WEEKLY_STATUS + if mylar.ALT_PULL == 2: interval = '4 hrs' + else: interval = '24 hrs' + if 'search' in jb['JobName'].lower(): + status = mylar.SEARCH_STATUS + interval = str(mylar.SEARCH_INTERVAL) + ' mins' + if 'updater' in jb['JobName'].lower(): + status = mylar.UPDATER_STATUS + interval = '5 mins' + if 'folder' in jb['JobName'].lower(): + status = mylar.MONITOR_STATUS + interval = str(mylar.DOWNLOAD_SCAN_INTERVAL) + ' mins' + if 'version' in jb['JobName'].lower(): + status = mylar.VERSION_STATUS + interval =
str(mylar.CHECK_GITHUB_INTERVAL) + 'mins' + + tmp.append({'prev_run_datetime': prev_run, + 'next_run_datetime': next_run, + 'interval': interval, + 'jobname': jb['JobName'], + 'status': status}) + jobresults = tmp + return serve_template(templatename="manage.html", title="Manage", mylarRoot=mylarRoot, jobs=jobresults) manage.exposed = True + def jobmanage(self, job, mode): + logger.info('%s : %s' % (job, mode)) + jobid = None + job_id_map = {'DB Updater': 'dbupdater', 'Auto-Search': 'search', 'RSS Feeds': 'rss', 'Weekly Pullist': 'weekly', 'Check Version': 'version', 'Folder Monitor': 'monitor'} + for k,v in job_id_map.iteritems(): + if k == job: + jobid = v + break + logger.info('jobid: %s' % jobid) + if jobid is not None: + myDB = db.DBConnection() + if mode == 'pause': + mylar.SCHED.pause_job(jobid) + logger.info('[%s] Paused scheduled runtime.' % job) + ctrl = {'JobName': job} + val = {'Status': 'Paused'} + myDB.upsert('jobhistory', val, ctrl) + elif mode == 'resume': + mylar.SCHED.resume_job(jobid) + logger.info('[%s] Resumed scheduled runtime.' % job) + ctrl = {'JobName': job} + val = {'Status': 'Waiting'} + myDB.upsert('jobhistory', val, ctrl) + + else: + logger.warn('%s cannot be matched against any scheduled jobs - maybe you should restart?' % job) + + jobmanage.exposed = True + + def schedulerForceCheck(self, jobid): + from apscheduler.triggers.date import DateTrigger + for jb in mylar.SCHED.get_jobs(): + #logger.info('jb : %s' % jb) + if jobid.lower() in str(jb).lower(): + logger.info('[%s] Now force submitting job.' % jb) + if jobid == 'rss': + mylar.SCHED.add_job(func=jb.func, args=[True], trigger=DateTrigger(run_date=datetime.datetime.now())) + elif jobid == 'weekly': + mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now())) + elif jobid == 'search': + mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now())) + elif jobid == 'version': + mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now())) + elif jobid == 'updater': + mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now())) + elif jobid == 'monitor': + mylar.SCHED.add_job(func=jb.func, trigger=DateTrigger(run_date=datetime.datetime.now())) + break + + schedulerForceCheck.exposed = True + def manageComics(self): comics = helpers.havetotals() return serve_template(templatename="managecomics.html", title="Manage Comics", comics=comics) @@ -2223,12 +2356,6 @@ class WebInterface(object): return serve_template(templatename="managefailed.html", title="Failed DB Management", failed=results) manageFailed.exposed = True - def manageNew(self): - myDB = db.DBConnection() - newcomics = myDB.select('SELECT * from newartists') - return serve_template(templatename="managenew.html", title="Manage New Artists", newcomics=newcomics) - manageNew.exposed = True - def flushImports(self): myDB = db.DBConnection() myDB.action('DELETE from importresults') @@ -2345,6 +2472,9 @@ class WebInterface(object): elif action == 'metatag': logger.info('[MANAGE COMICS][MASS METATAGGING] Now Metatagging Files for ' + str(len(comicsToAdd)) + ' series') threading.Thread(target=self.forceRescan, args=[comicsToAdd,True,'metatag']).start() + elif action == 'rename': + logger.info('[MANAGE COMICS][MASS RENAMING] Now Renaming Files for ' + str(len(comicsToAdd)) + ' series') + threading.Thread(target=self.manualRename, args=[comicsToAdd]).start() else: logger.info('[MANAGE COMICS][REFRESH] Refreshing ' + str(len(comicsToAdd)) + ' series') 
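jobmanage() and schedulerForceCheck() above drive APScheduler directly: pause_job/resume_job by job id, and a forced run implemented by re-adding the job's callable under a one-shot DateTrigger. A self-contained sketch of both calls, assuming APScheduler 3.x; the 'rss' job id and the one-second run_date offset are illustrative.

# Sketch: pause/resume a scheduled job and force an immediate one-off run
# (APScheduler 3.x, as used by the scheduler changes in this patch).
import datetime
import time

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger

def job():
    print('job fired')

if __name__ == '__main__':
    sched = BackgroundScheduler(timezone='UTC')
    sched.add_job(job, IntervalTrigger(minutes=60), id='rss')
    sched.start()

    sched.pause_job('rss')      # stop future scheduled runs
    sched.resume_job('rss')     # pick the schedule back up

    # force-run: submit the same callable once, right now, as a separate one-shot job
    existing = sched.get_job('rss')
    sched.add_job(func=existing.func,
                  trigger=DateTrigger(run_date=datetime.datetime.utcnow() + datetime.timedelta(seconds=1)))

    time.sleep(2)               # give the one-off a moment to fire
    sched.shutdown(wait=False)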
threading.Thread(target=updater.dbUpdate, args=[comicsToAdd]).start() @@ -3361,7 +3491,7 @@ class WebInterface(object): myDB = db.DBConnection() if type == 'all': logger.info(u"Clearing all history") - myDB.action('DELETE from snatched') + myDB.action('DELETE from snatched') else: logger.info(u"Clearing history where status is %s" % type) myDB.action('DELETE from snatched WHERE Status=?', [type]) @@ -3517,6 +3647,9 @@ class WebInterface(object): #save the values so they stick. mylar.ADD_COMICS = autoadd + if 'windows' in mylar.OS_DETECT.lower(): + #to handle long paths, let's append the '\\?\' to the path to allow for unicode windows api access + path = "\\\\?\\" + path mylar.COMIC_DIR = path mylar.IMP_MOVE = imp_move mylar.IMP_RENAME = imp_rename @@ -4125,7 +4258,6 @@ class WebInterface(object): "COUNT_HAVES": COUNT_HAVES, "COUNT_ISSUES": COUNT_ISSUES, "COUNT_SIZE": COUNT_SIZE} - config = { "comicvine_api": mylar.COMICVINE_API, "http_host": mylar.HTTP_HOST, @@ -4224,6 +4356,7 @@ class WebInterface(object): "extra_newznabs": sorted(mylar.EXTRA_NEWZNABS, key=itemgetter(5), reverse=True), "enable_rss": helpers.checked(mylar.ENABLE_RSS), "rss_checkinterval": mylar.RSS_CHECKINTERVAL, + "rss_last": datetime.datetime.fromtimestamp(mylar.SCHED_RSS_LAST).replace(microsecond=0), "provider_order": mylar.PROVIDER_ORDER, "enable_torrents": helpers.checked(mylar.ENABLE_TORRENTS), "minseeds": mylar.MINSEEDS, @@ -5005,7 +5138,7 @@ class WebInterface(object): getComicArtwork.exposed = True def findsabAPI(self, sabhost=None, sabusername=None, sabpassword=None): - import sabparse + from mylar import sabparse sabapi = sabparse.sabnzbd(sabhost, sabusername, sabpassword) logger.info('SAB NZBKey found as : ' + str(sabapi) + '. You still have to save the config to retain this setting.') mylar.SAB_APIKEY = sabapi @@ -5112,7 +5245,7 @@ class WebInterface(object): IssueInfo.exposed = True - def manual_metatag(self, dirName, issueid, filename, comicid, comversion, seriesyear=None): + def manual_metatag(self, dirName, issueid, filename, comicid, comversion, seriesyear=None, group=False): module = '[MANUAL META-TAGGING]' try: import cmtagmylar @@ -5148,7 +5281,8 @@ class WebInterface(object): else: logger.fdebug('Failed to remove temporary directory: ' + cache_dir) - updater.forceRescan(comicid) + if group is False: + updater.forceRescan(comicid) manual_metatag.exposed = True @@ -5165,7 +5299,8 @@ class WebInterface(object): meta_dir = dirName for ginfo in groupinfo: #if multiple_dest_dirs is in effect, metadir will be pointing to the wrong location and cause a 'Unable to create temporary cache location' error message - self.manual_metatag(meta_dir, ginfo['IssueID'], os.path.join(meta_dir, ginfo['Location']), ComicID, comversion=cinfo['ComicVersion'], seriesyear=cinfo['ComicYear']) + self.manual_metatag(meta_dir, ginfo['IssueID'], os.path.join(meta_dir, ginfo['Location']), ComicID, comversion=cinfo['ComicVersion'], seriesyear=cinfo['ComicYear'], group=True) + updater.forceRescan(ComicID) logger.info('[SERIES-METATAGGER][' + cinfo['ComicName'] + ' (' + cinfo['ComicYear'] + ')] Finished doing a complete series (re)tagging of metadata.') group_metatag.exposed = True @@ -5343,9 +5478,98 @@ class WebInterface(object): import auth32p tmp = auth32p.info32p(test=True) rtnvalues = tmp.authenticate() - if rtnvalues is True: - return "Successfully Authenticated."
+ if rtnvalues['status'] is True: + return json.dumps({"status": "Successfully Authenticated.", "inkdrops": mylar.INKDROPS_32P}) else: - return "Could not Authenticate." + return json.dumps({"status": "Could not Authenticate.", "inkdrops": mylar.INKDROPS_32P}) test_32p.exposed = True + + def create_readlist(self, list=None, weeknumber=None, year=None): + # ({ + # "PUBLISHER": weekly['PUBLISHER'], + # "ISSUE": weekly['ISSUE'], + # "COMIC": weekly['COMIC'], + # "STATUS": tmp_status, + # "COMICID": weekly['ComicID'], + # "ISSUEID": weekly['IssueID'], + # "HAVEIT": haveit, + # "LINK": linkit, + # "AUTOWANT": False + # }) + issuelist = [] + logger.info('weeknumber: %s' % weeknumber) + logger.info('year: %s' % year) + weeklyresults = [] + if weeknumber is not None: + myDB = db.DBConnection() + w_results = myDB.select("SELECT * from weekly WHERE weeknumber=? AND year=?", [int(weeknumber),int(year)]) + watchlibrary = helpers.listLibrary() + issueLibrary = helpers.listIssues(weeknumber, year) + oneofflist = helpers.listoneoffs(weeknumber, year) + for weekly in w_results: + xfound = False + tmp_status = weekly['Status'] + issdate = None + if weekly['ComicID'] in watchlibrary: + haveit = watchlibrary[weekly['ComicID']] + + if all([mylar.AUTOWANT_UPCOMING, tmp_status == 'Skipped']): + tmp_status = 'Wanted' + + for x in issueLibrary: + if weekly['IssueID'] == x['IssueID']: + xfound = True + tmp_status = x['Status'] + issdate = x['IssueYear'] + break + + else: + xlist = [x['Status'] for x in oneofflist if x['IssueID'] == weekly['IssueID']] + if xlist: + haveit = 'OneOff' + tmp_status = xlist[0] + issdate = None + else: + haveit = "No" + + x = None + try: + x = float(weekly['ISSUE']) + except ValueError, e: + if 'au' in weekly['ISSUE'].lower() or 'ai' in weekly['ISSUE'].lower() or '.inh' in weekly['ISSUE'].lower() or '.now' in weekly['ISSUE'].lower() or '.mu' in weekly['ISSUE'].lower(): + x = weekly['ISSUE'] + + if x is not None: + weeklyresults.append({ + "PUBLISHER": weekly['PUBLISHER'], + "ISSUE": weekly['ISSUE'], + "COMIC": weekly['COMIC'], + "STATUS": tmp_status, + "COMICID": weekly['ComicID'], + "ISSUEID": weekly['IssueID'], + "HAVEIT": haveit, + "ISSUEDATE": issdate + }) + weeklylist = sorted(weeklyresults, key=itemgetter('PUBLISHER', 'COMIC'), reverse=False) + for ab in weeklylist: + if ab['HAVEIT'] == ab['COMICID']: + lb = myDB.selectone('SELECT ComicVersion, Type, ComicYear from comics WHERE ComicID=?', [ab['COMICID']]).fetchone() + issuelist.append({'IssueNumber': ab['ISSUE'], + 'ComicName': ab['COMIC'], + 'ComicID': ab['COMICID'], + 'IssueID': ab['ISSUEID'], + 'Status': ab['STATUS'], + 'Publisher': ab['PUBLISHER'], + 'ComicVolume': lb['ComicVersion'], + 'ComicYear': lb['ComicYear'], + 'ComicType': lb['Type'], + 'IssueYear': ab['ISSUEDATE']}) + + from mylar import cbl + ab = cbl.dict2xml(issuelist) + #a = cbl.CreateList(issuelist) + #ab = a.createComicRackReadlist() + logger.info('returned.') + logger.info(ab) + create_readlist.exposed = True diff --git a/mylar/weeklypull.py b/mylar/weeklypull.py index fe32a4ff..948f3b67 100755 --- a/mylar/weeklypull.py +++ b/mylar/weeklypull.py @@ -1366,151 +1366,150 @@ def future_check(): chkfuture = myDB.select("SELECT * FROM futureupcoming WHERE IssueNumber='1' OR IssueNumber='0'") #is not NULL") if chkfuture is None: logger.info("There are not any series on your future-list that I consider to be a NEW series") - return - - cflist = [] - #load in the values on an entry-by-entry basis into a tuple, so that we can query the sql clean again. 
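Relating to the import-directory change a little further up (on Windows, webserve.py prepends '\\?\' to the scan path before saving COMIC_DIR): the sketch below shows the extended-length path form on its own. The winapi_path helper is not Mylar code; the prefix only applies to absolute, backslash-separated paths, and UNC shares need the \\?\UNC\ variant.

# Sketch: opt a path into the Windows extended-length form (\\?\C:\...), which
# lifts the ~260 character MAX_PATH limit for unicode Windows API calls.
import os
import sys

def winapi_path(path):
    # the \\?\ prefix requires an absolute, backslash-separated path with no '..'
    path = os.path.abspath(path)
    if sys.platform == 'win32' and not path.startswith('\\\\?\\'):
        if path.startswith('\\\\'):
            path = '\\\\?\\UNC\\' + path[2:]   # UNC share -> \\?\UNC\server\share\...
        else:
            path = '\\\\?\\' + path
    return path

if __name__ == '__main__':
    print(winapi_path(r'C:\comics\some\very\deep\import\folder'))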
- for cf in chkfuture: - cflist.append({"ComicName": cf['ComicName'], - "IssueDate": cf['IssueDate'], - "IssueNumber": cf['IssueNumber'], #this should be all #1's as the sql above limits the hits. - "Publisher": cf['Publisher'], - "Status": cf['Status']}) - logger.fdebug('cflist: ' + str(cflist)) - #now we load in - if len(cflist) == 0: - logger.info('No series have been marked as being on auto-watch.') - return - logger.info('I will be looking to see if any information has been released for ' + str(len(cflist)) + ' series that are NEW series') - #limit the search to just the 'current year' since if it's anything but a #1, it should have associated data already. - #limittheyear = [] - #limittheyear.append(cf['IssueDate'][-4:]) - search_results = [] - - for ser in cflist: - matched = False - theissdate = ser['IssueDate'][-4:] - if not theissdate.startswith('20'): - theissdate = ser['IssueDate'][:4] - logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') - searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=theissdate, explicit='all') - if len(searchresults) > 0: - if len(searchresults) > 1: - logger.info('More than one result returned - this may have to be a manual add, but I\'m going to try to figure it out myself first.') - matches = [] - logger.fdebug('Publisher of series to be added: ' + str(ser['Publisher'])) - for sr in searchresults: - logger.fdebug('Comparing ' + sr['name'] + ' - to - ' + ser['ComicName']) - tmpsername = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', ser['ComicName']).strip() - tmpsrname = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', sr['name']).strip() - tmpsername = re.sub('\-', '', tmpsername) - if tmpsername.lower().startswith('the '): - tmpsername = re.sub('the ', '', tmpsername.lower()).strip() - else: - tmpsername = re.sub(' the ', '', tmpsername.lower()).strip() - tmpsrname = re.sub('\-', '', tmpsrname) - if tmpsrname.lower().startswith('the '): - tmpsrname = re.sub('the ', '', tmpsrname.lower()).strip() - else: - tmpsrname = re.sub(' the ', '', tmpsrname.lower()).strip() - - tmpsername = re.sub(' and ', '', tmpsername.lower()).strip() - tmpsername = re.sub(' & ', '', tmpsername.lower()).strip() - tmpsrname = re.sub(' and ', '', tmpsrname.lower()).strip() - tmpsrname = re.sub(' & ', '', tmpsrname.lower()).strip() - - #append the cleaned-up name to get searched later against if necessary. - search_results.append({'name': tmpsrname, - 'comicid': sr['comicid']}) - - tmpsername = re.sub('\s', '', tmpsername).strip() - tmpsrname = re.sub('\s', '', tmpsrname).strip() - - logger.fdebug('Comparing modified names: ' + tmpsrname + ' - to - ' + tmpsername) - if tmpsername.lower() == tmpsrname.lower(): - logger.fdebug('Name matched successful: ' + sr['name']) - if str(sr['comicyear']) == str(theissdate): - logger.fdebug('Matched to : ' + str(theissdate)) - matches.append(sr) - - if len(matches) == 1: - logger.info('Narrowed down to one series as a direct match: ' + matches[0]['name'] + '[' + str(matches[0]['comicid']) + ']') - cid = matches[0]['comicid'] - matched = True - else: - logger.info('Unable to determine a successful match at this time (this is still a WIP so it will eventually work). 
Not going to attempt auto-adding at this time.') - catch_words = ('the', 'and', '&', 'to') - for pos_match in search_results: - logger.info(pos_match) - length_match = len(pos_match['name']) / len(ser['ComicName']) - logger.fdebug('length match differential set for an allowance of 20%') - logger.fdebug('actual differential in length between result and series title: ' + str((length_match * 100)-100) + '%') - if ((length_match * 100)-100) > 20: - logger.fdebug('there are too many extra words to consider this as match for the given title. Ignoring this result.') - continue - new_match = pos_match['name'].lower() - split_series = ser['ComicName'].lower().split() - for cw in catch_words: - for x in new_match.split(): - #logger.fdebug('comparing x: ' + str(x) + ' to cw: ' + str(cw)) - if x == cw: - new_match = re.sub(x, '', new_match) - - split_match = new_match.split() - word_match = 0 - i = 0 - for ss in split_series: - try: - matchword = split_match[i].lower() - except: - break - - if any([x == matchword for x in catch_words]): - #logger.fdebug('[MW] common word detected of : ' + matchword) - word_match+=.5 - elif any([cw == ss for cw in catch_words]): - #logger.fdebug('[CW] common word detected of : ' + matchword) - word_match+=.5 - else: - try: - #will return word position in string. - #logger.fdebug('word match to position found in both strings at position : ' + str(split_match.index(ss))) - if split_match.index(ss) == split_series.index(ss): - word_match+=1 - except ValueError: - break - i+=1 - logger.fdebug('word match score of : ' + str(word_match) + ' / ' + str(len(split_series))) - if word_match == len(split_series) or (word_match / len(split_series)) > 80: - logger.fdebug('[' + pos_match['name'] + '] considered a match - word matching percentage is greater than 80%. Attempting to auto-add series into watchlist.') - cid = pos_match['comicid'] - matched = True - - if matched: - #we should probably load all additional issues for the series on the futureupcoming list that are marked as Wanted and then - #throw them to the importer as a tuple, and once imported the import can run the additional search against them. - #now we scan for additional issues of the same series on the upcoming list and mark them accordingly. - chkthewanted = [] - chkwant = myDB.select("SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber != '1' AND Status='Wanted'", [ser['ComicName']]) - if chkwant is None: - logger.info('No extra issues to mark at this time for ' + ser['ComicName']) - else: - for chk in chkwant: - chkthewanted.append({"ComicName": chk['ComicName'], - "IssueDate": chk['IssueDate'], - "IssueNumber": chk['IssueNumber'], #this should be all #1's as the sql above limits the hits. - "Publisher": chk['Publisher'], - "Status": chk['Status']}) - - logger.info('Marking ' + str(len(chkthewanted)) + ' additional issues as Wanted from ' + ser['ComicName'] + ' series as requested.') - - future_check_add(cid, ser, chkthewanted, theissdate) - + else: + cflist = [] + #load in the values on an entry-by-entry basis into a tuple, so that we can query the sql clean again. + for cf in chkfuture: + cflist.append({"ComicName": cf['ComicName'], + "IssueDate": cf['IssueDate'], + "IssueNumber": cf['IssueNumber'], #this should be all #1's as the sql above limits the hits. 
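The matching block above (removed here and re-added below with new indentation) normalizes both titles before comparing them: punctuation stripped, 'the', 'and' and '&' dropped, then all whitespace removed. A condensed sketch of that normalization follows; the single normalize_title helper is an illustrative consolidation of the regex chain, not a drop-in replacement for it.

# Condensed sketch of the series-title normalization used above before a search
# result name is compared against the futureupcoming entry.
import re

def normalize_title(name):
    name = name.lower().strip()
    name = re.sub(r"['\*\^%\$#@!/,\.:\(\)\-]", '', name)    # strip punctuation
    name = re.sub(r'^the\s+|\s+the\s+', ' ', name)          # drop 'the'
    name = re.sub(r'\s+(and|&)\s+', ' ', name)              # drop 'and' / '&'
    return re.sub(r'\s+', '', name)                         # finally drop whitespace

if __name__ == '__main__':
    print(normalize_title('The Mighty Thor & Loki') == normalize_title('Mighty Thor and Loki'))  # True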
+ "Publisher": cf['Publisher'], + "Status": cf['Status']}) + logger.fdebug('cflist: ' + str(cflist)) + #now we load in + if len(cflist) == 0: + logger.info('No series have been marked as being on auto-watch.') else: - logger.info('No series information available as of yet for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') - continue + logger.info('I will be looking to see if any information has been released for ' + str(len(cflist)) + ' series that are NEW series') + #limit the search to just the 'current year' since if it's anything but a #1, it should have associated data already. + #limittheyear = [] + #limittheyear.append(cf['IssueDate'][-4:]) + search_results = [] - logger.info('Finished attempting to auto-add new series.') + for ser in cflist: + matched = False + theissdate = ser['IssueDate'][-4:] + if not theissdate.startswith('20'): + theissdate = ser['IssueDate'][:4] + logger.info('looking for new data for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')') + searchresults, explicit = mb.findComic(ser['ComicName'], mode='pullseries', issue=ser['IssueNumber'], limityear=theissdate, explicit='all') + if len(searchresults) > 0: + if len(searchresults) > 1: + logger.info('More than one result returned - this may have to be a manual add, but I\'m going to try to figure it out myself first.') + matches = [] + logger.fdebug('Publisher of series to be added: ' + str(ser['Publisher'])) + for sr in searchresults: + logger.fdebug('Comparing ' + sr['name'] + ' - to - ' + ser['ComicName']) + tmpsername = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', ser['ComicName']).strip() + tmpsrname = re.sub('[\'\*\^\%\$\#\@\!\/\,\.\:\(\)]', '', sr['name']).strip() + tmpsername = re.sub('\-', '', tmpsername) + if tmpsername.lower().startswith('the '): + tmpsername = re.sub('the ', '', tmpsername.lower()).strip() + else: + tmpsername = re.sub(' the ', '', tmpsername.lower()).strip() + tmpsrname = re.sub('\-', '', tmpsrname) + if tmpsrname.lower().startswith('the '): + tmpsrname = re.sub('the ', '', tmpsrname.lower()).strip() + else: + tmpsrname = re.sub(' the ', '', tmpsrname.lower()).strip() + + tmpsername = re.sub(' and ', '', tmpsername.lower()).strip() + tmpsername = re.sub(' & ', '', tmpsername.lower()).strip() + tmpsrname = re.sub(' and ', '', tmpsrname.lower()).strip() + tmpsrname = re.sub(' & ', '', tmpsrname.lower()).strip() + + #append the cleaned-up name to get searched later against if necessary. + search_results.append({'name': tmpsrname, + 'comicid': sr['comicid']}) + + tmpsername = re.sub('\s', '', tmpsername).strip() + tmpsrname = re.sub('\s', '', tmpsrname).strip() + + logger.fdebug('Comparing modified names: ' + tmpsrname + ' - to - ' + tmpsername) + if tmpsername.lower() == tmpsrname.lower(): + logger.fdebug('Name matched successful: ' + sr['name']) + if str(sr['comicyear']) == str(theissdate): + logger.fdebug('Matched to : ' + str(theissdate)) + matches.append(sr) + + if len(matches) == 1: + logger.info('Narrowed down to one series as a direct match: ' + matches[0]['name'] + '[' + str(matches[0]['comicid']) + ']') + cid = matches[0]['comicid'] + matched = True + else: + logger.info('Unable to determine a successful match at this time (this is still a WIP so it will eventually work). 
+                            catch_words = ('the', 'and', '&', 'to')
+                            for pos_match in search_results:
+                                logger.info(pos_match)
+                                length_match = len(pos_match['name']) / len(ser['ComicName'])
+                                logger.fdebug('length match differential set for an allowance of 20%')
+                                logger.fdebug('actual differential in length between result and series title: ' + str((length_match * 100)-100) + '%')
+                                if ((length_match * 100)-100) > 20:
+                                    logger.fdebug('there are too many extra words to consider this as match for the given title. Ignoring this result.')
+                                    continue
+                                new_match = pos_match['name'].lower()
+                                split_series = ser['ComicName'].lower().split()
+                                for cw in catch_words:
+                                    for x in new_match.split():
+                                        #logger.fdebug('comparing x: ' + str(x) + ' to cw: ' + str(cw))
+                                        if x == cw:
+                                            new_match = re.sub(x, '', new_match)
+
+                                split_match = new_match.split()
+                                word_match = 0
+                                i = 0
+                                for ss in split_series:
+                                    try:
+                                        matchword = split_match[i].lower()
+                                    except:
+                                        break
+
+                                    if any([x == matchword for x in catch_words]):
+                                        #logger.fdebug('[MW] common word detected of : ' + matchword)
+                                        word_match+=.5
+                                    elif any([cw == ss for cw in catch_words]):
+                                        #logger.fdebug('[CW] common word detected of : ' + matchword)
+                                        word_match+=.5
+                                    else:
+                                        try:
+                                            #will return word position in string.
+                                            #logger.fdebug('word match to position found in both strings at position : ' + str(split_match.index(ss)))
+                                            if split_match.index(ss) == split_series.index(ss):
+                                                word_match+=1
+                                        except ValueError:
+                                            break
+                                    i+=1
+                                logger.fdebug('word match score of : ' + str(word_match) + ' / ' + str(len(split_series)))
+                                if word_match == len(split_series) or (word_match / len(split_series)) > 80:
+                                    logger.fdebug('[' + pos_match['name'] + '] considered a match - word matching percentage is greater than 80%. Attempting to auto-add series into watchlist.')
+                                    cid = pos_match['comicid']
+                                    matched = True
+
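+                    #anything that survived the matching above is handed off to future_check_add() below, which does the actual add.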
+                    if matched:
+                        #we should probably load all additional issues for the series on the futureupcoming list that are marked as Wanted and then
+                        #throw them to the importer as a tuple, and once imported the import can run the additional search against them.
+                        #now we scan for additional issues of the same series on the upcoming list and mark them accordingly.
+                        chkthewanted = []
+                        chkwant = myDB.select("SELECT * FROM futureupcoming WHERE ComicName=? AND IssueNumber != '1' AND Status='Wanted'", [ser['ComicName']])
+                        if chkwant is None:
+                            logger.info('No extra issues to mark at this time for ' + ser['ComicName'])
+                        else:
+                            for chk in chkwant:
+                                chkthewanted.append({"ComicName": chk['ComicName'],
+                                                     "IssueDate": chk['IssueDate'],
+                                                     "IssueNumber": chk['IssueNumber'], #this should be all #1's as the sql above limits the hits.
+                                                     "Publisher": chk['Publisher'],
+                                                     "Status": chk['Status']})
+
+                        logger.info('Marking ' + str(len(chkthewanted)) + ' additional issues as Wanted from ' + ser['ComicName'] + ' series as requested.')
+
+                        future_check_add(cid, ser, chkthewanted, theissdate)
+
+                else:
+                    logger.info('No series information available as of yet for ' + ser['ComicName'] + '[#' + str(ser['IssueNumber']) + '] (' + str(theissdate) + ')')
+                    continue
+
+            logger.info('Finished attempting to auto-add new series.')
     return

 def future_check_add(comicid, serinfo, chkthewanted=None, theissdate=None):
diff --git a/mylar/weeklypullit.py b/mylar/weeklypullit.py
index 4766ff74..5bbd6563 100755
--- a/mylar/weeklypullit.py
+++ b/mylar/weeklypullit.py
@@ -17,9 +17,7 @@ from __future__ import with_statement

 import mylar

-from mylar import logger
-
-#import threading
+from mylar import logger, helpers, weeklypull

 class Weekly():
     def __init__(self):
@@ -27,6 +25,10 @@ class Weekly():

     def run(self):
         logger.info('[WEEKLY] Checking Weekly Pull-list for new releases/updates')
-        mylar.weeklypull.pullit()
-        mylar.weeklypull.future_check()
-        return
+        helpers.job_management(write=True, job='Weekly Pullist', current_run=helpers.utctimestamp(), status='Running')
+        mylar.WEEKLY_STATUS = 'Running'
+        weeklypull.pullit()
+        weeklypull.future_check()
+        helpers.job_management(write=True, job='Weekly Pullist', last_run_completed=helpers.utctimestamp(), status='Waiting')
+        mylar.WEEKLY_STATUS = 'Waiting'
+
diff --git a/post-processing/torrent-auto-snatch/getlftp.sh b/post-processing/torrent-auto-snatch/getlftp.sh
index f60ffaaf..1fcafad8 100755
--- a/post-processing/torrent-auto-snatch/getlftp.sh
+++ b/post-processing/torrent-auto-snatch/getlftp.sh
@@ -25,12 +25,12 @@ fi
 source "$configfile"

 cd $LOCALCD
-filename="$1"
+filename="$downlocation"

 if [[ "${filename##*.}" == "cbr" || "${filename##*.}" == "cbz" ]]; then
-    LCMD="pget -n 6 '$1'"
+    LCMD="pget -n 6 '$filename'"
 else
-    LCMD="mirror -P 2 --use-pget-n=6 '$1'"
+    LCMD="mirror -P 2 --use-pget-n=6 '$filename'"
 fi

 if [[ -z $KEYFILE ]]; then