Changing from config.ini to config.yaml

morpheus65535 2023-10-14 09:56:21 -04:00 committed by GitHub
parent d6579417ba
commit c89da3e619
158 changed files with 36452 additions and 735 deletions

View File

@@ -162,7 +162,7 @@ class EpisodesSubtitles(Resource):
provider = "manual"
score = 360
history_log(4, sonarrSeriesId, sonarrEpisodeId, result, fake_provider=provider, fake_score=score)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
except OSError:
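Note: the recurring one-line change in these API hunks is the heart of the migration. configparser kept every value as a string, so boolean options had to go through getboolean(); Dynaconf loads the YAML with native types, so plain attribute access already returns a bool. A minimal sketch of the difference (file contents hypothetical):

    # config.ini via configparser: every value is a string
    #   [general]
    #   dont_notify_manual_actions = False
    # settings.general.dont_notify_manual_actions               -> 'False' (truthy string!)
    # settings.general.getboolean('dont_notify_manual_actions') -> False
    #
    # config.yaml via Dynaconf: values keep their YAML types
    #   general:
    #     dont_notify_manual_actions: false
    from dynaconf import Dynaconf
    settings = Dynaconf(settings_file='config.yaml', core_loaders=['YAML'])
    if not settings.general.dont_notify_manual_actions:   # already a real bool
        pass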

View File

@@ -158,7 +158,7 @@ class MoviesSubtitles(Resource):
provider = "manual"
score = 120
history_log_movie(4, radarrId, result, fake_provider=provider, fake_score=score)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
except OSError:

View File

@@ -141,7 +141,7 @@ class ProviderEpisodes(Resource):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
elif isinstance(result, str):

View File

@@ -135,7 +135,7 @@ class ProviderMovies(Resource):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log_movie(2, radarrId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
if not settings.general.dont_notify_manual_actions:
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
elif isinstance(result, str):

View File

@@ -116,7 +116,7 @@ class Subtitles(Resource):
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
if chmod:
os.chmod(subtitles_path, chmod)

View File

@@ -24,12 +24,12 @@ class SystemAccount(Resource):
@api_ns_system_account.response(400, 'Unknown action')
@api_ns_system_account.response(403, 'Authentication failed')
@api_ns_system_account.response(406, 'Browser must be closed to invalidate basic authentication')
@api_ns_system_account.response(500, 'Unknown authentication type define in config.ini')
@api_ns_system_account.response(500, 'Unknown authentication type defined in config')
def post(self):
"""Login or logout from Bazarr UI when using form login"""
args = self.post_request_parser.parse_args()
if settings.auth.type != 'form':
return 'Unknown authentication type define in config.ini', 500
return 'Unknown authentication type defined in config', 500
action = args.get('action')
if action == 'login':

View File

@@ -27,7 +27,7 @@ class Searches(Resource):
search_list = []
if query:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
# Get matching series
search_list += database.execute(
select(TableShows.title,
@@ -36,7 +36,7 @@ class Searches(Resource):
.order_by(TableShows.title)) \
.all()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
# Get matching movies
search_list += database.execute(
select(TableMovies.title,

View File

@@ -4,6 +4,7 @@ import json
from flask import request, jsonify
from flask_restx import Resource, Namespace
from dynaconf.validator import ValidationError
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableSettingsNotifier, \
update_profile_id_list, database, insert, update, delete, select
@@ -97,9 +98,9 @@ class SystemSettings(Resource):
event_stream("languages")
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
# Update Notification
@@ -112,6 +113,11 @@ class SystemSettings(Resource):
url=item['url'])
.where(TableSettingsNotifier.name == item['name']))
save_settings(zip(request.form.keys(), request.form.listvalues()))
event_stream("settings")
return '', 204
try:
save_settings(zip(request.form.keys(), request.form.listvalues()))
except ValidationError as e:
event_stream("settings")
return e.message, 406
else:
event_stream("settings")
return '', 204
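Note: the new try/except relies on Dynaconf validators raising when a submitted value is invalid, which the endpoint turns into an HTTP 406. A self-contained sketch of that mechanism (names and values made up):

    from dynaconf import Dynaconf, Validator
    from dynaconf.validator import ValidationError

    settings = Dynaconf()
    settings.validators.register(Validator('general.port', is_type_of=int, gte=1, lte=65535))
    settings.set('general.port', 99999)        # out of range
    try:
        settings.validators.validate()
    except ValidationError as e:
        print(e.message)                       # what gets returned to the client as 406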

View File

@@ -77,7 +77,7 @@ def postprocess(item):
"hi": language[1] == 'hi',
}
)
if settings.general.getboolean('embedded_subs_show_desired') and item.get('profileId'):
if settings.general.embedded_subs_show_desired and item.get('profileId'):
desired_lang_list = get_desired_languages(item['profileId'])
item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))

View File

@@ -3,21 +3,21 @@
import hashlib
import os
import ast
import logging
from urllib.parse import quote_plus
from subliminal.cache import region
from simpleconfigparser import simpleconfigparser, configparser, NoOptionError
from dynaconf import Dynaconf, Validator as OriginalValidator
from dynaconf.loaders.yaml_loader import write
from dynaconf.validator import ValidationError
from dynaconf.utils.functional import empty
from ipaddress import ip_address
from binascii import hexlify
from types import MappingProxyType
from .get_args import args
class SimpleConfigParser(simpleconfigparser):
def get(self, section, option, raw=False, vars=None):
try:
return configparser.get(self, section, option, raw=raw, vars=vars)
except NoOptionError:
return None
NoneType = type(None)
def base_url_slash_cleaner(uri):
@@ -26,275 +26,371 @@ def base_url_slash_cleaner(uri):
return uri
defaults = {
'general': {
'ip': '0.0.0.0',
'port': '6767',
'base_url': '',
'path_mappings': '[]',
'debug': 'False',
'branch': 'master',
'auto_update': 'True',
'single_language': 'False',
'minimum_score': '90',
'use_scenename': 'True',
'use_postprocessing': 'False',
'postprocessing_cmd': '',
'postprocessing_threshold': '90',
'use_postprocessing_threshold': 'False',
'postprocessing_threshold_movie': '70',
'use_postprocessing_threshold_movie': 'False',
'use_sonarr': 'False',
'use_radarr': 'False',
'path_mappings_movie': '[]',
'serie_default_enabled': 'False',
'serie_default_profile': '',
'movie_default_enabled': 'False',
'movie_default_profile': '',
'page_size': '25',
'theme': 'auto',
'page_size_manual_search': '10',
'minimum_score_movie': '70',
'use_embedded_subs': 'True',
'embedded_subs_show_desired': 'True',
'utf8_encode': 'True',
'ignore_pgs_subs': 'False',
'ignore_vobsub_subs': 'False',
'ignore_ass_subs': 'False',
'adaptive_searching': 'True',
'adaptive_searching_delay': '3w',
'adaptive_searching_delta': '1w',
'enabled_providers': '[]',
'multithreading': 'True',
'chmod_enabled': 'False',
'chmod': '0640',
'subfolder': 'current',
'subfolder_custom': '',
'upgrade_subs': 'True',
'upgrade_frequency': '12',
'days_to_upgrade_subs': '7',
'upgrade_manual': 'True',
'anti_captcha_provider': 'None',
'wanted_search_frequency': '6',
'wanted_search_frequency_movie': '6',
'subzero_mods': '[]',
'dont_notify_manual_actions': 'False',
'hi_extension': 'hi',
'embedded_subtitles_parser': 'ffprobe',
'default_und_audio_lang': '',
'default_und_embedded_subtitles_lang': '',
'parse_embedded_audio_track': 'False',
'skip_hashing': 'False',
'language_equals': '[]',
},
'auth': {
'type': 'None',
'username': '',
'password': ''
},
'cors': {
'enabled': 'False'
},
'backup': {
'folder': os.path.join(args.config_dir, 'backup'),
'retention': '31',
'frequency': 'Weekly',
'day': '6',
'hour': '3'
},
'sonarr': {
'ip': '127.0.0.1',
'port': '8989',
'base_url': '/',
'ssl': 'False',
'http_timeout': '60',
'apikey': '',
'full_update': 'Daily',
'full_update_day': '6',
'full_update_hour': '4',
'only_monitored': 'False',
'series_sync': '60',
'episodes_sync': '60',
'excluded_tags': '[]',
'excluded_series_types': '[]',
'use_ffprobe_cache': 'True',
'exclude_season_zero': 'False',
'defer_search_signalr': 'False'
},
'radarr': {
'ip': '127.0.0.1',
'port': '7878',
'base_url': '/',
'ssl': 'False',
'http_timeout': '60',
'apikey': '',
'full_update': 'Daily',
'full_update_day': '6',
'full_update_hour': '5',
'only_monitored': 'False',
'movies_sync': '60',
'excluded_tags': '[]',
'use_ffprobe_cache': 'True',
'defer_search_signalr': 'False'
},
'proxy': {
'type': 'None',
'url': '',
'port': '',
'username': '',
'password': '',
'exclude': '["localhost","127.0.0.1"]'
},
'opensubtitles': {
'username': '',
'password': '',
'use_tag_search': 'False',
'vip': 'False',
'ssl': 'False',
'timeout': '15',
'skip_wrong_fps': 'False'
},
'opensubtitlescom': {
'username': '',
'password': '',
'use_hash': 'True'
},
'addic7ed': {
'username': '',
'password': '',
'cookies': '',
'user_agent': '',
'vip': 'False'
},
'podnapisi': {
'verify_ssl': 'True'
},
'subf2m': {
'verify_ssl': 'True',
'user_agent': ''
},
'whisperai': {
'endpoint': 'http://127.0.0.1:9000',
'timeout': '3600'
},
'legendasdivx': {
'username': '',
'password': '',
'skip_wrong_fps': 'False'
},
'ktuvit': {
'email': '',
'hashed_password': ''
},
'xsubs': {
'username': '',
'password': ''
},
'assrt': {
'token': ''
},
'anticaptcha': {
'anti_captcha_key': ''
},
'deathbycaptcha': {
'username': '',
'password': ''
},
'napisy24': {
'username': '',
'password': ''
},
'subscene': {
'username': '',
'password': ''
},
'betaseries': {
'token': ''
},
'analytics': {
'enabled': 'True'
},
'titlovi': {
'username': '',
'password': ''
},
'titulky': {
'username': '',
'password': '',
'approved_only': 'False'
},
'embeddedsubtitles': {
'included_codecs': '[]',
'hi_fallback': 'False',
'timeout': '600',
'unknown_as_english': 'False',
},
'hdbits': {
'username': '',
'passkey': '',
},
'karagarga': {
'username': '',
'password': '',
'f_username': '',
'f_password': '',
},
'subsync': {
'use_subsync': 'False',
'use_subsync_threshold': 'False',
'subsync_threshold': '90',
'use_subsync_movie_threshold': 'False',
'subsync_movie_threshold': '70',
'debug': 'False',
'force_audio': 'False'
},
'series_scores': {
"hash": 359,
"series": 180,
"year": 90,
"season": 30,
"episode": 30,
"release_group": 14,
"source": 7,
"audio_codec": 3,
"resolution": 2,
"video_codec": 2,
"streaming_service": 1,
"hearing_impaired": 1,
},
'movie_scores': {
"hash": 119,
"title": 60,
"year": 30,
"release_group": 13,
"source": 7,
"audio_codec": 3,
"resolution": 2,
"video_codec": 2,
"streaming_service": 1,
"edition": 1,
"hearing_impaired": 1,
},
'postgresql': {
'enabled': 'False',
'host': 'localhost',
'port': '5432',
'database': '',
'username': '',
'password': '',
},
}
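Note: this defaults dict, with every value quoted as configparser requires, is exactly what the typed Validator list below replaces. A hedged before/after of one section on disk (rendering assumed):

    config.ini (every value round-trips as a string):

        [sonarr]
        port = 8989
        ssl = False
        excluded_tags = []

    config.yaml (typed values, as Dynaconf's YAML loader reads and writes them):

        sonarr:
          port: 8989
          ssl: false
          excluded_tags: []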
def validate_ip_address(ip_string):
try:
ip_address(ip_string)
return True
except ValueError:
return False
class Validator(OriginalValidator):
# Give the ability to personalize messages sent by the original dynaconf Validator class.
default_messages = MappingProxyType(
{
"must_exist_true": "{name} is required",
"must_exist_false": "{name} cannot exists",
"condition": "{name} invalid for {function}({value})",
"operations": "{name} must {operation} {op_value} but it is {value}",
"combined": "combined validators failed {errors}",
}
)
validators = [
# general section
Validator('general.flask_secret_key', must_exist=True, default=hexlify(os.urandom(16)).decode(),
is_type_of=str),
Validator('general.ip', must_exist=True, default='0.0.0.0', is_type_of=str, condition=validate_ip_address),
Validator('general.port', must_exist=True, default=6767, is_type_of=int, gte=1, lte=65535),
Validator('general.base_url', must_exist=True, default='', is_type_of=str),
Validator('general.path_mappings', must_exist=True, default=[], is_type_of=list),
Validator('general.debug', must_exist=True, default=False, is_type_of=bool),
Validator('general.branch', must_exist=True, default='master', is_type_of=str,
is_in=['master', 'development']),
Validator('general.auto_update', must_exist=True, default=True, is_type_of=bool),
Validator('general.single_language', must_exist=True, default=False, is_type_of=bool),
Validator('general.minimum_score', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_scenename', must_exist=True, default=True, is_type_of=bool),
Validator('general.use_postprocessing', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_cmd', must_exist=True, default='', is_type_of=str),
Validator('general.postprocessing_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('general.use_postprocessing_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('general.postprocessing_threshold_movie', must_exist=True, default=70, is_type_of=int, gte=0,
lte=100),
Validator('general.use_postprocessing_threshold_movie', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_sonarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.use_radarr', must_exist=True, default=False, is_type_of=bool),
Validator('general.path_mappings_movie', must_exist=True, default=[], is_type_of=list),
Validator('general.serie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.serie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.movie_default_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.movie_default_profile', must_exist=True, default='', is_type_of=(int, str)),
Validator('general.page_size', must_exist=True, default=25, is_type_of=int,
is_in=[25, 50, 100, 250, 500, 1000]),
Validator('general.theme', must_exist=True, default='auto', is_type_of=str,
is_in=['auto', 'light', 'dark']),
Validator('general.minimum_score_movie', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('general.use_embedded_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.embedded_subs_show_desired', must_exist=True, default=True, is_type_of=bool),
Validator('general.utf8_encode', must_exist=True, default=True, is_type_of=bool),
Validator('general.ignore_pgs_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_vobsub_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.ignore_ass_subs', must_exist=True, default=False, is_type_of=bool),
Validator('general.adaptive_searching', must_exist=True, default=True, is_type_of=bool),
Validator('general.adaptive_searching_delay', must_exist=True, default='3w', is_type_of=str,
is_in=['1w', '2w', '3w', '4w']),
Validator('general.adaptive_searching_delta', must_exist=True, default='1w', is_type_of=str,
is_in=['3d', '1w', '2w', '3w', '4w']),
Validator('general.enabled_providers', must_exist=True, default=[], is_type_of=list),
Validator('general.multithreading', must_exist=True, default=True, is_type_of=bool),
Validator('general.chmod_enabled', must_exist=True, default=False, is_type_of=bool),
Validator('general.chmod', must_exist=True, default='0640', is_type_of=str),
Validator('general.subfolder', must_exist=True, default='current', is_type_of=str),
Validator('general.subfolder_custom', must_exist=True, default='', is_type_of=str),
Validator('general.upgrade_subs', must_exist=True, default=True, is_type_of=bool),
Validator('general.upgrade_frequency', must_exist=True, default=12, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.days_to_upgrade_subs', must_exist=True, default=7, is_type_of=int, gte=0, lte=30),
Validator('general.upgrade_manual', must_exist=True, default=True, is_type_of=bool),
Validator('general.anti_captcha_provider', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'anti-captcha', 'death-by-captcha']),
Validator('general.wanted_search_frequency', must_exist=True, default=6, is_type_of=int, is_in=[6, 12, 24]),
Validator('general.wanted_search_frequency_movie', must_exist=True, default=6, is_type_of=int,
is_in=[6, 12, 24]),
Validator('general.subzero_mods', must_exist=True, default='', is_type_of=str),
Validator('general.dont_notify_manual_actions', must_exist=True, default=False, is_type_of=bool),
Validator('general.hi_extension', must_exist=True, default='hi', is_type_of=str, is_in=['hi', 'cc', 'sdh']),
Validator('general.embedded_subtitles_parser', must_exist=True, default='ffprobe', is_type_of=str,
is_in=['ffprobe', 'mediainfo']),
Validator('general.default_und_audio_lang', must_exist=True, default='', is_type_of=str),
Validator('general.default_und_embedded_subtitles_lang', must_exist=True, default='', is_type_of=str),
Validator('general.parse_embedded_audio_track', must_exist=True, default=False, is_type_of=bool),
Validator('general.skip_hashing', must_exist=True, default=False, is_type_of=bool),
Validator('general.language_equals', must_exist=True, default=[], is_type_of=list),
# auth section
Validator('auth.apikey', must_exist=True, default=hexlify(os.urandom(16)).decode(), is_type_of=str),
Validator('auth.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'basic', 'form']),
Validator('auth.username', must_exist=True, default='', is_type_of=str),
Validator('auth.password', must_exist=True, default='', is_type_of=str),
# cors section
Validator('cors.enabled', must_exist=True, default=False, is_type_of=bool),
# backup section
Validator('backup.folder', must_exist=True, default=os.path.join(args.config_dir, 'backup'),
is_type_of=str),
Validator('backup.retention', must_exist=True, default=31, is_type_of=int, gte=0),
Validator('backup.frequency', must_exist=True, default='Weekly', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('backup.day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('backup.hour', must_exist=True, default=3, is_type_of=int, gte=0, lte=23),
# sonarr section
Validator('sonarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('sonarr.port', must_exist=True, default=8989, is_type_of=int, gte=1, lte=65535),
Validator('sonarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('sonarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('sonarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('sonarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('sonarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('sonarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('sonarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.series_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('sonarr.episodes_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('sonarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.excluded_series_types', must_exist=True, default=[], is_type_of=list),
Validator('sonarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('sonarr.exclude_season_zero', must_exist=True, default=False, is_type_of=bool),
Validator('sonarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
# radarr section
Validator('radarr.ip', must_exist=True, default='127.0.0.1', is_type_of=str),
Validator('radarr.port', must_exist=True, default=7878, is_type_of=int, gte=1, lte=65535),
Validator('radarr.base_url', must_exist=True, default='/', is_type_of=str),
Validator('radarr.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.http_timeout', must_exist=True, default=60, is_type_of=int,
is_in=[60, 120, 180, 240, 300, 600]),
Validator('radarr.apikey', must_exist=True, default='', is_type_of=str),
Validator('radarr.full_update', must_exist=True, default='Daily', is_type_of=str,
is_in=['Manually', 'Daily', 'Weekly']),
Validator('radarr.full_update_day', must_exist=True, default=6, is_type_of=int, gte=0, lte=6),
Validator('radarr.full_update_hour', must_exist=True, default=4, is_type_of=int, gte=0, lte=23),
Validator('radarr.only_monitored', must_exist=True, default=False, is_type_of=bool),
Validator('radarr.movies_sync', must_exist=True, default=60, is_type_of=int,
is_in=[15, 60, 180, 360, 720, 1440]),
Validator('radarr.excluded_tags', must_exist=True, default=[], is_type_of=list),
Validator('radarr.use_ffprobe_cache', must_exist=True, default=True, is_type_of=bool),
Validator('radarr.defer_search_signalr', must_exist=True, default=False, is_type_of=bool),
# proxy section
Validator('proxy.type', must_exist=True, default=None, is_type_of=(NoneType, str),
is_in=[None, 'socks5', 'http']),
Validator('proxy.url', must_exist=True, default='', is_type_of=str),
Validator('proxy.port', must_exist=True, default='', is_type_of=(str, int)),
Validator('proxy.username', must_exist=True, default='', is_type_of=str),
Validator('proxy.password', must_exist=True, default='', is_type_of=str),
Validator('proxy.exclude', must_exist=True, default=["localhost", "127.0.0.1"], is_type_of=list),
# opensubtitles.org section
Validator('opensubtitles.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitles.use_tag_search', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.vip', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.ssl', must_exist=True, default=False, is_type_of=bool),
Validator('opensubtitles.timeout', must_exist=True, default=15, is_type_of=int, gte=1),
Validator('opensubtitles.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# opensubtitles.com section
Validator('opensubtitlescom.username', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.password', must_exist=True, default='', is_type_of=str),
Validator('opensubtitlescom.use_hash', must_exist=True, default=True, is_type_of=bool),
# addic7ed section
Validator('addic7ed.username', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.password', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.cookies', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.user_agent', must_exist=True, default='', is_type_of=str),
Validator('addic7ed.vip', must_exist=True, default=False, is_type_of=bool),
# podnapisi section
Validator('podnapisi.verify_ssl', must_exist=True, default=True, is_type_of=bool),
# subf2m section
Validator('subf2m.verify_ssl', must_exist=True, default=True, is_type_of=bool),
Validator('subf2m.user_agent', must_exist=True, default='', is_type_of=str),
# hdbits section
Validator('hdbits.username', must_exist=True, default='', is_type_of=str),
Validator('hdbits.passkey', must_exist=True, default='', is_type_of=str),
# whisperai section
Validator('whisperai.endpoint', must_exist=True, default='http://127.0.0.1:9000', is_type_of=str),
Validator('whisperai.timeout', must_exist=True, default=3600, is_type_of=int, gte=1),
# legendasdivx section
Validator('legendasdivx.username', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.password', must_exist=True, default='', is_type_of=str),
Validator('legendasdivx.skip_wrong_fps', must_exist=True, default=False, is_type_of=bool),
# ktuvit section
Validator('ktuvit.email', must_exist=True, default='', is_type_of=str),
Validator('ktuvit.hashed_password', must_exist=True, default='', is_type_of=str),
# xsubs section
Validator('xsubs.username', must_exist=True, default='', is_type_of=str),
Validator('xsubs.password', must_exist=True, default='', is_type_of=str),
# assrt section
Validator('assrt.token', must_exist=True, default='', is_type_of=str),
# anticaptcha section
Validator('anticaptcha.anti_captcha_key', must_exist=True, default='', is_type_of=str),
# deathbycaptcha section
Validator('deathbycaptcha.username', must_exist=True, default='', is_type_of=str),
Validator('deathbycaptcha.password', must_exist=True, default='', is_type_of=str),
# napisy24 section
Validator('napisy24.username', must_exist=True, default='', is_type_of=str),
Validator('napisy24.password', must_exist=True, default='', is_type_of=str),
# subscene section
Validator('subscene.username', must_exist=True, default='', is_type_of=str),
Validator('subscene.password', must_exist=True, default='', is_type_of=str),
# betaseries section
Validator('betaseries.token', must_exist=True, default='', is_type_of=str),
# analytics section
Validator('analytics.enabled', must_exist=True, default=True, is_type_of=bool),
# titlovi section
Validator('titlovi.username', must_exist=True, default='', is_type_of=str),
Validator('titlovi.password', must_exist=True, default='', is_type_of=str),
# titulky section
Validator('titulky.username', must_exist=True, default='', is_type_of=str),
Validator('titulky.password', must_exist=True, default='', is_type_of=str),
Validator('titulky.approved_only', must_exist=True, default=False, is_type_of=bool),
# embeddedsubtitles section
Validator('embeddedsubtitles.included_codecs', must_exist=True, default=[], is_type_of=list),
Validator('embeddedsubtitles.hi_fallback', must_exist=True, default=False, is_type_of=bool),
Validator('embeddedsubtitles.timeout', must_exist=True, default=600, is_type_of=int, gte=1),
Validator('embeddedsubtitles.unknown_as_english', must_exist=True, default=False, is_type_of=bool),
# karagarga section
Validator('karagarga.username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.password', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_username', must_exist=True, default='', is_type_of=str),
Validator('karagarga.f_password', must_exist=True, default='', is_type_of=str),
# subsync section
Validator('subsync.use_subsync', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.use_subsync_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_threshold', must_exist=True, default=90, is_type_of=int, gte=0, lte=100),
Validator('subsync.use_subsync_movie_threshold', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.subsync_movie_threshold', must_exist=True, default=70, is_type_of=int, gte=0, lte=100),
Validator('subsync.debug', must_exist=True, default=False, is_type_of=bool),
Validator('subsync.force_audio', must_exist=True, default=False, is_type_of=bool),
# series_scores section
Validator('series_scores.hash', must_exist=True, default=359, is_type_of=int),
Validator('series_scores.series', must_exist=True, default=180, is_type_of=int),
Validator('series_scores.year', must_exist=True, default=90, is_type_of=int),
Validator('series_scores.season', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.episode', must_exist=True, default=30, is_type_of=int),
Validator('series_scores.release_group', must_exist=True, default=14, is_type_of=int),
Validator('series_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('series_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('series_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('series_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('series_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# movie_scores section
Validator('movie_scores.hash', must_exist=True, default=119, is_type_of=int),
Validator('movie_scores.title', must_exist=True, default=60, is_type_of=int),
Validator('movie_scores.year', must_exist=True, default=30, is_type_of=int),
Validator('movie_scores.release_group', must_exist=True, default=13, is_type_of=int),
Validator('movie_scores.source', must_exist=True, default=7, is_type_of=int),
Validator('movie_scores.audio_codec', must_exist=True, default=3, is_type_of=int),
Validator('movie_scores.resolution', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.video_codec', must_exist=True, default=2, is_type_of=int),
Validator('movie_scores.streaming_service', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.edition', must_exist=True, default=1, is_type_of=int),
Validator('movie_scores.hearing_impaired', must_exist=True, default=1, is_type_of=int),
# postgresql section
Validator('postgresql.enabled', must_exist=True, default=False, is_type_of=bool),
Validator('postgresql.host', must_exist=True, default='localhost', is_type_of=str),
Validator('postgresql.port', must_exist=True, default=5432, is_type_of=int, gte=1, lte=65535),
Validator('postgresql.database', must_exist=True, default='', is_type_of=str),
Validator('postgresql.username', must_exist=True, default='', is_type_of=str),
Validator('postgresql.password', must_exist=True, default='', is_type_of=str),
]
def convert_ini_to_yaml(config_file):
import configparser
import yaml
config_object = configparser.ConfigParser()
file = open(config_file, "r")
config_object.read_file(file)
output_dict = dict()
sections = config_object.sections()
for section in sections:
items = config_object.items(section)
output_dict[section] = dict()
for item in items:
try:
output_dict[section].update({item[0]: ast.literal_eval(item[1])})
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
output_dict[section].update({item[0]: item[1]})
with open(os.path.join(os.path.dirname(config_file), 'config.yaml'), 'w') as file:
yaml.dump(output_dict, file)
os.rename(config_file, config_file + '.old')
config_yaml_file = os.path.join(args.config_dir, 'config', 'config.yaml')
config_ini_file = os.path.join(args.config_dir, 'config', 'config.ini')
if os.path.exists(config_ini_file) and not os.path.exists(config_yaml_file):
convert_ini_to_yaml(config_ini_file)
elif not os.path.exists(config_yaml_file):
if not os.path.isdir(os.path.dirname(config_yaml_file)):
os.makedirs(os.path.dirname(config_yaml_file))
open(config_yaml_file, mode='w').close()
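Note: convert_ini_to_yaml above leans on ast.literal_eval to recover types from configparser's all-string values, falling back to the raw string for anything that isn't a Python literal. A small illustration of that fallback:

    import ast

    for raw in ('6767', 'True', "['localhost', '127.0.0.1']", 'master'):
        try:
            value = ast.literal_eval(raw)
        except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
            value = raw                  # not a Python literal: keep the string
        print(repr(value))               # 6767, True, ['localhost', '127.0.0.1'], 'master'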
settings = Dynaconf(
settings_file=config_yaml_file,
core_loaders=['YAML'],
apply_default_on_none=True,
)
settings.validators.register(*validators)
failed_validator = True
while failed_validator:
try:
settings.validators.validate_all()
failed_validator = False
except ValidationError as e:
current_validator_details = e.details[0][0]
if hasattr(current_validator_details, 'default') and current_validator_details.default is not empty:
settings[current_validator_details.names[0]] = current_validator_details.default
else:
logging.critical(f"Value for {current_validator_details.names[0]} doesn't pass validation and there's no "
f"default value. This issue must be reported. Bazarr won't work until it's fixed.")
os._exit(0)
def write_config():
write(settings_path=config_yaml_file,
settings_data={k.lower(): v for k, v in settings.as_dict().items()},
merge=False)
settings = SimpleConfigParser(defaults=defaults, interpolation=None)
settings.read(os.path.join(args.config_dir, 'config', 'config.ini'))
settings.general.base_url = settings.general.base_url if settings.general.base_url else '/'
base_url = settings.general.base_url.rstrip('/')
ignore_keys = ['flask_secret_key']
raw_keys = ['movie_default_forced', 'serie_default_forced']
array_keys = ['excluded_tags',
'exclude',
'included_codecs',
@@ -305,79 +401,50 @@ array_keys = ['excluded_tags',
'path_mappings_movie',
'language_equals']
str_keys = ['chmod']
empty_values = ['', 'None', 'null', 'undefined', None, []]
str_keys = ['chmod']
# Increase Sonarr and Radarr sync interval since we now use SignalR feed to update in real time
if int(settings.sonarr.series_sync) < 15:
settings.sonarr.series_sync = "60"
if int(settings.sonarr.episodes_sync) < 15:
settings.sonarr.episodes_sync = "60"
if int(settings.radarr.movies_sync) < 15:
settings.radarr.movies_sync = "60"
if settings.sonarr.series_sync < 15:
settings.sonarr.series_sync = 60
if settings.sonarr.episodes_sync < 15:
settings.sonarr.episodes_sync = 60
if settings.radarr.movies_sync < 15:
settings.radarr.movies_sync = 60
# Make sure to get rid of double slashes in base_url
settings.general.base_url = base_url_slash_cleaner(uri=settings.general.base_url)
settings.sonarr.base_url = base_url_slash_cleaner(uri=settings.sonarr.base_url)
settings.radarr.base_url = base_url_slash_cleaner(uri=settings.radarr.base_url)
# fixing issue with improper page_size value
if settings.general.page_size not in ['25', '50', '100', '250', '500', '1000']:
settings.general.page_size = defaults['general']['page_size']
# increase delay between searches to reduce impact on providers
if settings.general.wanted_search_frequency == '3':
settings.general.wanted_search_frequency = '6'
if settings.general.wanted_search_frequency_movie == '3':
settings.general.wanted_search_frequency_movie = '6'
if settings.general.wanted_search_frequency == 3:
settings.general.wanted_search_frequency = 6
if settings.general.wanted_search_frequency_movie == 3:
settings.general.wanted_search_frequency_movie = 6
# save updated settings to file
if os.path.exists(os.path.join(args.config_dir, 'config', 'config.ini')):
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
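Note: this single write_config() call replaces the old pattern of reopening and rewriting config.ini after every batch of mutations; code now edits the in-memory Dynaconf tree and persists once. A hedged usage sketch:

    settings.sonarr.series_sync = 60          # mutate in memory
    settings.general.base_url = '/'
    write_config()                            # dump the whole tree back to config.yaml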
def get_settings():
result = dict()
sections = settings.sections()
for sec in sections:
sec_values = settings.items(sec, False)
values_dict = dict()
for sec_val in sec_values:
key = sec_val[0]
value = sec_val[1]
if key in ignore_keys:
continue
if key not in raw_keys:
# Do some postprocessing
if value in empty_values:
if key in array_keys:
value = []
else:
continue
elif key in array_keys:
value = get_array_from(value)
elif value == 'True':
value = True
elif value == 'False':
value = False
# return {k.lower(): v for k, v in settings.as_dict().items()}
settings_to_return = {}
for k, v in settings.as_dict().items():
if isinstance(v, dict):
k = k.lower()
settings_to_return[k] = dict()
for subk, subv in v.items():
if subk.lower() in ignore_keys:
continue
if subv in empty_values and subk.lower() in array_keys:
settings_to_return[k].update({subk: []})
elif subk == 'subzero_mods':
settings_to_return[k].update({subk: get_array_from(subv)})
else:
if key not in str_keys:
try:
value = int(value)
except ValueError:
pass
values_dict[key] = value
result[sec] = values_dict
return result
settings_to_return[k].update({subk: subv})
return settings_to_return
def save_settings(settings_items):
@@ -408,24 +475,31 @@ def save_settings(settings_items):
settings_keys = key.split('-')
# Make sure that text based form values aren't pass as list
# Make sure that text based form values aren't passed as list
if isinstance(value, list) and len(value) == 1 and settings_keys[-1] not in array_keys:
value = value[0]
if value in empty_values and value != '':
value = None
# try to cast string as integer
if isinstance(value, str) and settings_keys[-1] not in str_keys:
try:
value = int(value)
except ValueError:
pass
# Make sure empty language lists are stored correctly
if settings_keys[-1] in array_keys and value[0] in empty_values:
value = []
# Handle path mappings settings since they are arrays within an array
if settings_keys[-1] in ['path_mappings', 'path_mappings_movie']:
value = [v.split(',') for v in value]
value = [x.split(',') for x in value if isinstance(x, str)]
if value == 'true':
value = 'True'
value = True
elif value == 'false':
value = 'False'
value = False
if key in ['settings-general-use_embedded_subs', 'settings-general-ignore_pgs_subs',
'settings-general-ignore_vobsub_subs', 'settings-general-ignore_ass_subs']:
@@ -553,14 +627,13 @@ def save_settings(settings_items):
reset_throttled_providers(only_auth_or_conf_error=True)
if settings_keys[0] == 'settings':
settings[settings_keys[1]][settings_keys[2]] = str(value)
settings[settings_keys[1]][settings_keys[2]] = value
if settings_keys[0] == 'subzero':
mod = settings_keys[1]
enabled = value == 'True'
if mod in subzero_mods and not enabled:
if mod in subzero_mods and not value:
subzero_mods.remove(mod)
elif enabled:
elif value:
subzero_mods.append(mod)
# Handle color
@@ -581,77 +654,82 @@ def save_settings(settings_items):
from .scheduler import scheduler
from subtitles.indexer.series import list_missing_subtitles
from subtitles.indexer.movies import list_missing_subtitles_movies
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
if undefined_subtitles_track_default_changed:
from .scheduler import scheduler
from subtitles.indexer.series import series_full_scan_subtitles
from subtitles.indexer.movies import movies_full_scan_subtitles
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.add_job(series_full_scan_subtitles, kwargs={'use_cache': True})
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.add_job(movies_full_scan_subtitles, kwargs={'use_cache': True})
if audio_tracks_parsing_changed:
from .scheduler import scheduler
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
from sonarr.sync.series import update_series
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
from radarr.sync.movies import update_movies
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)
if update_subzero:
settings.set('general', 'subzero_mods', ','.join(subzero_mods))
settings.general.subzero_mods = ','.join(subzero_mods)
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
try:
settings.validators.validate()
except ValidationError:
settings.reload()
raise
else:
write_config()
# Reconfigure Bazarr to reflect changes
if configure_debug:
from .logger import configure_logging
configure_logging(settings.general.getboolean('debug') or args.debug)
# Reconfigure Bazarr to reflect changes
if configure_debug:
from .logger import configure_logging
configure_logging(settings.general.debug or args.debug)
if configure_captcha:
configure_captcha_func()
if configure_captcha:
configure_captcha_func()
if update_schedule:
from .scheduler import scheduler
from .event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if update_schedule:
from .scheduler import scheduler
from .event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if sonarr_changed:
from .signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
except Exception:
pass
if sonarr_changed:
from .signalr_client import sonarr_signalr_client
try:
sonarr_signalr_client.restart()
except Exception:
pass
if radarr_changed:
from .signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
except Exception:
pass
if radarr_changed:
from .signalr_client import radarr_signalr_client
try:
radarr_signalr_client.restart()
except Exception:
pass
if update_path_map:
from utilities.path_mappings import path_mappings
path_mappings.update()
if update_path_map:
from utilities.path_mappings import path_mappings
path_mappings.update()
if configure_proxy:
configure_proxy_func()
if configure_proxy:
configure_proxy_func()
if exclusion_updated:
from .event_handler import event_stream
event_stream(type='badges')
if sonarr_exclusion_updated:
event_stream(type='reset-episode-wanted')
if radarr_exclusion_updated:
event_stream(type='reset-movie-wanted')
if exclusion_updated:
from .event_handler import event_stream
event_stream(type='badges')
if sonarr_exclusion_updated:
event_stream(type='reset-episode-wanted')
if radarr_exclusion_updated:
event_stream(type='reset-movie-wanted')
def get_array_from(property):
@@ -681,15 +759,15 @@ def configure_captcha_func():
def configure_proxy_func():
if settings.proxy.type != 'None':
if settings.proxy.type:
if settings.proxy.username != '' and settings.proxy.password != '':
proxy = settings.proxy.type + '://' + quote_plus(settings.proxy.username) + ':' + \
quote_plus(settings.proxy.password) + '@' + settings.proxy.url + ':' + settings.proxy.port
quote_plus(settings.proxy.password) + '@' + settings.proxy.url + ':' + str(settings.proxy.port)
else:
proxy = settings.proxy.type + '://' + settings.proxy.url + ':' + settings.proxy.port
proxy = settings.proxy.type + '://' + settings.proxy.url + ':' + str(settings.proxy.port)
os.environ['HTTP_PROXY'] = str(proxy)
os.environ['HTTPS_PROXY'] = str(proxy)
exclude = ','.join(get_array_from(settings.proxy.exclude))
exclude = ','.join(settings.proxy.exclude)
os.environ['NO_PROXY'] = exclude
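Note: the added str() matters because the YAML loader can now hand back proxy.port as an int. A quick illustration of the URL being assembled (credentials made up):

    from urllib.parse import quote_plus

    proxy_type, url, port = 'http', '10.0.0.2', 3128      # port may be an int now
    username, password = 'user@example.com', 'p@ss:word'
    proxy = proxy_type + '://' + quote_plus(username) + ':' + \
            quote_plus(password) + '@' + url + ':' + str(port)
    print(proxy)   # http://user%40example.com:p%40ss%3Aword@10.0.0.2:3128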

View File

@@ -18,12 +18,16 @@ from sqlalchemy.pool import NullPool
from flask_sqlalchemy import SQLAlchemy
from .config import settings, get_array_from
from .config import settings
from .get_args import args
logger = logging.getLogger(__name__)
postgresql = (os.getenv("POSTGRES_ENABLED", settings.postgresql.enabled).lower() == 'true')
POSTGRES_ENABLED_ENV = os.getenv("POSTGRES_ENABLED")
if POSTGRES_ENABLED_ENV:
postgresql = POSTGRES_ENABLED_ENV.lower() == 'true'
else:
postgresql = settings.postgresql.enabled
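Note: the old one-liner only worked while settings.postgresql.enabled was the string 'True' or 'False'; with a native bool as the getenv() fallback, calling .lower() on it would raise AttributeError, hence the explicit branch. The same logic condensed, shown only for clarity:

    POSTGRES_ENABLED_ENV = os.getenv("POSTGRES_ENABLED")
    postgresql = (POSTGRES_ENABLED_ENV.lower() == 'true') if POSTGRES_ENABLED_ENV \
        else settings.postgresql.enabled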
region = make_region().configure('dogpile.cache.memory')
@@ -324,30 +328,30 @@ def migrate_db(app):
def get_exclusion_clause(exclusion_type):
where_clause = []
if exclusion_type == 'series':
tagsList = ast.literal_eval(settings.sonarr.excluded_tags)
tagsList = settings.sonarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableShows.tags.contains("\'" + tag + "\'")))
else:
tagsList = ast.literal_eval(settings.radarr.excluded_tags)
tagsList = settings.radarr.excluded_tags
for tag in tagsList:
where_clause.append(~(TableMovies.tags.contains("\'" + tag + "\'")))
if exclusion_type == 'series':
monitoredOnly = settings.sonarr.getboolean('only_monitored')
monitoredOnly = settings.sonarr.only_monitored
if monitoredOnly:
where_clause.append((TableEpisodes.monitored == 'True')) # noqa E712
where_clause.append((TableShows.monitored == 'True')) # noqa E712
else:
monitoredOnly = settings.radarr.getboolean('only_monitored')
monitoredOnly = settings.radarr.only_monitored
if monitoredOnly:
where_clause.append((TableMovies.monitored == 'True')) # noqa E712
if exclusion_type == 'series':
typesList = get_array_from(settings.sonarr.excluded_series_types)
typesList = settings.sonarr.excluded_series_types
for item in typesList:
where_clause.append((TableShows.seriesType != item))
exclude_season_zero = settings.sonarr.getboolean('exclude_season_zero')
exclude_season_zero = settings.sonarr.exclude_season_zero
if exclude_season_zero:
where_clause.append((TableEpisodes.season != 0))
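Note: with excluded_tags and excluded_series_types now arriving as real lists, the ast.literal_eval / get_array_from round-trips disappear and the filters build directly. A hedged sketch in the SQLAlchemy 2.0 style used elsewhere in this commit:

    from sqlalchemy import select

    stmt = select(TableShows.title)
    for tag in settings.sonarr.excluded_tags:             # native YAML list
        stmt = stmt.where(~TableShows.tags.contains("'" + tag + "'"))
    for item in settings.sonarr.excluded_series_types:
        stmt = stmt.where(TableShows.seriesType != item)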

View File

@@ -1,6 +1,5 @@
# coding=utf-8
import ast
import os
import datetime
import pytz
@@ -21,7 +20,7 @@ from subliminal import region as subliminal_cache_region
from subliminal_patch.extensions import provider_registry
from app.get_args import args
from app.config import settings, get_array_from
from app.config import settings
from languages.get_languages import CustomLanguage
from app.event_handler import event_stream
from utilities.binaries import get_binary
@@ -126,7 +125,7 @@ throttle_count = {}
def provider_pool():
if settings.general.getboolean('multithreading'):
if settings.general.multithreading:
return subliminal_patch.core.SZAsyncProviderPool
return subliminal_patch.core.SZProviderPool
@@ -157,7 +156,7 @@ def _lang_from_str(content: str):
def get_language_equals(settings_=None):
settings_ = settings_ or settings
equals = get_array_from(settings_.general.language_equals)
equals = settings_.general.language_equals
if not equals:
return []
@@ -177,7 +176,7 @@ def get_language_equals(settings_=None):
def get_providers():
providers_list = []
existing_providers = provider_registry.names()
providers = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
providers_list.append(provider)
@@ -205,9 +204,9 @@ def get_providers():
def get_enabled_providers():
# return enabled providers, including those that can be throttled
try:
return ast.literal_eval(settings.general.enabled_providers)
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
if isinstance(settings.general.enabled_providers, list):
return settings.general.enabled_providers
else:
return []
@@ -222,32 +221,28 @@ def get_providers_auth():
'password': settings.addic7ed.password,
'cookies': settings.addic7ed.cookies,
'user_agent': settings.addic7ed.user_agent,
'is_vip': settings.addic7ed.getboolean('vip'),
'is_vip': settings.addic7ed.vip,
},
'opensubtitles': {
'username': settings.opensubtitles.username,
'password': settings.opensubtitles.password,
'use_tag_search': settings.opensubtitles.getboolean(
'use_tag_search'
),
'use_tag_search': settings.opensubtitles.use_tag_search,
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'is_vip': settings.opensubtitles.getboolean('vip'),
'use_ssl': settings.opensubtitles.getboolean('ssl'),
'is_vip': settings.opensubtitles.vip,
'use_ssl': settings.opensubtitles.ssl,
'timeout': int(settings.opensubtitles.timeout) or 15,
'skip_wrong_fps': settings.opensubtitles.getboolean(
'skip_wrong_fps'
),
'skip_wrong_fps': settings.opensubtitles.skip_wrong_fps,
},
'opensubtitlescom': {'username': settings.opensubtitlescom.username,
'password': settings.opensubtitlescom.password,
'use_hash': settings.opensubtitlescom.getboolean('use_hash'),
'use_hash': settings.opensubtitlescom.use_hash,
'api_key': 's38zmzVlW7IlYruWi7mHwDYl2SfMQoC1'
},
'podnapisi': {
'only_foreign': False, # fixme
'also_foreign': False, # fixme
'verify_ssl': settings.podnapisi.getboolean('verify_ssl')
'verify_ssl': settings.podnapisi.verify_ssl
},
'subscene': {
'username': settings.subscene.username,
@@ -257,9 +252,7 @@ def get_providers_auth():
'legendasdivx': {
'username': settings.legendasdivx.username,
'password': settings.legendasdivx.password,
'skip_wrong_fps': settings.legendasdivx.getboolean(
'skip_wrong_fps'
),
'skip_wrong_fps': settings.legendasdivx.skip_wrong_fps,
},
'xsubs': {
'username': settings.xsubs.username,
@@ -276,7 +269,7 @@ def get_providers_auth():
'titulky': {
'username': settings.titulky.username,
'password': settings.titulky.password,
'approved_only': settings.titulky.getboolean('approved_only'),
'approved_only': settings.titulky.approved_only,
},
'titlovi': {
'username': settings.titlovi.username,
@@ -287,13 +280,13 @@ def get_providers_auth():
'hashed_password': settings.ktuvit.hashed_password,
},
'embeddedsubtitles': {
'included_codecs': get_array_from(settings.embeddedsubtitles.included_codecs),
'hi_fallback': settings.embeddedsubtitles.getboolean('hi_fallback'),
'included_codecs': settings.embeddedsubtitles.included_codecs,
'hi_fallback': settings.embeddedsubtitles.hi_fallback,
'cache_dir': os.path.join(args.config_dir, "cache"),
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_english': settings.embeddedsubtitles.getboolean('unknown_as_english'),
'unknown_as_english': settings.embeddedsubtitles.unknown_as_english,
},
'karagarga': {
'username': settings.karagarga.username,
@@ -306,7 +299,7 @@ def get_providers_auth():
'passkey': settings.hdbits.passkey,
},
'subf2m': {
'verify_ssl': settings.subf2m.getboolean('verify_ssl'),
'verify_ssl': settings.subf2m.verify_ssl,
'user_agent': settings.subf2m.user_agent,
},
'whisperai': {
@@ -414,7 +407,7 @@ def throttled_count(name):
def update_throttled_provider():
existing_providers = provider_registry.names()
providers_list = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers_list = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in list(tp):
if provider not in providers_list:
@@ -448,7 +441,7 @@ def list_throttled_providers():
update_throttled_provider()
throttled_providers = []
existing_providers = provider_registry.names()
providers = [x for x in get_array_from(settings.general.enabled_providers) if x in existing_providers]
providers = [x for x in settings.general.enabled_providers if x in existing_providers]
for provider in providers:
reason, until, throttle_desc = tp.get(provider, (None, None, None))
throttled_providers.append([provider, reason, pretty.date(until)])

View File

@@ -160,14 +160,14 @@ class Scheduler:
return task_list
def __sonarr_update_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
update_series, IntervalTrigger(minutes=int(settings.sonarr.series_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_series', name='Sync with Sonarr',
replace_existing=True)
def __radarr_update_task(self):
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.aps_scheduler.add_job(
update_movies, IntervalTrigger(minutes=int(settings.radarr.movies_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_movies', name='Sync with Radarr',
@@ -200,7 +200,7 @@ class Scheduler:
pass
def __sonarr_full_update_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
full_update = settings.sonarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
@@ -220,7 +220,7 @@ class Scheduler:
name='Index all Episode Subtitles from disk', replace_existing=True)
def __radarr_full_update_task(self):
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
full_update = settings.radarr.full_update
if full_update == "Daily":
self.aps_scheduler.add_job(
@@ -242,7 +242,7 @@ class Scheduler:
if not args.no_update and os.environ["BAZARR_VERSION"] != '':
task_name = 'Update Bazarr'
if settings.general.getboolean('auto_update'):
if settings.general.auto_update:
self.aps_scheduler.add_job(
check_if_new_update, IntervalTrigger(hours=6), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_bazarr', name=task_name, replace_existing=True)
@@ -264,13 +264,13 @@ class Scheduler:
id='update_announcements', name='Update Announcements File', replace_existing=True)
def __search_wanted_subtitles_task(self):
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_series,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency)), max_instances=1, coalesce=True,
misfire_grace_time=15, id='wanted_search_missing_subtitles_series', replace_existing=True,
name='Search for wanted Series Subtitles')
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.aps_scheduler.add_job(
wanted_search_missing_subtitles_movies,
IntervalTrigger(hours=int(settings.general.wanted_search_frequency_movie)), max_instances=1,
@@ -278,8 +278,8 @@ class Scheduler:
name='Search for wanted Movies Subtitles', replace_existing=True)
def __upgrade_subtitles_task(self):
if settings.general.getboolean('upgrade_subs') and \
(settings.general.getboolean('use_sonarr') or settings.general.getboolean('use_radarr')):
if settings.general.upgrade_subs and \
(settings.general.use_sonarr or settings.general.use_radarr):
self.aps_scheduler.add_job(
upgrade_subtitles, IntervalTrigger(hours=int(settings.general.upgrade_frequency)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='upgrade_subtitles',
@@ -303,9 +303,9 @@ scheduler = Scheduler()
# Force the execution of the sync process with Sonarr and Radarr after migration to v0.9.1
if 'BAZARR_AUDIO_PROFILES_MIGRATION' in os.environ:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
scheduler.aps_scheduler.modify_job('update_series', next_run_time=datetime.now())
scheduler.aps_scheduler.modify_job('sync_episodes', next_run_time=datetime.now())
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
scheduler.aps_scheduler.modify_job('update_movies', next_run_time=datetime.now())
del os.environ['BAZARR_AUDIO_PROFILES_MIGRATION']

View File

@@ -86,7 +86,7 @@ class SonarrSignalrClientLegacy:
if self.connection:
if self.connection.started:
self.stop(log=False)
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
@@ -133,7 +133,7 @@ class SonarrSignalrClient:
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
self.start()
def exception_handler(self):
@@ -200,7 +200,7 @@ class RadarrSignalrClient:
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
self.start()
def exception_handler(self):
@@ -300,11 +300,11 @@ def dispatcher(data):
elif topic == 'episode':
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.getboolean('defer_search_signalr'))
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.defer_search_signalr)
elif topic == 'movie':
logging.debug(f'Event received from Radarr for movie: {movie_title} ({movie_year})')
update_one_movie(movie_id=media_id, action=action,
defer_search=settings.radarr.getboolean('defer_search_signalr'))
defer_search=settings.radarr.defer_search_signalr)
except Exception as e:
logging.debug('BAZARR an exception occurred while parsing SignalR feed: {}'.format(repr(e)))
finally:

View File

@@ -11,7 +11,7 @@ import rarfile
from dogpile.cache.region import register_backend as register_cache_backend
from app.config import settings, configure_captcha_func, get_array_from
from app.config import settings, configure_captcha_func, write_config
from app.get_args import args
from app.logger import configure_logging
from utilities.binaries import get_binary, BinaryNotFound
@@ -62,7 +62,7 @@ configure_captcha_func()
from ga4mp import GtagMP # noqa E402
# configure logging
configure_logging(settings.general.getboolean('debug') or args.debug)
configure_logging(settings.general.debug or args.debug)
import logging # noqa E402
@@ -111,30 +111,14 @@ if not args.no_update:
restart_file.close()
os._exit(0)
# create random api_key if there's none in config.ini
if not settings.auth.apikey or settings.auth.apikey.startswith("b'"):
from binascii import hexlify
settings.auth.apikey = hexlify(os.urandom(16)).decode()
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
# create random Flask secret_key if there's none in config.ini
if not settings.general.flask_secret_key:
from binascii import hexlify
settings.general.flask_secret_key = hexlify(os.urandom(16)).decode()
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
# change default base_url to ''
settings.general.base_url = settings.general.base_url.rstrip('/')
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
# migrate enabled_providers from comma separated string to list
if isinstance(settings.general.enabled_providers, str) and not settings.general.enabled_providers.startswith('['):
settings.general.enabled_providers = str(settings.general.enabled_providers.split(","))
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
# Read package_info (if exists) to override some settings by package maintainers
# This file can also provide some info about the package version and author
@@ -166,8 +150,7 @@ if os.path.isfile(package_info_file):
except Exception:
pass
else:
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
write_config()
# Configure dogpile file caching for Subliminal request
register_cache_backend("subzero.cache.file", "subzero.cache_backends.file", "SZFileBackend")
@@ -186,30 +169,24 @@ if not os.path.exists(os.path.join(args.config_dir, 'config', 'announcements.txt
get_announcements_to_file()
logging.debug("BAZARR Created announcements file")
config_file = os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini'))
# Move GA visitor from config.ini to dedicated file
if settings.analytics.visitor:
# Move GA visitor from config to dedicated file
if 'visitor' in settings.analytics:
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'analytics.dat')), 'w+') as handle:
handle.write(settings.analytics.visitor)
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini')), 'w+') as handle:
settings.remove_option('analytics', 'visitor')
settings.write(handle)
settings['analytics'].pop('visitor', None)
# Clean unused settings from config.ini
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini')), 'w+') as handle:
settings.remove_option('general', 'throtteled_providers')
settings.remove_option('general', 'update_restart')
settings.write(handle)
# Clean unused settings from config
settings['general'].pop('throtteled_providers', None)
settings['general'].pop('update_restart', None)
write_config()
# Remove deprecated providers from enabled providers in config.ini
# Remove deprecated providers from enabled providers in config
from subliminal_patch.extensions import provider_registry # noqa E401
existing_providers = provider_registry.names()
enabled_providers = get_array_from(settings.general.enabled_providers)
settings.general.enabled_providers = str([x for x in enabled_providers if x in existing_providers])
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
enabled_providers = settings.general.enabled_providers
settings.general.enabled_providers = [x for x in enabled_providers if x in existing_providers]
write_config()
def init_binaries():

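The repeated open('config.ini') / settings.write(handle) blocks collapse into a single write_config() call imported from app.config. Its body isn't shown in this hunk; a plausible sketch, assuming it simply dumps the live settings back to config.yaml through Dynaconf's YAML writer (config_yaml_path is a placeholder):

from dynaconf import loaders

def write_config():
    # Assumed reconstruction: persist the in-memory settings to config.yaml.
    loaders.yaml_loader.write(config_yaml_path, settings.as_dict())
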
View File

@ -28,7 +28,7 @@ if bazarr_version != '':
apply_update()
# Check for new update and install latest
if args.no_update or not settings.general.getboolean('auto_update'):
if args.no_update or not settings.general.auto_update:
# user has explicitly requested that we do not update or is using some kind of package/docker that prevents it
check_releases()
else:
@ -74,9 +74,9 @@ login_auth = settings.auth.type
update_notifier()
if not args.no_signalr:
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
Thread(target=sonarr_signalr_client.start).start()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
Thread(target=radarr_signalr_client.start).start()

View File

@ -26,7 +26,7 @@ class GetRadarrInfo:
return radarr_version
else:
radarr_version = ''
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
try:
rv = url_radarr() + "/api/system/status?apikey=" + settings.radarr.apikey
radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()
@ -75,7 +75,7 @@ get_radarr_info = GetRadarrInfo()
def url_radarr():
if settings.radarr.getboolean('ssl'):
if settings.radarr.ssl:
protocol_radarr = "https"
else:
protocol_radarr = "http"

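With settings.radarr.ssl now a real boolean, url_radarr() reduces to picking a scheme and assembling a base URL. A condensed sketch (the ip/port field names are assumptions, and base_url handling is omitted; settings is the app.config object):

def url_radarr():
    # Scheme follows the ssl toggle shown in the hunk above.
    protocol = 'https' if settings.radarr.ssl else 'http'
    return f'{protocol}://{settings.radarr.ip}:{settings.radarr.port}'
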
View File

@ -68,7 +68,7 @@ def update_movies(send_event=True):
logging.debug('BAZARR Starting movie sync from Radarr.')
apikey_radarr = settings.radarr.apikey
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
movie_default_enabled = settings.general.movie_default_enabled
if movie_default_enabled is True:
movie_default_profile = settings.general.movie_default_profile
@ -179,7 +179,7 @@ def update_one_movie(movie_id, action, defer_search=False):
existing_movie.path)))
return
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
movie_default_enabled = settings.general.movie_default_enabled
if movie_default_enabled is True:
movie_default_profile = settings.general.movie_default_profile

View File

@ -92,7 +92,7 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
videoCodec = None
audioCodec = None
if settings.general.getboolean('parse_embedded_audio_track'):
if settings.general.parse_embedded_audio_track:
audio_language = embedded_audio_reader(path_mappings.path_replace_movie(movie['movieFile']['path']),
file_size=movie['movieFile']['size'],
movie_file_id=movie['movieFile']['id'],

View File

@ -26,7 +26,7 @@ class GetSonarrInfo:
return sonarr_version
else:
sonarr_version = ''
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
try:
sv = url_sonarr() + "/api/system/status?apikey=" + settings.sonarr.apikey
sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()
@ -75,7 +75,7 @@ get_sonarr_info = GetSonarrInfo()
def url_sonarr():
if settings.sonarr.getboolean('ssl'):
if settings.sonarr.ssl:
protocol_sonarr = "https"
else:
protocol_sonarr = "http"

View File

@ -32,7 +32,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
imdbId = show['imdbId'] if 'imdbId' in show else None
audio_language = []
if not settings.general.getboolean('parse_embedded_audio_track'):
if not settings.general.parse_embedded_audio_track:
if get_sonarr_info.is_legacy():
audio_language = profile_id_to_language(show['qualityProfileId'], audio_profiles)
else:
@ -98,7 +98,7 @@ def episodeParser(episode):
else:
sceneName = None
if settings.general.getboolean('parse_embedded_audio_track'):
if settings.general.parse_embedded_audio_track:
audio_language = embedded_audio_reader(path_mappings.path_replace(episode['episodeFile']
['path']),
file_size=episode['episodeFile']['size'],

View File

@ -23,7 +23,7 @@ def update_series(send_event=True):
if apikey_sonarr is None:
return
serie_default_enabled = settings.general.getboolean('serie_default_enabled')
serie_default_enabled = settings.general.serie_default_enabled
if serie_default_enabled is True:
serie_default_profile = settings.general.serie_default_profile
@ -134,7 +134,7 @@ def update_one_series(series_id, action):
event_stream(type='series', action='delete', payload=int(series_id))
return
serie_default_enabled = settings.general.getboolean('serie_default_enabled')
serie_default_enabled = settings.general.serie_default_enabled
if serie_default_enabled is True:
serie_default_profile = settings.general.serie_default_profile

View File

@ -23,7 +23,7 @@ def is_search_active(desired_language, attempt_string):
@rtype: bool
"""
if settings.general.getboolean('adaptive_searching'):
if settings.general.adaptive_searching:
logging.debug("Adaptive searching is enable, we'll see if it's time to search again...")
try:
# let's try to get a list of lists from the string representation in database

View File

@ -12,7 +12,7 @@ from subliminal_patch.core import save_subtitles
from subliminal_patch.core_persistent import download_best_subtitles
from subliminal_patch.score import ComputeScore
from app.config import settings, get_array_from, get_scores
from app.config import settings, get_scores
from app.database import TableEpisodes, TableMovies, database, select
from utilities.path_mappings import path_mappings
from utilities.helper import get_target_folder, force_unicode
@ -31,7 +31,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
logging.debug('BAZARR Searching subtitles for this file: ' + path)
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
@ -52,7 +52,7 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
minimum_score_movie = settings.general.minimum_score_movie
min_score, max_score, scores = _get_scores(media_type, minimum_score_movie, minimum_score)
subz_mods = get_array_from(settings.general.subzero_mods)
subz_mods = settings.general.subzero_mods
saved_any = False
if providers:
@ -86,9 +86,9 @@ def generate_subtitles(path, languages, audio_language, sceneName, title, media_
try:
fld = get_target_folder(path)
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
saved_subtitles = save_subtitles(video.original_path, subtitles,
single=settings.general.getboolean('single_language'),
single=settings.general.single_language,
tags=None, # fixme
directory=fld,
chmod=chmod,

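A detail worth noting in the chmod lines above: the permission string from the config is parsed with base 8, so int('0640', 8) yields the integer 0o640 that os.chmod expects, and the whole step is skipped on Windows. For example:

import os
import sys

# '0640' from the config becomes 0o640 (decimal 416); None disables chmod.
chmod = int('0640', 8) if not sys.platform.startswith('win') else None
if chmod:
    os.chmod('/tmp/example.srt', chmod)  # path is illustrative only
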
View File

@ -25,7 +25,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
actual_subtitles = []
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
if settings.general.use_embedded_subs:
logging.debug("BAZARR is trying to index embedded subtitles.")
item = database.execute(
select(TableMovies.movie_file_id, TableMovies.file_size)
@ -41,10 +41,10 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
if (settings.general.ignore_pgs_subs and subtitle_codec.lower() == "pgs") or \
(settings.general.ignore_vobsub_subs and subtitle_codec.lower() ==
"vobsub") or \
(settings.general.getboolean("ignore_ass_subs") and subtitle_codec.lower() ==
(settings.general.ignore_ass_subs and subtitle_codec.lower() ==
"ass"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@ -85,7 +85,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
only_one=settings.general.single_language)
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
if settings.general.subfolder == "absolute":
@ -168,7 +168,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
TableMovies.audio_language)) \
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
use_embedded_subs = settings.general.use_embedded_subs
for movie_subtitles in movies_subtitles:
missing_subtitles_text = '[]'
@ -264,7 +264,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
event_stream(type='badges')
def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe_cache')):
def movies_full_scan_subtitles(use_cache=settings.radarr.use_ffprobe_cache):
movies = database.execute(
select(TableMovies.path))\
.all()

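The three chained ignore_* checks above (duplicated in the episode indexer below) share one shape: skip an embedded track when its codec's ignore toggle is on. A compact equivalent using a lookup table (the helper name is hypothetical; settings is the app.config object):

IGNORED_CODECS = {
    'pgs': settings.general.ignore_pgs_subs,
    'vobsub': settings.general.ignore_vobsub_subs,
    'ass': settings.general.ignore_ass_subs,
}

def should_skip(subtitle_codec):
    # True only when the codec has a toggle and that toggle is enabled.
    return IGNORED_CODECS.get(subtitle_codec.lower(), False)
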
View File

@ -25,7 +25,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
logging.debug('BAZARR started subtitles indexing for this file: ' + reversed_path)
actual_subtitles = []
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
if settings.general.use_embedded_subs:
logging.debug("BAZARR is trying to index embedded subtitles.")
item = database.execute(
select(TableEpisodes.episode_file_id, TableEpisodes.file_size)
@ -41,10 +41,10 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
if (settings.general.getboolean("ignore_pgs_subs") and subtitle_codec.lower() == "pgs") or \
(settings.general.getboolean("ignore_vobsub_subs") and subtitle_codec.lower() ==
if (settings.general.ignore_pgs_subs and subtitle_codec.lower() == "pgs") or \
(settings.general.ignore_vobsub_subs and subtitle_codec.lower() ==
"vobsub") or \
(settings.general.getboolean("ignore_ass_subs") and subtitle_codec.lower() ==
(settings.general.ignore_ass_subs and subtitle_codec.lower() ==
"ass"):
logging.debug("BAZARR skipping %s sub for language: %s" % (subtitle_codec, alpha2_from_alpha3(subtitle_language)))
continue
@ -84,7 +84,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
only_one=settings.general.single_language)
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
if settings.general.subfolder == "absolute":
@ -168,7 +168,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
.where(episodes_subtitles_clause))\
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
use_embedded_subs = settings.general.use_embedded_subs
for episode_subtitles in episodes_subtitles:
missing_subtitles_text = '[]'
@ -266,7 +266,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
event_stream(type='badges')
def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe_cache')):
def series_full_scan_subtitles(use_cache=settings.sonarr.use_ffprobe_cache):
episodes = database.execute(
select(TableEpisodes.path))\
.all()

View File

@ -14,7 +14,7 @@ from subliminal_patch.core_persistent import list_all_subtitles, download_subtit
from subliminal_patch.score import ComputeScore
from languages.get_languages import alpha3_from_alpha2
from app.config import get_scores, settings, get_array_from
from app.config import get_scores, settings
from utilities.helper import get_target_folder, force_unicode
from app.database import get_profiles_list
@ -158,7 +158,7 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
use_original_format, profile_id):
logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path)
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
os.environ["SZ_KEEP_ENCODING"] = ""
else:
os.environ["SZ_KEEP_ENCODING"] = "True"
@ -174,7 +174,7 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
subtitle.language.forced = False
if use_original_format == 'True':
subtitle.use_original_format = use_original_format
subtitle.mods = get_array_from(settings.general.subzero_mods)
subtitle.mods = settings.general.subzero_mods
video = get_video(force_unicode(path), title, sceneName, providers={provider}, media_type=media_type)
if video:
try:
@ -193,9 +193,9 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
return 'No valid Subtitles file found'
try:
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
saved_subtitles = save_subtitles(video.original_path, [subtitle],
single=settings.general.getboolean('single_language'),
single=settings.general.single_language,
tags=None, # fixme
directory=get_target_folder(path),
chmod=chmod,

View File

@ -39,7 +39,7 @@ class ProcessSubtitlesResult:
def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_upgrade=False, is_manual=False):
use_postprocessing = settings.general.getboolean('use_postprocessing')
use_postprocessing = settings.general.use_postprocessing
postprocessing_cmd = settings.general.postprocessing_cmd
downloaded_provider = subtitle.provider_name
@ -109,10 +109,10 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
percent_score, subtitle_id, downloaded_provider, series_id, episode_id)
if media_type == 'series':
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold')
use_pp_threshold = settings.general.use_postprocessing_threshold
pp_threshold = int(settings.general.postprocessing_threshold)
else:
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold_movie')
use_pp_threshold = settings.general.use_postprocessing_threshold_movie
pp_threshold = int(settings.general.postprocessing_threshold_movie)
if not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold):

View File

@ -12,16 +12,16 @@ def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_s
sonarr_episode_id=None, radarr_id=None):
if forced:
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
elif not settings.subsync.getboolean('use_subsync'):
elif not settings.subsync.use_subsync:
logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.')
else:
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
f'subtitle file: {srt_path}.')
if media_type == 'series':
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
use_subsync_threshold = settings.subsync.use_subsync_threshold
subsync_threshold = settings.subsync.subsync_threshold
else:
use_subsync_threshold = settings.subsync.getboolean('use_subsync_movie_threshold')
use_subsync_threshold = settings.subsync.use_subsync_movie_threshold
subsync_threshold = settings.subsync.subsync_movie_threshold
if not use_subsync_threshold or (use_subsync_threshold and percent_score < float(subsync_threshold)):

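The sync gate above boils down to one predicate: run subsync unless a threshold is enabled and the score already meets it. Since not t or (t and s < x) simplifies to not t or s < x, an equivalent helper (name hypothetical) is:

def should_sync(percent_score, use_threshold, threshold):
    # Sync when thresholding is off, or when the score is still below it.
    return not use_threshold or percent_score < float(threshold)
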
View File

@ -19,7 +19,7 @@ def subtitles_apply_mods(language, subtitle_path, mods, use_original_format, vid
lang_obj = Language(language)
else:
lang_obj = custom.subzero_language()
single = settings.general.getboolean('single_language')
single = settings.general.single_language
sub = Subtitle(lang_obj, mods=mods, original_format=use_original_format)
with open(subtitle_path, 'rb') as f:

View File

@ -54,11 +54,11 @@ class SubSyncer:
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path, '--vad',
self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.getboolean('force_audio'):
if settings.subsync.force_audio:
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.getboolean('debug'):
if settings.subsync.debug:
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
@ -72,10 +72,10 @@ class SubSyncer:
'{0}'.format(self.srtin))
raise OSError
else:
if settings.subsync.getboolean('debug'):
if settings.subsync.debug:
return result
if os.path.isfile(self.srtout):
if not settings.subsync.getboolean('debug'):
if not settings.subsync.debug:
os.remove(self.srtin)
os.rename(self.srtout, self.srtin)

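For reference, with force_audio and debug both enabled, the unparsed_args list assembled above is equivalent to handing ffsubsync's parser something like the following (the paths and VAD value are illustrative, not Bazarr's actual defaults):

unparsed_args = [
    '/media/show.mkv', '-i', '/media/show.srt', '-o', '/media/show.synced.srt',
    '--ffmpegpath', '/usr/bin', '--vad', 'subs_then_webrtc',
    '--log-dir-path', '/config/log',
    '--no-fix-framerate', '--reference-stream', 'a:0',  # force_audio branch
    '--make-test-case',                                 # debug branch
]
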
View File

@ -24,8 +24,8 @@ from .download import generate_subtitles
def upgrade_subtitles():
use_sonarr = settings.general.getboolean('use_sonarr')
use_radarr = settings.general.getboolean('use_radarr')
use_sonarr = settings.general.use_sonarr
use_radarr = settings.general.use_radarr
if use_sonarr:
episodes_to_upgrade = get_upgradable_episode_subtitles()
@ -218,7 +218,7 @@ def get_queries_condition_parameters():
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = (datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
if settings.general.getboolean('upgrade_manual'):
if settings.general.upgrade_manual:
query_actions = [1, 2, 3, 4, 6]
else:
query_actions = [1, 3]
@ -244,7 +244,7 @@ def parse_language_string(language_string):
def get_upgradable_episode_subtitles():
if not settings.general.getboolean('upgrade_subs'):
if not settings.general.upgrade_subs:
# return an empty set of rows
return select(TableHistory.id) \
.where(TableHistory.id.is_(None)) \
@ -277,7 +277,7 @@ def get_upgradable_episode_subtitles():
def get_upgradable_movies_subtitles():
if not settings.general.getboolean('upgrade_subs'):
if not settings.general.upgrade_subs:
# return an empty set of rows
return select(TableHistoryMovie.id) \
.where(TableHistoryMovie.id.is_(None)) \

View File

@ -11,7 +11,7 @@ from subliminal_patch.subtitle import Subtitle
from pysubs2.formats import get_format_identifier
from languages.get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2
from app.config import settings, get_array_from
from app.config import settings
from utilities.helper import get_target_folder, force_unicode
from utilities.post_processing import pp_replace, set_chmod
from utilities.path_mappings import path_mappings
@ -29,13 +29,13 @@ from .post_processing import postprocessing
def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, audio_language):
logging.debug(f'BAZARR Manually uploading subtitles for this file: {path}')
single = settings.general.getboolean('single_language')
single = settings.general.single_language
use_postprocessing = settings.general.getboolean('use_postprocessing')
use_postprocessing = settings.general.use_postprocessing
postprocessing_cmd = settings.general.postprocessing_cmd
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
language = alpha3_from_alpha2(language)
@ -78,7 +78,7 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
sub = Subtitle(
lang_obj,
mods=get_array_from(settings.general.subzero_mods),
mods=settings.general.subzero_mods,
original_format=use_original_format
)
@ -87,7 +87,7 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
logging.exception('BAZARR Invalid subtitle file: ' + subtitle.filename)
sub.mods = None
if settings.general.getboolean('utf8_encode'):
if settings.general.utf8_encode:
sub.set_encoding("utf-8")
try:

View File

@ -37,7 +37,7 @@ def get_video(path, title, sceneName, providers=None, media_type="movie"):
hash_from = original_path
try:
skip_hashing = settings.general.getboolean('skip_hashing')
skip_hashing = settings.general.skip_hashing
video = parse_video(path, hints=hints, skip_hashing=skip_hashing, dry_run=used_scene_name, providers=providers,
hash_from=hash_from)
video.used_scene_name = used_scene_name

View File

@ -48,7 +48,7 @@ class EventTracker:
self.tracker.store.save()
def track_subtitles(self, provider, action, language):
if not settings.analytics.getboolean('enabled'):
if not settings.analytics.enabled:
return
subtitles_event = self.tracker.create_new_event(name="subtitles")

View File

@ -52,7 +52,7 @@ def backup_to_zip():
backup_filename = f"bazarr_backup_v{os.environ['BAZARR_VERSION']}_{now_string}.zip"
logging.debug(f'Backup filename will be: {backup_filename}')
if not settings.postgresql.getboolean('enabled'):
if not settings.postgresql.enabled:
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
logging.debug(f'Database file path to backup is: {database_src_file}')
@ -71,7 +71,7 @@ def backup_to_zip():
database_backup_file = None
logging.exception('Unable to backup database file.')
config_file = os.path.join(args.config_dir, 'config', 'config.ini')
config_file = os.path.join(args.config_dir, 'config', 'config.yaml')
logging.debug(f'Config file path to backup is: {config_file}')
with ZipFile(os.path.join(get_backup_path(), backup_filename), 'w') as backupZip:
@ -83,12 +83,19 @@ def backup_to_zip():
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
else:
logging.debug('Database file is not included in backup. See previous exception')
backupZip.write(config_file, 'config.ini')
backupZip.write(config_file, 'config.yaml')
def restore_from_backup():
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.ini')
if os.path.isfile(os.path.join(get_restore_path(), 'config.yaml')):
restore_config_path = os.path.join(get_restore_path(), 'config.yaml')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.yaml')
new_config = True
else:
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
dest_config_path = os.path.join(args.config_dir, 'config', 'config.ini')
new_config = False
restore_database_path = os.path.join(get_restore_path(), 'bazarr.db')
dest_database_path = os.path.join(args.config_dir, 'db', 'bazarr.db')
@ -97,8 +104,15 @@ def restore_from_backup():
shutil.copy(restore_config_path, dest_config_path)
os.remove(restore_config_path)
except OSError:
logging.exception(f'Unable to restore or delete config.ini to {dest_config_path}')
if not settings.postgresql.getboolean('enabled'):
logging.exception(f'Unable to restore or delete config file to {dest_config_path}')
else:
if new_config:
if os.path.isfile(os.path.join(get_restore_path(), 'config.ini')):
os.remove(os.path.join(get_restore_path(), 'config.ini'))
else:
if os.path.isfile(os.path.join(get_restore_path(), 'config.yaml')):
os.remove(os.path.join(get_restore_path(), 'config.yaml'))
if not settings.postgresql.enabled:
try:
shutil.copy(restore_database_path, dest_database_path)
os.remove(restore_database_path)

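Restore now prefers a config.yaml found in the restore folder and only falls back to the legacy config.ini, cleaning up whichever variant was not used. The detection reduces to a small standalone sketch (names hypothetical):

import os

def pick_config_to_restore(restore_dir):
    # Prefer the new YAML config; fall back to the legacy INI one.
    for name in ('config.yaml', 'config.ini'):
        candidate = os.path.join(restore_dir, name)
        if os.path.isfile(candidate):
            return candidate, name == 'config.yaml'
    return None, False
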
View File

@ -9,9 +9,9 @@ from radarr.rootfolder import check_radarr_rootfolder
def check_health():
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
check_sonarr_rootfolder()
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
check_radarr_rootfolder()
event_stream(type='badges')
@ -24,7 +24,7 @@ def get_health_issues():
health_issues = []
# get Sonarr rootfolder issues
if settings.general.getboolean('use_sonarr'):
if settings.general.use_sonarr:
rootfolder = database.execute(
select(TableShowsRootfolder.path,
TableShowsRootfolder.accessible,
@ -36,7 +36,7 @@ def get_health_issues():
'issue': item.error})
# get Radarr rootfolder issues
if settings.general.getboolean('use_radarr'):
if settings.general.use_radarr:
rootfolder = database.execute(
select(TableMoviesRootfolder.path,
TableMoviesRootfolder.accessible,

View File

@ -2,7 +2,7 @@
import re
from app.config import settings, get_array_from
from app.config import settings
class PathMappings:
@ -11,8 +11,8 @@ class PathMappings:
self.path_mapping_movies = []
def update(self):
self.path_mapping_series = [x for x in get_array_from(settings.general.path_mappings) if x[0] != x[1]]
self.path_mapping_movies = [x for x in get_array_from(settings.general.path_mappings_movie) if x[0] != x[1]]
self.path_mapping_series = [x for x in settings.general.path_mappings if x[0] != x[1]]
self.path_mapping_movies = [x for x in settings.general.path_mappings_movie if x[0] != x[1]]
def path_replace(self, path):
if path is None:

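path_mappings no longer needs get_array_from(): YAML stores each mapping as a native list of [remote, local] pairs, which Dynaconf hands back as Python lists. An illustration with made-up values:

path_mappings = [
    ['/tv/', '/mnt/tv/'],
    ['/same/', '/same/'],  # identical pairs are dropped by update()
]
path_mapping_series = [x for x in path_mappings if x[0] != x[1]]
# -> [['/tv/', '/mnt/tv/']]
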
View File

@ -43,7 +43,7 @@ def pp_replace(pp_command, episode, subtitles, language, language_code2, languag
def set_chmod(subtitles_path):
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
'win') and settings.general.chmod_enabled else None
if chmod:
logging.debug(f"BAZARR setting permission to {chmod} on {subtitles_path} after custom post-processing.")
os.chmod(subtitles_path, chmod)

View File

@ -9,7 +9,7 @@
# Bazarr configuration path, must be absolute path
# Vite will use this variable to find your bazarr's configuration file
VITE_BAZARR_CONFIG_FILE="../data/config/config.ini"
VITE_BAZARR_CONFIG_FILE="../data/config/config.yaml"
# Display update section in settings
VITE_CAN_UPDATE=true

View File

@ -2,48 +2,34 @@
/// <reference types="node" />
import { readFile } from "fs/promises";
import { get } from "lodash";
import YAML from "yaml";
class ConfigReader {
config?: string;
config: object;
constructor() {
this.config = undefined;
this.config = {};
}
async open(path: string) {
try {
this.config = await readFile(path, "utf8");
const rawConfig = await readFile(path, "utf8");
this.config = YAML.parse(rawConfig);
} catch (err) {
// We don't want to catch the error here, handle it in the getValue method
}
}
getValue(sectionName: string, fieldName: string) {
if (!this.config) {
throw new Error("Cannot find config to read");
}
const targetSection = this.config
.split("\n\n")
.filter((section) => section.includes(`[${sectionName}]`));
const path = `${sectionName}.${fieldName}`;
const result = get(this.config, path);
if (targetSection.length === 0) {
throw new Error(`Cannot find [${sectionName}] section in config`);
if (result === undefined) {
throw new Error(`Failed to find ${path} in the local config file`);
}
const section = targetSection[0];
for (const line of section.split("\n")) {
const matched = line.startsWith(fieldName);
if (matched) {
const results = line.split("=");
if (results.length === 2) {
const key = results[1].trim();
return key;
}
}
}
throw new Error(`Cannot find ${fieldName} in config`);
return result;
}
}

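The frontend reader above swaps the hand-rolled INI section scanning for YAML.parse plus lodash's get. The same lookup expressed in Python, purely for illustration (the real code is the TypeScript above):

import yaml  # PyYAML

def get_value(path, section, field):
    with open(path, encoding='utf-8') as f:
        config = yaml.safe_load(f) or {}
    try:
        return config[section][field]
    except KeyError:
        raise KeyError(f'Failed to find {section}.{field} in the local config file')
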
View File

@ -58,7 +58,8 @@
"typescript": "^5",
"vite": "^4.3.0",
"vite-plugin-checker": "^0.5.5",
"vitest": "^0.30.1"
"vitest": "^0.30.1",
"yaml": "^2.3.1"
}
},
"node_modules/@adobe/css-tools": {
@ -4818,6 +4819,14 @@
"node": ">=10"
}
},
"node_modules/cosmiconfig/node_modules/yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"engines": {
"node": ">= 6"
}
},
"node_modules/cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@ -10439,11 +10448,12 @@
"dev": true
},
"node_modules/yaml": {
"version": "1.10.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
"integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz",
"integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==",
"dev": true,
"engines": {
"node": ">= 6"
"node": ">= 14"
}
},
"node_modules/yargs": {

View File

@ -42,7 +42,6 @@
"@types/react-dom": "^18.2.0",
"@types/react-table": "^7.7.0",
"@vitejs/plugin-react": "^4.0.0",
"vitest": "^0.30.1",
"@vitest/coverage-c8": "^0.30.0",
"@vitest/ui": "^0.30.0",
"clsx": "^1.2.0",
@ -62,7 +61,9 @@
"sass": "^1.62.0",
"typescript": "^5",
"vite": "^4.3.0",
"vite-plugin-checker": "^0.5.5"
"vite-plugin-checker": "^0.5.5",
"vitest": "^0.30.1",
"yaml": "^2.3.1"
},
"scripts": {
"start": "vite",

libs/dynaconf/__init__.py Normal file (31 lines)
View File

@ -0,0 +1,31 @@
from __future__ import annotations
from dynaconf.base import LazySettings # noqa
from dynaconf.constants import DEFAULT_SETTINGS_FILES
from dynaconf.contrib import DjangoDynaconf # noqa
from dynaconf.contrib import FlaskDynaconf # noqa
from dynaconf.validator import ValidationError # noqa
from dynaconf.validator import Validator # noqa
settings = LazySettings(
# This global `settings` is deprecated from v3.0.0+
# kept here for backwards compatibility
# To Be Removed in 4.0.x
warn_dynaconf_global_settings=True,
environments=True,
lowercase_read=False,
load_dotenv=True,
default_settings_paths=DEFAULT_SETTINGS_FILES,
)
# This is the new recommended base class alias
Dynaconf = LazySettings # noqa
__all__ = [
"Dynaconf",
"LazySettings",
"Validator",
"FlaskDynaconf",
"ValidationError",
"DjangoDynaconf",
]

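With dynaconf vendored under libs/, Bazarr can construct its settings object straight from config.yaml. A minimal sketch of such an instantiation (these constructor options are assumptions, not necessarily the exact ones Bazarr passes):

from dynaconf import Dynaconf

settings = Dynaconf(
    settings_file='config.yaml',  # handled by the YAML loader
    core_loaders=['YAML'],        # skip loaders for formats Bazarr doesn't use
    apply_default_on_none=True,   # treat YAML nulls as "use the default"
)
print(settings.general.use_sonarr)
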
libs/dynaconf/base.py Normal file (1285 lines)

File diff suppressed because it is too large.

libs/dynaconf/cli.py Normal file (773 lines)
View File

@ -0,0 +1,773 @@
from __future__ import annotations
import importlib
import json
import os
import pprint
import sys
import warnings
import webbrowser
from contextlib import suppress
from pathlib import Path
from dynaconf import constants
from dynaconf import default_settings
from dynaconf import LazySettings
from dynaconf import loaders
from dynaconf import settings as legacy_settings
from dynaconf.loaders.py_loader import get_module
from dynaconf.utils import upperfy
from dynaconf.utils.files import read_file
from dynaconf.utils.functional import empty
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.utils.parse_conf import unparse_conf_data
from dynaconf.validator import ValidationError
from dynaconf.validator import Validator
from dynaconf.vendor import click
from dynaconf.vendor import toml
from dynaconf.vendor import tomllib
os.environ["PYTHONIOENCODING"] = "utf-8"
CWD = None
try:
CWD = Path.cwd()
except FileNotFoundError:
pass
EXTS = ["ini", "toml", "yaml", "json", "py", "env"]
WRITERS = ["ini", "toml", "yaml", "json", "py", "redis", "vault", "env"]
ENC = default_settings.ENCODING_FOR_DYNACONF
def set_settings(ctx, instance=None):
"""Pick correct settings instance and set it to a global variable."""
global settings
settings = None
_echo_enabled = ctx.invoked_subcommand not in ["get", None]
if instance is not None:
if ctx.invoked_subcommand in ["init"]:
raise click.UsageError(
"-i/--instance option is not allowed for `init` command"
)
sys.path.insert(0, ".")
settings = import_settings(instance)
elif "FLASK_APP" in os.environ: # pragma: no cover
with suppress(ImportError, click.UsageError):
from flask.cli import ScriptInfo # noqa
from dynaconf import FlaskDynaconf
flask_app = ScriptInfo().load_app()
settings = FlaskDynaconf(flask_app, **flask_app.config).settings
_echo_enabled and click.echo(
click.style(
"Flask app detected", fg="white", bg="bright_black"
)
)
elif "DJANGO_SETTINGS_MODULE" in os.environ: # pragma: no cover
sys.path.insert(0, os.path.abspath(os.getcwd()))
try:
# Django extension v2
from django.conf import settings # noqa
settings.DYNACONF.configure()
except AttributeError:
settings = LazySettings()
if settings is not None:
_echo_enabled and click.echo(
click.style(
"Django app detected", fg="white", bg="bright_black"
)
)
if settings is None:
if instance is None and "--help" not in click.get_os_args():
if ctx.invoked_subcommand and ctx.invoked_subcommand not in [
"init",
]:
warnings.warn(
"Starting on 3.x the param --instance/-i is now required. "
"try passing it `dynaconf -i path.to.settings <cmd>` "
"Example `dynaconf -i config.settings list` "
)
settings = legacy_settings
else:
settings = LazySettings(create_new_settings=True)
else:
settings = LazySettings()
def import_settings(dotted_path):
"""Import settings instance from python dotted path.
Last item in dotted path must be settings instance.
Example: import_settings('path.to.settings')
"""
if "." in dotted_path:
module, name = dotted_path.rsplit(".", 1)
else:
raise click.UsageError(
f"invalid path to settings instance: {dotted_path}"
)
try:
module = importlib.import_module(module)
except ImportError as e:
raise click.UsageError(e)
except FileNotFoundError:
return
try:
return getattr(module, name)
except AttributeError as e:
raise click.UsageError(e)
def split_vars(_vars):
"""Splits values like foo=bar=zaz in {'foo': 'bar=zaz'}"""
return (
{
upperfy(k.strip()): parse_conf_data(
v.strip(), tomlfy=True, box_settings=settings
)
for k, _, v in [item.partition("=") for item in _vars]
}
if _vars
else {}
)
def read_file_in_root_directory(*names, **kwargs):
"""Read a file on root dir."""
return read_file(
os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get("encoding", "utf-8"),
)
def print_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(read_file_in_root_directory("VERSION"))
ctx.exit()
def open_docs(ctx, param, value): # pragma: no cover
if not value or ctx.resilient_parsing:
return
url = "https://dynaconf.com/"
webbrowser.open(url, new=2)
click.echo(f"{url} opened in browser")
ctx.exit()
def show_banner(ctx, param, value):
"""Shows dynaconf awesome banner"""
if not value or ctx.resilient_parsing:
return
set_settings(ctx)
click.echo(settings.dynaconf_banner)
click.echo("Learn more at: http://github.com/dynaconf/dynaconf")
ctx.exit()
@click.group()
@click.option(
"--version",
is_flag=True,
callback=print_version,
expose_value=False,
is_eager=True,
help="Show dynaconf version",
)
@click.option(
"--docs",
is_flag=True,
callback=open_docs,
expose_value=False,
is_eager=True,
help="Open documentation in browser",
)
@click.option(
"--banner",
is_flag=True,
callback=show_banner,
expose_value=False,
is_eager=True,
help="Show awesome banner",
)
@click.option(
"--instance",
"-i",
default=None,
envvar="INSTANCE_FOR_DYNACONF",
help="Custom instance of LazySettings",
)
@click.pass_context
def main(ctx, instance):
"""Dynaconf - Command Line Interface\n
Documentation: https://dynaconf.com/
"""
set_settings(ctx, instance)
@main.command()
@click.option(
"--format", "fileformat", "-f", default="toml", type=click.Choice(EXTS)
)
@click.option(
"--path", "-p", default=CWD, help="defaults to current directory"
)
@click.option(
"--env",
"-e",
default=None,
help="deprecated command (kept for compatibility but unused)",
)
@click.option(
"--vars",
"_vars",
"-v",
multiple=True,
default=None,
help=(
"extra values to write to settings file "
"e.g: `dynaconf init -v NAME=foo -v X=2`"
),
)
@click.option(
"--secrets",
"_secrets",
"-s",
multiple=True,
default=None,
help=(
"secret key values to be written in .secrets "
"e.g: `dynaconf init -s TOKEN=kdslmflds"
),
)
@click.option("--wg/--no-wg", default=True)
@click.option("-y", default=False, is_flag=True)
@click.option("--django", default=os.environ.get("DJANGO_SETTINGS_MODULE"))
@click.pass_context
def init(ctx, fileformat, path, env, _vars, _secrets, wg, y, django):
"""Inits a dynaconf project
By default it creates a settings.toml and a .secrets.toml
for [default|development|staging|testing|production|global] envs.
The format of the files can be changed passing
--format=yaml|json|ini|py.
This command must run on the project's root folder or you must pass
--path=/myproject/root/folder.
The --env/-e is deprecated (kept for compatibility but unused)
"""
click.echo("⚙️ Configuring your Dynaconf environment")
click.echo("-" * 42)
if "FLASK_APP" in os.environ: # pragma: no cover
click.echo(
"⚠️ Flask detected, you can't use `dynaconf init` "
"on a flask project, instead go to dynaconf.com/flask/ "
"for more information.\n"
"Or add the following to your app.py\n"
"\n"
"from dynaconf import FlaskDynaconf\n"
"app = Flask(__name__)\n"
"FlaskDynaconf(app)\n"
)
exit(1)
path = Path(path)
if env is not None:
click.secho(
"⚠️ The --env/-e option is deprecated (kept for\n"
" compatibility but unused)\n",
fg="red",
bold=True,
# stderr=True,
)
if settings.get("create_new_settings") is True:
filename = Path("config.py")
if not filename.exists():
with open(filename, "w") as new_settings:
new_settings.write(
constants.INSTANCE_TEMPLATE.format(
settings_files=[
f"settings.{fileformat}",
f".secrets.{fileformat}",
]
)
)
click.echo(
"🐍 The file `config.py` was generated.\n"
" on your code now use `from config import settings`.\n"
" (you must have `config` importable in your PYTHONPATH).\n"
)
else:
click.echo(
f"⁉️ You already have a {filename} so it is not going to be\n"
" generated for you, you will need to create your own \n"
" settings instance e.g: config.py \n"
" from dynaconf import Dynaconf \n"
" settings = Dynaconf(**options)\n"
)
sys.path.append(str(path))
set_settings(ctx, "config.settings")
env = settings.current_env.lower()
loader = importlib.import_module(f"dynaconf.loaders.{fileformat}_loader")
# Turn foo=bar=zaz in {'foo': 'bar=zaz'}
env_data = split_vars(_vars)
_secrets = split_vars(_secrets)
# create placeholder data for every env
settings_data = {}
secrets_data = {}
if env_data:
settings_data[env] = env_data
settings_data["default"] = {k: "a default value" for k in env_data}
if _secrets:
secrets_data[env] = _secrets
secrets_data["default"] = {k: "a default value" for k in _secrets}
if str(path).endswith(
constants.ALL_EXTENSIONS + ("py",)
): # pragma: no cover # noqa
settings_path = path
secrets_path = path.parent / f".secrets.{fileformat}"
gitignore_path = path.parent / ".gitignore"
else:
if fileformat == "env":
if str(path) in (".env", "./.env"): # pragma: no cover
settings_path = path
elif str(path).endswith("/.env"): # pragma: no cover
settings_path = path
elif str(path).endswith(".env"): # pragma: no cover
settings_path = path.parent / ".env"
else:
settings_path = path / ".env"
Path.touch(settings_path)
secrets_path = None
else:
settings_path = path / f"settings.{fileformat}"
secrets_path = path / f".secrets.{fileformat}"
gitignore_path = path / ".gitignore"
if fileformat in ["py", "env"] or env == "main":
# for Main env, Python and .env formats writes a single env
settings_data = settings_data.get(env, {})
secrets_data = secrets_data.get(env, {})
if not y and settings_path and settings_path.exists(): # pragma: no cover
click.confirm(
f"{settings_path} exists do you want to overwrite it?",
abort=True,
)
if not y and secrets_path and secrets_path.exists(): # pragma: no cover
click.confirm(
f"{secrets_path} exists do you want to overwrite it?",
abort=True,
)
if settings_path:
loader.write(settings_path, settings_data, merge=True)
click.echo(
f"🎛️ {settings_path.name} created to hold your settings.\n"
)
if secrets_path:
loader.write(secrets_path, secrets_data, merge=True)
click.echo(f"🔑 {secrets_path.name} created to hold your secrets.\n")
ignore_line = ".secrets.*"
comment = "\n# Ignore dynaconf secret files\n"
if not gitignore_path.exists():
with open(str(gitignore_path), "w", encoding=ENC) as f:
f.writelines([comment, ignore_line, "\n"])
else:
existing = (
ignore_line in open(str(gitignore_path), encoding=ENC).read()
)
if not existing: # pragma: no cover
with open(str(gitignore_path), "a+", encoding=ENC) as f:
f.writelines([comment, ignore_line, "\n"])
click.echo(
f"🙈 the {secrets_path.name} is also included in `.gitignore` \n"
" beware to not push your secrets to a public repo \n"
" or use dynaconf builtin support for Vault Servers.\n"
)
if django: # pragma: no cover
dj_module, _ = get_module({}, django)
dj_filename = dj_module.__file__
if Path(dj_filename).exists():
click.confirm(
f"{dj_filename} is found do you want to add dynaconf?",
abort=True,
)
with open(dj_filename, "a") as dj_file:
dj_file.write(constants.DJANGO_PATCH)
click.echo("🎠 Now your Django settings are managed by Dynaconf")
else:
click.echo("❌ Django settings file not written.")
else:
click.echo(
"🎉 Dynaconf is configured! read more on https://dynaconf.com\n"
" Use `dynaconf -i config.settings list` to see your settings\n"
)
@main.command(name="get")
@click.argument("key", required=True)
@click.option(
"--default",
"-d",
default=empty,
help="Default value if settings doesn't exist",
)
@click.option(
"--env", "-e", default=None, help="Filters the env to get the values"
)
@click.option(
"--unparse",
"-u",
default=False,
help="Unparse data by adding markers such as @none, @int etc..",
is_flag=True,
)
def get(key, default, env, unparse):
"""Returns the raw value for a settings key.
If the result is a dict, list or tuple it is printed as a valid json string.
"""
if env:
env = env.strip()
if key:
key = key.strip()
if env:
settings.setenv(env)
if default is not empty:
result = settings.get(key, default)
else:
result = settings[key] # let the keyerror raises
if unparse:
result = unparse_conf_data(result)
if isinstance(result, (dict, list, tuple)):
result = json.dumps(result, sort_keys=True)
click.echo(result, nl=False)
@main.command(name="list")
@click.option(
"--env", "-e", default=None, help="Filters the env to get the values"
)
@click.option("--key", "-k", default=None, help="Filters a single key")
@click.option(
"--more",
"-m",
default=None,
help="Pagination more|less style",
is_flag=True,
)
@click.option(
"--loader",
"-l",
default=None,
help="a loader identifier to filter e.g: toml|yaml",
)
@click.option(
"--all",
"_all",
"-a",
default=False,
is_flag=True,
help="show dynaconf internal settings?",
)
@click.option(
"--output",
"-o",
type=click.Path(writable=True, dir_okay=False),
default=None,
help="Filepath to write the listed values as json",
)
@click.option(
"--output-flat",
"flat",
is_flag=True,
default=False,
help="Output file is flat (do not include [env] name)",
)
def _list(env, key, more, loader, _all=False, output=None, flat=False):
"""Lists all user defined config values
and if `--all` is passed it also shows dynaconf internal variables.
"""
if env:
env = env.strip()
if key:
key = key.strip()
if loader:
loader = loader.strip()
if env:
settings.setenv(env)
cur_env = settings.current_env.lower()
if cur_env == "main":
flat = True
click.echo(
click.style(
f"Working in {cur_env} environment ",
bold=True,
bg="bright_blue",
fg="bright_white",
)
)
if not loader:
data = settings.as_dict(env=env, internal=_all)
else:
identifier = f"{loader}_{cur_env}"
data = settings._loaded_by_loaders.get(identifier, {})
data = data or settings._loaded_by_loaders.get(loader, {})
# remove to avoid displaying twice
data.pop("SETTINGS_MODULE", None)
def color(_k):
if _k in dir(default_settings):
return "blue"
return "magenta"
def format_setting(_k, _v):
key = click.style(_k, bg=color(_k), fg="bright_white")
data_type = click.style(
f"<{type(_v).__name__}>", bg="bright_black", fg="bright_white"
)
value = pprint.pformat(_v)
return f"{key}{data_type} {value}"
if not key:
datalines = "\n".join(
format_setting(k, v)
for k, v in data.items()
if k not in data.get("RENAMED_VARS", [])
)
(click.echo_via_pager if more else click.echo)(datalines)
if output:
loaders.write(output, data, env=not flat and cur_env)
else:
key = upperfy(key)
try:
value = settings.get(key, empty)
except AttributeError:
value = empty
if value is empty:
click.echo(click.style("Key not found", bg="red", fg="white"))
return
click.echo(format_setting(key, value))
if output:
loaders.write(output, {key: value}, env=not flat and cur_env)
if env:
settings.setenv()
@main.command()
@click.argument("to", required=True, type=click.Choice(WRITERS))
@click.option(
"--vars",
"_vars",
"-v",
multiple=True,
default=None,
help=(
"key values to be written "
"e.g: `dynaconf write toml -e NAME=foo -e X=2"
),
)
@click.option(
"--secrets",
"_secrets",
"-s",
multiple=True,
default=None,
help=(
"secret key values to be written in .secrets "
"e.g: `dynaconf write toml -s TOKEN=kdslmflds -s X=2"
),
)
@click.option(
"--path",
"-p",
default=CWD,
help="defaults to current directory/settings.{ext}",
)
@click.option(
"--env",
"-e",
default="default",
help=(
"env to write to defaults to DEVELOPMENT for files "
"for external sources like Redis and Vault "
"it will be DYNACONF or the value set in "
"$ENVVAR_PREFIX_FOR_DYNACONF"
),
)
@click.option("-y", default=False, is_flag=True)
def write(to, _vars, _secrets, path, env, y):
"""Writes data to specific source"""
_vars = split_vars(_vars)
_secrets = split_vars(_secrets)
loader = importlib.import_module(f"dynaconf.loaders.{to}_loader")
if to in EXTS:
# Lets write to a file
path = Path(path)
if str(path).endswith(constants.ALL_EXTENSIONS + ("py",)):
settings_path = path
secrets_path = path.parent / f".secrets.{to}"
else:
if to == "env":
if str(path) in (".env", "./.env"): # pragma: no cover
settings_path = path
elif str(path).endswith("/.env"):
settings_path = path
elif str(path).endswith(".env"):
settings_path = path.parent / ".env"
else:
settings_path = path / ".env"
Path.touch(settings_path)
secrets_path = None
_vars.update(_secrets)
else:
settings_path = path / f"settings.{to}"
secrets_path = path / f".secrets.{to}"
if (
_vars and not y and settings_path and settings_path.exists()
): # pragma: no cover # noqa
click.confirm(
f"{settings_path} exists do you want to overwrite it?",
abort=True,
)
if (
_secrets and not y and secrets_path and secrets_path.exists()
): # pragma: no cover # noqa
click.confirm(
f"{secrets_path} exists do you want to overwrite it?",
abort=True,
)
if to not in ["py", "env"]:
if _vars:
_vars = {env: _vars}
if _secrets:
_secrets = {env: _secrets}
if _vars and settings_path:
loader.write(settings_path, _vars, merge=True)
click.echo(f"Data successful written to {settings_path}")
if _secrets and secrets_path:
loader.write(secrets_path, _secrets, merge=True)
click.echo(f"Data successful written to {secrets_path}")
else: # pragma: no cover
# lets write to external source
with settings.using_env(env):
# make sure we're in the correct environment
loader.write(settings, _vars, **_secrets)
click.echo(f"Data successful written to {to}")
@main.command()
@click.option(
"--path", "-p", default=CWD, help="defaults to current directory"
)
def validate(path): # pragma: no cover
"""Validates Dynaconf settings based on rules defined in
dynaconf_validators.toml"""
# reads the 'dynaconf_validators.toml' from path
# for each section register the validator for specific env
# call validate
path = Path(path)
if not str(path).endswith(".toml"):
path = path / "dynaconf_validators.toml"
if not path.exists(): # pragma: no cover # noqa
click.echo(click.style(f"{path} not found", fg="white", bg="red"))
sys.exit(1)
try: # try tomlib first
validation_data = tomllib.load(open(str(path), "rb"))
except UnicodeDecodeError: # fallback to legacy toml (TBR in 4.0.0)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
validation_data = toml.load(
open(str(path), encoding=default_settings.ENCODING_FOR_DYNACONF),
)
success = True
for env, name_data in validation_data.items():
for name, data in name_data.items():
if not isinstance(data, dict): # pragma: no cover
click.echo(
click.style(
f"Invalid rule for parameter '{name}'",
fg="white",
bg="yellow",
)
)
else:
data.setdefault("env", env)
click.echo(
click.style(
f"Validating '{name}' with '{data}'",
fg="white",
bg="blue",
)
)
try:
Validator(name, **data).validate(settings)
except ValidationError as e:
click.echo(
click.style(f"Error: {e}", fg="white", bg="red")
)
success = False
if success:
click.echo(click.style("Validation success!", fg="white", bg="green"))
else:
click.echo(click.style("Validation error!", fg="white", bg="red"))
sys.exit(1)
if __name__ == "__main__": # pragma: no cover
main()

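split_vars() in the CLI above turns repeated -v KEY=value options into a dict, keeping any extra '=' inside the value because str.partition() splits on the first '=' only. A stripped-down illustration (the real function also upper-cases keys via upperfy and casts values through parse_conf_data):

items = ['NAME=foo', 'X=bar=zaz']
parsed = {k.strip().upper(): v.strip()
          for k, _, v in (item.partition('=') for item in items)}
# -> {'NAME': 'foo', 'X': 'bar=zaz'}
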
View File

@ -0,0 +1,52 @@
# pragma: no cover
from __future__ import annotations
INI_EXTENSIONS = (".ini", ".conf", ".properties")
TOML_EXTENSIONS = (".toml", ".tml")
YAML_EXTENSIONS = (".yaml", ".yml")
JSON_EXTENSIONS = (".json",)
ALL_EXTENSIONS = (
INI_EXTENSIONS + TOML_EXTENSIONS + YAML_EXTENSIONS + JSON_EXTENSIONS
) # noqa
EXTERNAL_LOADERS = {
"ENV": "dynaconf.loaders.env_loader",
"VAULT": "dynaconf.loaders.vault_loader",
"REDIS": "dynaconf.loaders.redis_loader",
}
DJANGO_PATCH = """
# HERE STARTS DYNACONF EXTENSION LOAD (Keep at the very bottom of settings.py)
# Read more at https://www.dynaconf.com/django/
import dynaconf # noqa
settings = dynaconf.DjangoDynaconf(__name__) # noqa
# HERE ENDS DYNACONF EXTENSION LOAD (No more code below this line)
"""
INSTANCE_TEMPLATE = """
from dynaconf import Dynaconf
settings = Dynaconf(
envvar_prefix="DYNACONF",
settings_files={settings_files},
)
# `envvar_prefix` = export envvars with `export DYNACONF_FOO=bar`.
# `settings_files` = Load these files in the order.
"""
EXTS = (
"py",
"toml",
"tml",
"yaml",
"yml",
"ini",
"conf",
"properties",
"json",
)
DEFAULT_SETTINGS_FILES = [f"settings.{ext}" for ext in EXTS] + [
f".secrets.{ext}" for ext in EXTS
]

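DEFAULT_SETTINGS_FILES therefore expands to every settings.<ext> followed by every .secrets.<ext>, 18 entries in total:

EXTS = ('py', 'toml', 'tml', 'yaml', 'yml', 'ini', 'conf', 'properties', 'json')
files = [f'settings.{e}' for e in EXTS] + [f'.secrets.{e}' for e in EXTS]
# ['settings.py', 'settings.toml', ..., '.secrets.json']  (9 + 9 = 18 names)
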
View File

@ -0,0 +1,5 @@
from __future__ import annotations
from dynaconf.contrib.django_dynaconf_v2 import DjangoDynaconf # noqa
from dynaconf.contrib.flask_dynaconf import DynaconfConfig # noqa
from dynaconf.contrib.flask_dynaconf import FlaskDynaconf # noqa

View File

@ -0,0 +1,142 @@
"""Dynaconf django extension
In the `django_project/settings.py` put at the very bottom of the file:
# HERE STARTS DYNACONF EXTENSION LOAD (Keep at the very bottom of settings.py)
# Read more at https://www.dynaconf.com/django/
import dynaconf # noqa
settings = dynaconf.DjangoDynaconf(__name__) # noqa
# HERE ENDS DYNACONF EXTENSION LOAD (No more code below this line)
Now in the root of your Django project
(the same folder where manage.py is located)
Put your config files `settings.{py|yaml|toml|ini|json}`
and/or `.secrets.{py|yaml|toml|ini|json}`
In your project's root folder you can now start as::
DJANGO_DEBUG='false' \
DJANGO_ALLOWED_HOSTS='["localhost"]' \
python manage.py runserver
"""
from __future__ import annotations
import inspect
import os
import sys
import dynaconf
try: # pragma: no cover
from django import conf
from django.conf import settings as django_settings
django_installed = True
except ImportError: # pragma: no cover
django_installed = False
def load(django_settings_module_name=None, **kwargs): # pragma: no cover
if not django_installed:
raise RuntimeError(
"To use this extension django must be installed "
"install it with: pip install django"
)
try:
django_settings_module = sys.modules[django_settings_module_name]
except KeyError:
django_settings_module = sys.modules[
os.environ["DJANGO_SETTINGS_MODULE"]
]
settings_module_name = django_settings_module.__name__
settings_file = os.path.abspath(django_settings_module.__file__)
_root_path = os.path.dirname(settings_file)
# 1) Create the lazy settings object reusing settings_module consts
options = {
k.upper(): v
for k, v in django_settings_module.__dict__.items()
if k.isupper()
}
options.update(kwargs)
options.setdefault(
"SKIP_FILES_FOR_DYNACONF", [settings_file, "dynaconf_merge"]
)
options.setdefault("ROOT_PATH_FOR_DYNACONF", _root_path)
options.setdefault("ENVVAR_PREFIX_FOR_DYNACONF", "DJANGO")
options.setdefault("ENV_SWITCHER_FOR_DYNACONF", "DJANGO_ENV")
options.setdefault("ENVIRONMENTS_FOR_DYNACONF", True)
options.setdefault("load_dotenv", True)
options.setdefault(
"default_settings_paths", dynaconf.DEFAULT_SETTINGS_FILES
)
class UserSettingsHolder(dynaconf.LazySettings):
_django_override = True
lazy_settings = dynaconf.LazySettings(**options)
dynaconf.settings = lazy_settings # rebind the settings
# 2) Set all settings back to django_settings_module for 'django check'
lazy_settings.populate_obj(django_settings_module)
# 3) Bind `settings` and `DYNACONF`
setattr(django_settings_module, "settings", lazy_settings)
setattr(django_settings_module, "DYNACONF", lazy_settings)
# 4) keep django original settings
dj = {}
for key in dir(django_settings):
if (
key.isupper()
and (key != "SETTINGS_MODULE")
and key not in lazy_settings.store
):
dj[key] = getattr(django_settings, key, None)
dj["ORIGINAL_SETTINGS_MODULE"] = django_settings.SETTINGS_MODULE
lazy_settings.update(dj)
# Allow dynaconf_hooks to be in the same folder as the django.settings
dynaconf.loaders.execute_hooks(
"post",
lazy_settings,
lazy_settings.current_env,
modules=[settings_module_name],
files=[settings_file],
)
lazy_settings._loaded_py_modules.insert(0, settings_module_name)
# 5) Patch django.conf.settings
class Wrapper:
# lazy_settings = conf.settings.lazy_settings
def __getattribute__(self, name):
if name == "settings":
return lazy_settings
if name == "UserSettingsHolder":
return UserSettingsHolder
return getattr(conf, name)
# This implementation is recommended by Guido Van Rossum
# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html
sys.modules["django.conf"] = Wrapper()
# 6) Enable standalone scripts to use Dynaconf
# This is for when `django.conf.settings` is imported directly
# on external `scripts` (out of Django's lifetime)
for stack_item in reversed(inspect.stack()):
if isinstance(
stack_item.frame.f_globals.get("settings"), conf.LazySettings
):
stack_item.frame.f_globals["settings"] = lazy_settings
return lazy_settings
# syntax sugar
DjangoDynaconf = load # noqa

View File

@ -0,0 +1,230 @@
from __future__ import annotations
import warnings
from collections import ChainMap
from contextlib import suppress
try:
from flask.config import Config
flask_installed = True
except ImportError: # pragma: no cover
flask_installed = False
Config = object
import dynaconf
import pkg_resources
class FlaskDynaconf:
"""The arguments are.
app = The created app
dynaconf_args = Extra args to be passed to Dynaconf (validator for example)
All other values are stored as config vars specially::
ENVVAR_PREFIX_FOR_DYNACONF = env prefix for your envvars to be loaded
example:
if you set to `MYSITE` then
export MYSITE_SQL_PORT='@int 5445'
with that exported to env you access using:
app.config.SQL_PORT
app.config.get('SQL_PORT')
app.config.get('sql_port')
# get is case insensitive
app.config['SQL_PORT']
Dynaconf uses `@int, @bool, @float, @json` to cast
env vars
SETTINGS_FILE_FOR_DYNACONF = The name of the module or file to use as
default to load settings. If nothing is
passed it will be `settings.*` or value
found in `ENVVAR_FOR_DYNACONF`
Dynaconf supports
.py, .yml, .toml, ini, json
ATTENTION: Take a look at `settings.yml` and `.secrets.yml` to know the
required settings format.
Settings load order in Dynaconf:
- Load all defaults and Flask defaults
- Load all passed variables when applying FlaskDynaconf
- Update with data in settings files
- Update with data in environment vars `ENVVAR_FOR_DYNACONF_`
TOML files are very useful to have `envd` settings, let's say,
`production` and `development`.
You can also achieve the same using multiple `.py` files naming as
`settings.py`, `production_settings.py` and `development_settings.py`
(see examples/validator)
Example::
app = Flask(__name__)
FlaskDynaconf(
app,
ENV='MYSITE',
SETTINGS_FILE='settings.yml',
EXTRA_VALUE='You can add additional config vars here'
)
Take a look at examples/flask in Dynaconf repository
"""
def __init__(
self,
app=None,
instance_relative_config=False,
dynaconf_instance=None,
extensions_list=False,
**kwargs,
):
"""kwargs holds initial dynaconf configuration"""
if not flask_installed: # pragma: no cover
raise RuntimeError(
"To use this extension Flask must be installed "
"install it with: pip install flask"
)
self.kwargs = {k.upper(): v for k, v in kwargs.items()}
kwargs.setdefault("ENVVAR_PREFIX", "FLASK")
env_prefix = f"{kwargs['ENVVAR_PREFIX']}_ENV" # FLASK_ENV
kwargs.setdefault("ENV_SWITCHER", env_prefix)
kwargs.setdefault("ENVIRONMENTS", True)
kwargs.setdefault("load_dotenv", True)
kwargs.setdefault(
"default_settings_paths", dynaconf.DEFAULT_SETTINGS_FILES
)
self.dynaconf_instance = dynaconf_instance
self.instance_relative_config = instance_relative_config
self.extensions_list = extensions_list
if app:
self.init_app(app, **kwargs)
def init_app(self, app, **kwargs):
"""kwargs holds initial dynaconf configuration"""
self.kwargs.update(kwargs)
self.settings = self.dynaconf_instance or dynaconf.LazySettings(
**self.kwargs
)
dynaconf.settings = self.settings # rebind customized settings
app.config = self.make_config(app)
app.dynaconf = self.settings
if self.extensions_list:
if not isinstance(self.extensions_list, str):
self.extensions_list = "EXTENSIONS"
app.config.load_extensions(self.extensions_list)
def make_config(self, app):
root_path = app.root_path
if self.instance_relative_config: # pragma: no cover
root_path = app.instance_path
if self.dynaconf_instance:
self.settings.update(self.kwargs)
return DynaconfConfig(
root_path=root_path,
defaults=app.config,
_settings=self.settings,
_app=app,
)
class DynaconfConfig(Config):
"""
Replacement for flask.config_class that responds as a Dynaconf instance.
"""
def __init__(self, _settings, _app, *args, **kwargs):
"""perform the initial load"""
super().__init__(*args, **kwargs)
# Bring Dynaconf instance value to Flask Config
Config.update(self, _settings.store)
self._settings = _settings
self._app = _app
def __contains__(self, item):
return hasattr(self, item)
def __getitem__(self, key):
try:
return self._settings[key]
except KeyError:
return Config.__getitem__(self, key)
def __setitem__(self, key, value):
"""
Allows app.config['key'] = 'foo'
"""
return self._settings.__setitem__(key, value)
def _chain_map(self):
return ChainMap(self._settings, dict(dict.items(self)))
def keys(self):
return self._chain_map().keys()
def values(self):
return self._chain_map().values()
def items(self):
return self._chain_map().items()
def setdefault(self, key, value=None):
return self._chain_map().setdefault(key, value)
def __iter__(self):
return self._chain_map().__iter__()
def __getattr__(self, name):
"""
First try to get value from dynaconf then from Flask Config
"""
with suppress(AttributeError):
return getattr(self._settings, name)
with suppress(KeyError):
return self[name]
raise AttributeError(
f"'{self.__class__.__name__}' object has no attribute '{name}'"
)
def __call__(self, name, *args, **kwargs):
return self.get(name, *args, **kwargs)
def get(self, key, default=None):
"""Gets config from dynaconf variables
if variables does not exists in dynaconf try getting from
`app.config` to support runtime settings."""
return self._settings.get(key, Config.get(self, key, default))
def load_extensions(self, key="EXTENSIONS", app=None):
"""Loads flask extensions dynamically."""
app = app or self._app
extensions = app.config.get(key)
if not extensions:
warnings.warn(
f"Settings is missing {key} to load Flask Extensions",
RuntimeWarning,
)
return
for object_reference in app.config[key]:
# add a placeholder `name` to create a valid entry point
entry_point_spec = f"__name = {object_reference}"
# parse the entry point specification
entry_point = pkg_resources.EntryPoint.parse(entry_point_spec)
# dynamically resolve the entry point
initializer = entry_point.resolve()
# Invoke extension initializer
initializer(app)
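A short usage sketch for the extension above, mirroring the docstring example (assumes Flask is installed; `settings.toml` is a hypothetical file name):
from flask import Flask
from dynaconf import FlaskDynaconf
app = Flask(__name__)
FlaskDynaconf(app, SETTINGS_FILE="settings.toml")
app.config.get("DEBUG")  # resolved through Dynaconf first, then Flask defaults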

View File

@@ -0,0 +1,252 @@
from __future__ import annotations
import importlib
import os
import sys
import warnings
from dynaconf.utils import RENAMED_VARS
from dynaconf.utils import upperfy
from dynaconf.utils import warn_deprecations
from dynaconf.utils.files import find_file
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.vendor.dotenv import load_dotenv
def try_renamed(key, value, older_key, current_key):
if value is None:
if key == current_key:
if older_key in os.environ:
warnings.warn(
f"{older_key} is deprecated please use {current_key}",
DeprecationWarning,
)
value = os.environ[older_key]
return value
def get(key, default=None):
value = os.environ.get(upperfy(key))
# compatibility with renamed variables
for old, new in RENAMED_VARS.items():
value = try_renamed(key, value, old, new)
return (
parse_conf_data(value, tomlfy=True, box_settings={})
if value is not None
else default
)
def start_dotenv(obj=None, root_path=None):
# load_from_dotenv_if_installed
obj = obj or {}
_find_file = getattr(obj, "find_file", find_file)
root_path = (
root_path
or getattr(obj, "_root_path", None)
or get("ROOT_PATH_FOR_DYNACONF")
)
dotenv_path = (
obj.get("DOTENV_PATH_FOR_DYNACONF")
or get("DOTENV_PATH_FOR_DYNACONF")
or _find_file(".env", project_root=root_path)
)
load_dotenv(
dotenv_path,
verbose=obj.get("DOTENV_VERBOSE_FOR_DYNACONF", False),
override=obj.get("DOTENV_OVERRIDE_FOR_DYNACONF", False),
)
warn_deprecations(os.environ)
def reload(load_dotenv=None, *args, **kwargs):
if load_dotenv:
start_dotenv(*args, **kwargs)
importlib.reload(sys.modules[__name__])
# default proj root
# pragma: no cover
ROOT_PATH_FOR_DYNACONF = get("ROOT_PATH_FOR_DYNACONF", None)
# Default settings file
SETTINGS_FILE_FOR_DYNACONF = get("SETTINGS_FILE_FOR_DYNACONF", [])
# Accept the common misspelling `FILES` when/if it happens
mispelled_files = get("SETTINGS_FILES_FOR_DYNACONF", None)
if not SETTINGS_FILE_FOR_DYNACONF and mispelled_files is not None:
SETTINGS_FILE_FOR_DYNACONF = mispelled_files
# # ENV SETTINGS
# # In dynaconf 1.0.0 `NAMESPACE` got renamed to `ENV`
# If provided environments will be loaded separately
ENVIRONMENTS_FOR_DYNACONF = get("ENVIRONMENTS_FOR_DYNACONF", False)
MAIN_ENV_FOR_DYNACONF = get("MAIN_ENV_FOR_DYNACONF", "MAIN")
# If False, dynaconf will allow access to first-level settings only in UPPERCASE
LOWERCASE_READ_FOR_DYNACONF = get("LOWERCASE_READ_FOR_DYNACONF", True)
# The environment variable to switch current env
ENV_SWITCHER_FOR_DYNACONF = get(
"ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
)
# The current env by default is DEVELOPMENT
# to switch, use `export ENV_FOR_DYNACONF=PRODUCTION`
# or put that value in .env file
# this value is used only when reading files like .toml|yaml|ini|json
ENV_FOR_DYNACONF = get(ENV_SWITCHER_FOR_DYNACONF, "DEVELOPMENT")
# This variable exists to support `from_env` method
FORCE_ENV_FOR_DYNACONF = get("FORCE_ENV_FOR_DYNACONF", None)
# Default values are taken from the DEFAULT pseudo env
# this value is used only when reading files like .toml|yaml|ini|json
DEFAULT_ENV_FOR_DYNACONF = get("DEFAULT_ENV_FOR_DYNACONF", "DEFAULT")
# Global values are taken from DYNACONF env used for exported envvars
# Values here overwrites all other envs
# This namespace is used for files and also envvars
ENVVAR_PREFIX_FOR_DYNACONF = get("ENVVAR_PREFIX_FOR_DYNACONF", "DYNACONF")
# By default all environment variables (filtered by `envvar_prefix`) will
# be pulled into settings space. In case some of them are polluting the space,
# setting this flag to `True` will change this behaviour.
# Only "known" variables will be considered -- that is variables defined before
# in settings files (or includes/preloads).
IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF = get(
"IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF", False
)
AUTO_CAST_FOR_DYNACONF = get("AUTO_CAST_FOR_DYNACONF", True)
# The default encoding to open settings files
ENCODING_FOR_DYNACONF = get("ENCODING_FOR_DYNACONF", "utf-8")
# Merge objects on load
MERGE_ENABLED_FOR_DYNACONF = get("MERGE_ENABLED_FOR_DYNACONF", False)
# Lookup keys considering dots as separators
DOTTED_LOOKUP_FOR_DYNACONF = get("DOTTED_LOOKUP_FOR_DYNACONF", True)
# By default `__` is the separator for nested env vars
# export `DYNACONF__DATABASE__server=server.com`
# export `DYNACONF__DATABASE__PORT=6666`
# Should result in settings.DATABASE == {'server': 'server.com', 'PORT': 6666}
# To disable it one can set `NESTED_SEPARATOR_FOR_DYNACONF=false`
NESTED_SEPARATOR_FOR_DYNACONF = get("NESTED_SEPARATOR_FOR_DYNACONF", "__")
# The env var specifying settings module
ENVVAR_FOR_DYNACONF = get("ENVVAR_FOR_DYNACONF", "SETTINGS_FILE_FOR_DYNACONF")
# Default values for redis configs
default_redis = {
"host": get("REDIS_HOST_FOR_DYNACONF", "localhost"),
"port": int(get("REDIS_PORT_FOR_DYNACONF", 6379)),
"db": int(get("REDIS_DB_FOR_DYNACONF", 0)),
"decode_responses": get("REDIS_DECODE_FOR_DYNACONF", True),
"username": get("REDIS_USERNAME_FOR_DYNACONF", None),
"password": get("REDIS_PASSWORD_FOR_DYNACONF", None),
}
REDIS_FOR_DYNACONF = get("REDIS_FOR_DYNACONF", default_redis)
REDIS_ENABLED_FOR_DYNACONF = get("REDIS_ENABLED_FOR_DYNACONF", False)
# Hashicorp Vault Project
vault_scheme = get("VAULT_SCHEME_FOR_DYNACONF", "http")
vault_host = get("VAULT_HOST_FOR_DYNACONF", "localhost")
vault_port = get("VAULT_PORT_FOR_DYNACONF", "8200")
default_vault = {
"url": get(
"VAULT_URL_FOR_DYNACONF", f"{vault_scheme}://{vault_host}:{vault_port}"
),
"token": get("VAULT_TOKEN_FOR_DYNACONF", None),
"cert": get("VAULT_CERT_FOR_DYNACONF", None),
"verify": get("VAULT_VERIFY_FOR_DYNACONF", None),
"timeout": get("VAULT_TIMEOUT_FOR_DYNACONF", None),
"proxies": get("VAULT_PROXIES_FOR_DYNACONF", None),
"allow_redirects": get("VAULT_ALLOW_REDIRECTS_FOR_DYNACONF", None),
"namespace": get("VAULT_NAMESPACE_FOR_DYNACONF", None),
}
VAULT_FOR_DYNACONF = get("VAULT_FOR_DYNACONF", default_vault)
VAULT_ENABLED_FOR_DYNACONF = get("VAULT_ENABLED_FOR_DYNACONF", False)
VAULT_PATH_FOR_DYNACONF = get("VAULT_PATH_FOR_DYNACONF", "dynaconf")
VAULT_MOUNT_POINT_FOR_DYNACONF = get(
"VAULT_MOUNT_POINT_FOR_DYNACONF", "secret"
)
VAULT_ROOT_TOKEN_FOR_DYNACONF = get("VAULT_ROOT_TOKEN_FOR_DYNACONF", None)
VAULT_KV_VERSION_FOR_DYNACONF = get("VAULT_KV_VERSION_FOR_DYNACONF", 1)
VAULT_AUTH_WITH_IAM_FOR_DYNACONF = get(
"VAULT_AUTH_WITH_IAM_FOR_DYNACONF", False
)
VAULT_AUTH_ROLE_FOR_DYNACONF = get("VAULT_AUTH_ROLE_FOR_DYNACONF", None)
VAULT_ROLE_ID_FOR_DYNACONF = get("VAULT_ROLE_ID_FOR_DYNACONF", None)
VAULT_SECRET_ID_FOR_DYNACONF = get("VAULT_SECRET_ID_FOR_DYNACONF", None)
# Only core loaders defined on this list will be invoked
core_loaders = ["YAML", "TOML", "INI", "JSON", "PY"]
CORE_LOADERS_FOR_DYNACONF = get("CORE_LOADERS_FOR_DYNACONF", core_loaders)
# External Loaders to read vars from different data stores
default_loaders = [
"dynaconf.loaders.env_loader",
# 'dynaconf.loaders.redis_loader'
# 'dynaconf.loaders.vault_loader'
]
LOADERS_FOR_DYNACONF = get("LOADERS_FOR_DYNACONF", default_loaders)
# Errors in loaders should be silenced?
SILENT_ERRORS_FOR_DYNACONF = get("SILENT_ERRORS_FOR_DYNACONF", True)
# always fresh variables
FRESH_VARS_FOR_DYNACONF = get("FRESH_VARS_FOR_DYNACONF", [])
DOTENV_PATH_FOR_DYNACONF = get("DOTENV_PATH_FOR_DYNACONF", None)
DOTENV_VERBOSE_FOR_DYNACONF = get("DOTENV_VERBOSE_FOR_DYNACONF", False)
DOTENV_OVERRIDE_FOR_DYNACONF = get("DOTENV_OVERRIDE_FOR_DYNACONF", False)
# Currently this is only used by cli. INSTANCE_FOR_DYNACONF specifies python
# dotted path to custom LazySettings instance. Last dotted path item should be
# instance of LazySettings.
INSTANCE_FOR_DYNACONF = get("INSTANCE_FOR_DYNACONF", None)
# https://msg.pyyaml.org/load
YAML_LOADER_FOR_DYNACONF = get("YAML_LOADER_FOR_DYNACONF", "safe_load")
# Use commentjson? https://commentjson.readthedocs.io/en/latest/
COMMENTJSON_ENABLED_FOR_DYNACONF = get(
"COMMENTJSON_ENABLED_FOR_DYNACONF", False
)
# Extra file, or list of files where to look for secrets
# useful for CI environment like jenkins
# where you can export this variable pointing to a local
# absolute path of the secrets file.
SECRETS_FOR_DYNACONF = get("SECRETS_FOR_DYNACONF", None)
# To include extra paths based on envvar
INCLUDES_FOR_DYNACONF = get("INCLUDES_FOR_DYNACONF", [])
# To pre-load extra paths based on envvar
PRELOAD_FOR_DYNACONF = get("PRELOAD_FOR_DYNACONF", [])
# Files to skip if found on search tree
SKIP_FILES_FOR_DYNACONF = get("SKIP_FILES_FOR_DYNACONF", [])
# YAML reads empty vars as None, should dynaconf apply validator defaults?
# this is set to None, then evaluated on base.Settings.setdefault
# possible values are True/False
APPLY_DEFAULT_ON_NONE_FOR_DYNACONF = get(
"APPLY_DEFAULT_ON_NONE_FOR_DYNACONF", None
)
# Backwards compatibility with renamed variables
for old, new in RENAMED_VARS.items():
setattr(sys.modules[__name__], old, locals()[new])
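A quick sketch of the envvar parsing these defaults rely on; `SQL_PORT` is an arbitrary example variable, and `get()` upper-cases the key and TOML-parses the value, so `@int` casts:
import os
from dynaconf import default_settings
os.environ["SQL_PORT"] = "@int 5445"
assert default_settings.get("sql_port") == 5445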

View File

@@ -0,0 +1,277 @@
from __future__ import annotations
import importlib
import os
from dynaconf import constants as ct
from dynaconf import default_settings
from dynaconf.loaders import ini_loader
from dynaconf.loaders import json_loader
from dynaconf.loaders import py_loader
from dynaconf.loaders import toml_loader
from dynaconf.loaders import yaml_loader
from dynaconf.utils import deduplicate
from dynaconf.utils import ensure_a_list
from dynaconf.utils.boxing import DynaBox
from dynaconf.utils.files import get_local_filename
from dynaconf.utils.parse_conf import false_values
def default_loader(obj, defaults=None):
"""Loads default settings and check if there are overridings
exported as environment variables"""
defaults = defaults or {}
default_settings_values = {
key: value
for key, value in default_settings.__dict__.items() # noqa
if key.isupper()
}
all_keys = deduplicate(
list(defaults.keys()) + list(default_settings_values.keys())
)
for key in all_keys:
if not obj.exists(key):
value = defaults.get(key, default_settings_values.get(key))
obj.set(key, value)
# start dotenv to get default env vars from there
# check overrides in env vars
if obj.get("load_dotenv") is True:
default_settings.start_dotenv(obj)
# Deal with cases where a custom ENV_SWITCHER is provided
# Example: Flask and Django Extensions
env_switcher = defaults.get(
"ENV_SWITCHER_FOR_DYNACONF", "ENV_FOR_DYNACONF"
)
for key in all_keys:
if key not in default_settings_values.keys():
continue
env_value = obj.get_environ(
env_switcher if key == "ENV_FOR_DYNACONF" else key,
default="_not_found",
)
if env_value != "_not_found":
obj.set(key, env_value, tomlfy=True)
def _run_hook_module(hook, hook_module, obj, key=None):
"""Run the hook function from the settings obj.
Given a hook name, a hook_module and a settings object,
load the function and execute it if found.
"""
if hook in obj._loaded_hooks.get(hook_module.__file__, {}):
# already loaded
return
if hook_module and getattr(hook_module, "_error", False):
if not isinstance(hook_module._error, FileNotFoundError):
raise hook_module._error
hook_func = getattr(hook_module, hook, None)
if hook_func:
hook_dict = hook_func(obj.dynaconf.clone())
if hook_dict:
merge = hook_dict.pop(
"dynaconf_merge", hook_dict.pop("DYNACONF_MERGE", False)
)
if key and key in hook_dict:
obj.set(key, hook_dict[key], tomlfy=False, merge=merge)
elif not key:
obj.update(hook_dict, tomlfy=False, merge=merge)
obj._loaded_hooks[hook_module.__file__][hook] = hook_dict
def execute_hooks(
hook, obj, env=None, silent=True, key=None, modules=None, files=None
):
"""Execute dynaconf_hooks from module or filepath."""
if hook not in ["post"]:
raise ValueError(f"hook {hook} not supported yet.")
# try to load hooks using python module __name__
modules = modules or obj._loaded_py_modules
for loaded_module in modules:
hook_module_name = ".".join(
loaded_module.split(".")[:-1] + ["dynaconf_hooks"]
)
try:
hook_module = importlib.import_module(hook_module_name)
except (ImportError, TypeError):
# There was no hook on the same path as a python module
continue
else:
_run_hook_module(
hook=hook,
hook_module=hook_module,
obj=obj,
key=key,
)
# Try to load from python filename path
files = files or obj._loaded_files
for loaded_file in files:
hook_file = os.path.join(
os.path.dirname(loaded_file), "dynaconf_hooks.py"
)
hook_module = py_loader.import_from_filename(
obj, hook_file, silent=silent
)
if not hook_module:
# There was no hook on the same path as a python file
continue
_run_hook_module(
hook=hook,
hook_module=hook_module,
obj=obj,
key=key,
)
def settings_loader(
obj, settings_module=None, env=None, silent=True, key=None, filename=None
):
"""Loads from defined settings module
:param obj: A dynaconf instance
:param settings_module: A path or a list of paths e.g. settings.toml
:param env: Env to look for data (defaults to development)
:param silent: Boolean to silence loading errors
:param key: Load a single key if provided
:param filename: optional filename to override the settings_module
"""
if filename is None:
settings_module = settings_module or obj.settings_module
if not settings_module: # pragma: no cover
return
files = ensure_a_list(settings_module)
else:
files = ensure_a_list(filename)
files.extend(ensure_a_list(obj.get("SECRETS_FOR_DYNACONF", None)))
found_files = []
modules_names = []
for item in files:
item = str(item) # Ensure str in case of LocalPath/Path is passed.
if item.endswith(ct.ALL_EXTENSIONS + (".py",)):
p_root = obj._root_path or (
os.path.dirname(found_files[0]) if found_files else None
)
found = obj.find_file(item, project_root=p_root)
if found:
found_files.append(found)
else:
# a bare python module name w/o extension
modules_names.append(item)
enabled_core_loaders = [
item.upper() for item in obj.get("CORE_LOADERS_FOR_DYNACONF") or []
]
# add `.local.` to found_files list to search for local files.
found_files.extend(
[
get_local_filename(item)
for item in found_files
if ".local." not in str(item)
]
)
for mod_file in modules_names + found_files:
# can be set to multiple files settings.py,settings.yaml,...
# Cascade all loaders
loaders = [
{"ext": ct.YAML_EXTENSIONS, "name": "YAML", "loader": yaml_loader},
{"ext": ct.TOML_EXTENSIONS, "name": "TOML", "loader": toml_loader},
{"ext": ct.INI_EXTENSIONS, "name": "INI", "loader": ini_loader},
{"ext": ct.JSON_EXTENSIONS, "name": "JSON", "loader": json_loader},
]
for loader in loaders:
if loader["name"] not in enabled_core_loaders:
continue
if mod_file.endswith(loader["ext"]):
loader["loader"].load(
obj, filename=mod_file, env=env, silent=silent, key=key
)
continue
if mod_file.endswith(ct.ALL_EXTENSIONS):
continue
if "PY" not in enabled_core_loaders:
# pyloader is disabled
continue
# must be Python file or module
# load from default defined module settings.py or .secrets.py if exists
py_loader.load(obj, mod_file, key=key)
# load from the current env e.g: development_settings.py
env = env or obj.current_env
if mod_file.endswith(".py"):
if ".secrets.py" == mod_file:
tmpl = ".{0}_{1}{2}"
mod_file = "secrets.py"
else:
tmpl = "{0}_{1}{2}"
dirname = os.path.dirname(mod_file)
filename, extension = os.path.splitext(os.path.basename(mod_file))
new_filename = tmpl.format(env.lower(), filename, extension)
env_mod_file = os.path.join(dirname, new_filename)
global_filename = tmpl.format("global", filename, extension)
global_mod_file = os.path.join(dirname, global_filename)
else:
env_mod_file = f"{env.lower()}_{mod_file}"
global_mod_file = f"global_{mod_file}"
py_loader.load(
obj,
env_mod_file,
identifier=f"py_{env.upper()}",
silent=True,
key=key,
)
# load from global_settings.py
py_loader.load(
obj, global_mod_file, identifier="py_global", silent=True, key=key
)
def enable_external_loaders(obj):
"""Enable external service loaders like `VAULT_` and `REDIS_`
looks for env variables like `REDIS_ENABLED_FOR_DYNACONF`
"""
for name, loader in ct.EXTERNAL_LOADERS.items():
enabled = getattr(obj, f"{name.upper()}_ENABLED_FOR_DYNACONF", False)
if (
enabled
and enabled not in false_values
and loader not in obj.LOADERS_FOR_DYNACONF
): # noqa
obj.LOADERS_FOR_DYNACONF.insert(0, loader)
def write(filename, data, env=None):
"""Writes `data` to `filename` infers format by file extension."""
loader_name = f"{filename.rpartition('.')[-1]}_loader"
loader = globals().get(loader_name)
if not loader:
raise OSError(f"{loader_name} cannot be found.")
data = DynaBox(data, box_settings={}).to_dict()
if loader is not py_loader and env and env not in data:
data = {env: data}
loader.write(filename, data, merge=False)
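A small sketch of the extension-dispatching `write` above; it creates a hypothetical `settings.toml` in the current directory and nests the data under the given env:
from dynaconf import loaders
# stored as {"development": {"NAME": "bazarr"}}
loaders.write("settings.toml", {"NAME": "bazarr"}, env="development")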

View File

@@ -0,0 +1,195 @@
from __future__ import annotations
import io
import warnings
from dynaconf.utils import build_env_list
from dynaconf.utils import ensure_a_list
from dynaconf.utils import upperfy
class BaseLoader:
"""Base loader for dynaconf source files.
:param obj: {[LazySettings]} -- [Dynaconf settings]
:param env: {[string]} -- [the current env to be loaded defaults to
[development]]
:param identifier: {[string]} -- [identifier ini, yaml, json, py, toml]
:param extensions: {[list]} -- [List of extensions with dots ['.a', '.b']]
:param file_reader: {[callable]} -- [reads file return dict]
:param string_reader: {[callable]} -- [reads string return dict]
"""
def __init__(
self,
obj,
env,
identifier,
extensions,
file_reader,
string_reader,
opener_params=None,
):
"""Instantiates a loader for different sources"""
self.obj = obj
self.env = env or obj.current_env
self.identifier = identifier
self.extensions = extensions
self.file_reader = file_reader
self.string_reader = string_reader
self.opener_params = opener_params or {
"mode": "r",
"encoding": obj.get("ENCODING_FOR_DYNACONF", "utf-8"),
}
@staticmethod
def warn_not_installed(obj, identifier): # pragma: no cover
if identifier not in obj._not_installed_warnings:
warnings.warn(
f"{identifier} support is not installed in your environment. "
f"`pip install dynaconf[{identifier}]`"
)
obj._not_installed_warnings.append(identifier)
def load(self, filename=None, key=None, silent=True):
"""
Reads and loads into `self.obj` a single key or all keys from source
:param filename: Optional filename to load
:param key: if provided load a single key
:param silent: if load errors should be silenced
"""
filename = filename or self.obj.get(self.identifier.upper())
if not filename:
return
if not isinstance(filename, (list, tuple)):
split_files = ensure_a_list(filename)
if all([f.endswith(self.extensions) for f in split_files]): # noqa
files = split_files # it is a ['file.ext', ...]
else: # it is a single config as string
files = [filename]
else: # it is already a list/tuple
files = filename
source_data = self.get_source_data(files)
if self.obj.get("ENVIRONMENTS_FOR_DYNACONF") is False:
self._envless_load(source_data, silent, key)
else:
self._load_all_envs(source_data, silent, key)
def get_source_data(self, files):
"""Reads each file and returns source data for each file
{"path/to/file.ext": {"key": "value"}}
"""
data = {}
for source_file in files:
if source_file.endswith(self.extensions):
try:
with open(source_file, **self.opener_params) as open_file:
content = self.file_reader(open_file)
self.obj._loaded_files.append(source_file)
if content:
data[source_file] = content
except OSError as e:
if ".local." not in source_file:
warnings.warn(
f"{self.identifier}_loader: {source_file} "
f":{str(e)}"
)
else:
# for tests it is possible to pass string
content = self.string_reader(source_file)
if content:
data[source_file] = content
return data
def _envless_load(self, source_data, silent=True, key=None):
"""Load all the keys from each file without env separation"""
for file_data in source_data.values():
self._set_data_to_obj(
file_data,
self.identifier,
key=key,
)
def _load_all_envs(self, source_data, silent=True, key=None):
"""Load configs from files separating by each environment"""
for file_data in source_data.values():
# env name is checked in lower
file_data = {k.lower(): value for k, value in file_data.items()}
# is there a `dynaconf_merge` on top level of file?
file_merge = file_data.get("dynaconf_merge")
# is there a flag disabling dotted lookup on file?
file_dotted_lookup = file_data.get("dynaconf_dotted_lookup")
for env in build_env_list(self.obj, self.env):
env = env.lower() # lower for better comparison
try:
data = file_data[env] or {}
except KeyError:
if silent:
continue
raise
if not data:
continue
self._set_data_to_obj(
data,
f"{self.identifier}_{env}",
file_merge,
key,
file_dotted_lookup=file_dotted_lookup,
)
def _set_data_to_obj(
self,
data,
identifier,
file_merge=None,
key=False,
file_dotted_lookup=None,
):
"""Calls settings.set to add the keys"""
# data 1st level keys should be transformed to upper case.
data = {upperfy(k): v for k, v in data.items()}
if key:
key = upperfy(key)
if self.obj.filter_strategy:
data = self.obj.filter_strategy(data)
# is there a `dynaconf_merge` inside an `[env]`?
file_merge = file_merge or data.pop("DYNACONF_MERGE", False)
# If not passed or passed as None,
# look for inner [env] value, or default settings.
if file_dotted_lookup is None:
file_dotted_lookup = data.pop(
"DYNACONF_DOTTED_LOOKUP",
self.obj.get("DOTTED_LOOKUP_FOR_DYNACONF"),
)
if not key:
self.obj.update(
data,
loader_identifier=identifier,
merge=file_merge,
dotted_lookup=file_dotted_lookup,
)
elif key in data:
self.obj.set(
key,
data.get(key),
loader_identifier=identifier,
merge=file_merge,
dotted_lookup=file_dotted_lookup,
)
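A sketch of the env-separated flow above, exercised through the TOML loader defined further below; DynaconfDict (from dynaconf.utils) stands in for a real settings object, and current_env is set manually because build_env_list reads it:
from dynaconf.loaders import toml_loader
from dynaconf.utils import DynaconfDict
mocked = DynaconfDict()
mocked.current_env = "development"
toml_loader.load(
mocked,
env="development",
key="NAME",
filename='[default]\nname = "base"\n[development]\nname = "dev"\n',
)
assert mocked["NAME"] == "dev"  # [development] wins over [default]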

View File

@@ -0,0 +1,108 @@
from __future__ import annotations
from os import environ
from dynaconf.utils import missing
from dynaconf.utils import upperfy
from dynaconf.utils.parse_conf import parse_conf_data
DOTENV_IMPORTED = False
try:
from dynaconf.vendor.dotenv import cli as dotenv_cli
DOTENV_IMPORTED = True
except ImportError:
pass
except FileNotFoundError:
pass
IDENTIFIER = "env"
def load(obj, env=None, silent=True, key=None):
"""Loads envvars with prefixes:
`DYNACONF_` (default global) or `$(ENVVAR_PREFIX_FOR_DYNACONF)_`
"""
global_prefix = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
if global_prefix is False or global_prefix.upper() != "DYNACONF":
load_from_env(obj, "DYNACONF", key, silent, IDENTIFIER + "_global")
# Load the global env if exists and overwrite everything
load_from_env(obj, global_prefix, key, silent, IDENTIFIER + "_global")
def load_from_env(
obj,
prefix=False,
key=None,
silent=False,
identifier=IDENTIFIER,
env=False, # backwards compatibility bc renamed param
):
if prefix is False and env is not False:
prefix = env
env_ = ""
if prefix is not False:
if not isinstance(prefix, str):
raise TypeError("`prefix/env` must be str or False")
prefix = prefix.upper()
env_ = f"{prefix}_"
# Load a single environment variable explicitly.
if key:
key = upperfy(key)
value = environ.get(f"{env_}{key}")
if value:
try: # obj is a Settings
obj.set(key, value, loader_identifier=identifier, tomlfy=True)
except AttributeError: # obj is a dict
obj[key] = parse_conf_data(
value, tomlfy=True, box_settings=obj
)
# Load environment variables in bulk (when matching).
else:
# Only known variables should be loaded from environment?
ignore_unknown = obj.get("IGNORE_UNKNOWN_ENVVARS_FOR_DYNACONF")
trim_len = len(env_)
data = {
key[trim_len:]: parse_conf_data(
data, tomlfy=True, box_settings=obj
)
for key, data in environ.items()
if key.startswith(env_)
and not (
# Ignore environment variables that haven't been
# pre-defined in settings space.
ignore_unknown
and obj.get(key[trim_len:], default=missing) is missing
)
}
# Update the settings space based on gathered data from environment.
if data:
filter_strategy = obj.get("FILTER_STRATEGY")
if filter_strategy:
data = filter_strategy(data)
obj.update(data, loader_identifier=identifier)
def write(settings_path, settings_data, **kwargs):
"""Write data to .env file"""
if not DOTENV_IMPORTED:
return
for key, value in settings_data.items():
quote_mode = (
isinstance(value, str)
and (value.startswith("'") or value.startswith('"'))
) or isinstance(value, (list, dict))
dotenv_cli.set_key(
str(settings_path),
key,
str(value),
quote_mode="always" if quote_mode else "none",
)
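A tiny sketch of the single-key path above; a plain dict works because the AttributeError fallback parses the value (`BAZARR` is an arbitrary example prefix):
import os
from dynaconf.loaders.env_loader import load_from_env
os.environ["BAZARR_PORT"] = "@int 6767"
settings = {}
load_from_env(settings, prefix="BAZARR", key="PORT")
assert settings["PORT"] == 6767  # cast by parse_conf_data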

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import io
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import INI_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
try:
from configobj import ConfigObj
except ImportError: # pragma: no cover
ConfigObj = None
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads into "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if load errors should be silenced
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
if ConfigObj is None: # pragma: no cover
BaseLoader.warn_not_installed(obj, "ini")
return
loader = BaseLoader(
obj=obj,
env=env,
identifier="ini",
extensions=INI_EXTENSIONS,
file_reader=lambda fileobj: ConfigObj(fileobj).dict(),
string_reader=lambda strobj: ConfigObj(strobj.split("\n")).dict(),
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(ConfigObj(open_file).dict(), settings_data)
new = ConfigObj()
new.update(settings_data)
new.write(open(str(settings_path), "bw"))

View File

@@ -0,0 +1,80 @@
from __future__ import annotations
import io
import json
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import JSON_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.utils.parse_conf import try_to_encode
try: # pragma: no cover
import commentjson
except ImportError: # pragma: no cover
commentjson = None
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads into "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if load errors should be silenced
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
if (
obj.get("COMMENTJSON_ENABLED_FOR_DYNACONF") and commentjson
): # pragma: no cover # noqa
file_reader = commentjson.load
string_reader = commentjson.loads
else:
file_reader = json.load
string_reader = json.loads
loader = BaseLoader(
obj=obj,
env=env,
identifier="json",
extensions=JSON_EXTENSIONS,
file_reader=file_reader,
string_reader=string_reader,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(json.load(open_file), settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
json.dump(settings_data, open_file, cls=DynaconfEncoder)
class DynaconfEncoder(json.JSONEncoder):
"""Transform Dynaconf custom types instances to json representation"""
def default(self, o):
return try_to_encode(o, callback=super().default)

View File

@@ -0,0 +1,148 @@
from __future__ import annotations
import errno
import importlib
import inspect
import io
import types
from contextlib import suppress
from pathlib import Path
from dynaconf import default_settings
from dynaconf.utils import DynaconfDict
from dynaconf.utils import object_merge
from dynaconf.utils import upperfy
from dynaconf.utils.files import find_file
def load(obj, settings_module, identifier="py", silent=False, key=None):
"""Tries to import a python module"""
mod, loaded_from = get_module(obj, settings_module, silent)
if not (mod and loaded_from):
return
load_from_python_object(obj, mod, settings_module, key, identifier)
def load_from_python_object(
obj, mod, settings_module, key=None, identifier=None
):
file_merge = getattr(mod, "dynaconf_merge", False) or getattr(
mod, "DYNACONF_MERGE", False
)
for setting in dir(mod):
# A setting var in a Python file should start with upper case
# valid: A_value=1, ABC_value=3 A_BBB__default=1
# invalid: a_value=1, MyValue=3
# This is to avoid loading functions, classes and built-ins
if setting.split("__")[0].isupper():
if key is None or key == setting:
setting_value = getattr(mod, setting)
obj.set(
setting,
setting_value,
loader_identifier=identifier,
merge=file_merge,
)
obj._loaded_py_modules.append(mod.__name__)
obj._loaded_files.append(mod.__file__)
def try_to_load_from_py_module_name(
obj, name, key=None, identifier="py", silent=False
):
"""Try to load module by its string name.
Arguments:
obj {LazySettings} -- Dynaconf settings instance
name {str} -- Name of the module e.g: foo.bar.zaz
Keyword Arguments:
key {str} -- Single key to be loaded (default: {None})
identifier {str} -- Name of identifier to store (default: 'py')
silent {bool} -- Whether to raise or silence exceptions.
"""
ctx = suppress(ImportError, TypeError) if silent else suppress()
with ctx:
mod = importlib.import_module(str(name))
load_from_python_object(obj, mod, name, key, identifier)
return True # loaded ok!
# if it reaches this point, an exception occurred and the module was not found.
return False
def get_module(obj, filename, silent=False):
try:
mod = importlib.import_module(filename)
loaded_from = "module"
mod.is_error = False
except (ImportError, TypeError):
mod = import_from_filename(obj, filename, silent=silent)
if mod and not mod._is_error:
loaded_from = "filename"
else:
# it is important to return None in case of not loaded
loaded_from = None
return mod, loaded_from
def import_from_filename(obj, filename, silent=False): # pragma: no cover
"""If settings_module is a filename path import it."""
if filename in [item.filename for item in inspect.stack()]:
raise ImportError(
"Looks like you are loading dynaconf "
f"from inside the {filename} file and then it is trying "
"to load itself entering in a circular reference "
"problem. To solve it you have to "
"invoke your program from another root folder "
"or rename your program file."
)
_find_file = getattr(obj, "find_file", find_file)
if not filename.endswith(".py"):
filename = f"{filename}.py"
if filename in default_settings.SETTINGS_FILE_FOR_DYNACONF:
silent = True
mod = types.ModuleType(filename[:-3])  # drop the ".py" suffix; rstrip(".py") would strip extra trailing chars
mod.__file__ = filename
mod._is_error = False
mod._error = None
try:
with open(
_find_file(filename),
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as config_file:
exec(compile(config_file.read(), filename, "exec"), mod.__dict__)
except OSError as e:
e.strerror = (
f"py_loader: error loading file " f"({e.strerror} {filename})\n"
)
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return
mod._is_error = True
mod._error = e
return mod
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
existing = DynaconfDict()
load(existing, str(settings_path))
object_merge(existing, settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as f:
f.writelines(
[f"{upperfy(k)} = {repr(v)}\n" for k, v in settings_data.items()]
)
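A round-trip sketch of the writer/loader pair above; `generated_settings.py` is a hypothetical file created in the current directory:
from dynaconf.loaders import py_loader
from dynaconf.utils import DynaconfDict
py_loader.write("generated_settings.py", {"name": "bazarr"}, merge=False)
mocked = DynaconfDict()
py_loader.load(mocked, "generated_settings.py")
assert mocked["NAME"] == "bazarr"  # write() upper-cases the keys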

View File

@@ -0,0 +1,108 @@
from __future__ import annotations
from dynaconf.utils import build_env_list
from dynaconf.utils import upperfy
from dynaconf.utils.parse_conf import parse_conf_data
from dynaconf.utils.parse_conf import unparse_conf_data
try:
from redis import StrictRedis
except ImportError:
StrictRedis = None
IDENTIFIER = "redis"
def load(obj, env=None, silent=True, key=None):
"""Reads and loads in to "settings" a single key or all keys from redis
:param obj: the settings instance
:param env: settings env default='DYNACONF'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:return: None
"""
if StrictRedis is None:
raise ImportError(
"redis package is not installed in your environment. "
"`pip install dynaconf[redis]` or disable the redis loader with "
"export REDIS_ENABLED_FOR_DYNACONF=false"
)
redis = StrictRedis(**obj.get("REDIS_FOR_DYNACONF"))
prefix = obj.get("ENVVAR_PREFIX_FOR_DYNACONF")
# prefix is added to env_list to keep backwards compatibility
env_list = [prefix] + build_env_list(obj, env or obj.current_env)
for env_name in env_list:
holder = f"{prefix.upper()}_{env_name.upper()}"
try:
if key:
value = redis.hget(holder.upper(), key)
if value:
parsed_value = parse_conf_data(
value, tomlfy=True, box_settings=obj
)
if parsed_value:
obj.set(key, parsed_value)
else:
data = {
key: parse_conf_data(value, tomlfy=True, box_settings=obj)
for key, value in redis.hgetall(holder.upper()).items()
}
if data:
obj.update(data, loader_identifier=IDENTIFIER)
except Exception:
if silent:
return False
raise
def write(obj, data=None, **kwargs):
"""Write a value in to loader source
:param obj: settings object
:param data: vars to be stored
:param kwargs: vars to be stored
:return:
"""
if obj.REDIS_ENABLED_FOR_DYNACONF is False:
raise RuntimeError(
"Redis is not configured \n"
"export REDIS_ENABLED_FOR_DYNACONF=true\n"
"and configure the REDIS_*_FOR_DYNACONF variables"
)
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF").upper()
# add env to holder
holder = f"{holder}_{obj.current_env.upper()}"
data = data or {}
data.update(kwargs)
if not data:
raise AttributeError("Data must be provided")
redis_data = {
upperfy(key): unparse_conf_data(value) for key, value in data.items()
}
client.hmset(holder.upper(), redis_data)
load(obj)
def delete(obj, key=None):
"""
Delete a single key if specified, or all env if key is none
:param obj: settings object
:param key: key to delete from store location
:return: None
"""
client = StrictRedis(**obj.REDIS_FOR_DYNACONF)
holder = obj.get("ENVVAR_PREFIX_FOR_DYNACONF").upper()
# add env to holder
holder = f"{holder}_{obj.current_env.upper()}"
if key:
client.hdel(holder.upper(), upperfy(key))
obj.unset(key)
else:
keys = client.hkeys(holder.upper())
client.delete(holder.upper())
obj.unset_all(keys)

View File

@@ -0,0 +1,122 @@
from __future__ import annotations
import warnings
from pathlib import Path
from dynaconf import default_settings
from dynaconf.constants import TOML_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.vendor import toml # Backwards compatibility with uiri/toml
from dynaconf.vendor import tomllib # New tomllib stdlib on py3.11
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads into "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if load errors should be silenced
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
try:
loader = BaseLoader(
obj=obj,
env=env,
identifier="toml",
extensions=TOML_EXTENSIONS,
file_reader=tomllib.load,
string_reader=tomllib.loads,
opener_params={"mode": "rb"},
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
except UnicodeDecodeError: # pragma: no cover
"""
NOTE: Compat functions exists to keep backwards compatibility with
the new tomllib library. The old library was called `toml` and
the new one is called `tomllib`.
The old lib uiri/toml allowed unicode characters and re-added files
as string.
The new tomllib (stdlib) does not allow unicode characters, only
utf-8 encoded, and read files as binary.
NOTE: In dynaconf 4.0.0 we will drop support for the old library
removing the compat functions and calling directly the new lib.
"""
loader = BaseLoader(
obj=obj,
env=env,
identifier="toml",
extensions=TOML_EXTENSIONS,
file_reader=toml.load,
string_reader=toml.loads,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
try: # tomllib first
with open(str(settings_path), "rb") as open_file:
object_merge(tomllib.load(open_file), settings_data)
except UnicodeDecodeError: # pragma: no cover
# uiri/toml fallback (TBR on 4.0.0)
with open(
str(settings_path),
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
object_merge(toml.load(open_file), settings_data)
try: # tomllib first
with open(str(settings_path), "wb") as open_file:
tomllib.dump(encode_nulls(settings_data), open_file)
except UnicodeEncodeError: # pragma: no cover
# uiri/toml fallback (TBR on 4.0.0)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
toml.dump(encode_nulls(settings_data), open_file)
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
)
def encode_nulls(data):
"""TOML does not support `None` so this function transforms to '@none '."""
if data is None:
return "@none "
if isinstance(data, dict):
return {key: encode_nulls(value) for key, value in data.items()}
elif isinstance(data, (list, tuple)):
return [encode_nulls(item) for item in data]
return data
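A quick check of `encode_nulls` above:
from dynaconf.loaders.toml_loader import encode_nulls
assert encode_nulls({"a": None, "b": [1, None]}) == {"a": "@none ", "b": [1, "@none "]}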

View File

@@ -0,0 +1,186 @@
# docker run -e 'VAULT_DEV_ROOT_TOKEN_ID=myroot' -p 8200:8200 vault
# pip install hvac
from __future__ import annotations
from dynaconf.utils import build_env_list
from dynaconf.utils.parse_conf import parse_conf_data
try:
import boto3
except ImportError:
boto3 = None
try:
from hvac import Client
from hvac.exceptions import InvalidPath
except ImportError:
raise ImportError(
"vault package is not installed in your environment. "
"`pip install dynaconf[vault]` or disable the vault loader with "
"export VAULT_ENABLED_FOR_DYNACONF=false"
)
IDENTIFIER = "vault"
# backwards compatibility
_get_env_list = build_env_list
def get_client(obj):
client = Client(
**{k: v for k, v in obj.VAULT_FOR_DYNACONF.items() if v is not None}
)
if obj.VAULT_ROLE_ID_FOR_DYNACONF is not None:
client.auth.approle.login(
role_id=obj.VAULT_ROLE_ID_FOR_DYNACONF,
secret_id=obj.get("VAULT_SECRET_ID_FOR_DYNACONF"),
)
elif obj.VAULT_ROOT_TOKEN_FOR_DYNACONF is not None:
client.token = obj.VAULT_ROOT_TOKEN_FOR_DYNACONF
elif obj.VAULT_AUTH_WITH_IAM_FOR_DYNACONF:
if boto3 is None:
raise ImportError(
"boto3 package is not installed in your environment. "
"`pip install boto3` or disable the VAULT_AUTH_WITH_IAM"
)
session = boto3.Session()
credentials = session.get_credentials()
client.auth.aws.iam_login(
credentials.access_key,
credentials.secret_key,
credentials.token,
role=obj.VAULT_AUTH_ROLE_FOR_DYNACONF,
)
assert client.is_authenticated(), (
"Vault authentication error: is VAULT_TOKEN_FOR_DYNACONF or "
"VAULT_ROLE_ID_FOR_DYNACONF defined?"
)
client.secrets.kv.default_kv_version = obj.VAULT_KV_VERSION_FOR_DYNACONF
return client
def load(obj, env=None, silent=None, key=None):
"""Reads and loads in to "settings" a single key or all keys from vault
:param obj: the settings instance
:param env: settings env default='DYNACONF'
:param silent: if errors should raise
:param key: if defined load a single key, else load all in env
:return: None
"""
client = get_client(obj)
try:
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 2:
dirs = client.secrets.kv.v2.list_secrets(
path=obj.VAULT_PATH_FOR_DYNACONF,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)["data"]["keys"]
else:
dirs = client.secrets.kv.v1.list_secrets(
path=obj.VAULT_PATH_FOR_DYNACONF,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)["data"]["keys"]
except InvalidPath:
# The given path is not a directory
dirs = []
# First look for secrets in the environment-less store
if not obj.ENVIRONMENTS_FOR_DYNACONF:
# By adding '', dynaconf will now read secrets from environments-less
# store which are not written by `dynaconf write` to Vault store
env_list = [obj.MAIN_ENV_FOR_DYNACONF.lower(), ""]
# Finally, look for secrets in all the environments
else:
env_list = dirs + build_env_list(obj, env)
for env in env_list:
path = "/".join([obj.VAULT_PATH_FOR_DYNACONF, env])
try:
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 2:
data = client.secrets.kv.v2.read_secret_version(
path, mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF
)
else:
data = client.secrets.kv.read_secret(
"data/" + path,
mount_point=obj.VAULT_MOUNT_POINT_FOR_DYNACONF,
)
except InvalidPath:
# If the path doesn't exist, ignore it and set data to None
data = None
if data:
# There seems to be a data dict within a data dict,
# extract the inner data
data = data.get("data", {}).get("data", {})
try:
if (
obj.VAULT_KV_VERSION_FOR_DYNACONF == 2
and obj.ENVIRONMENTS_FOR_DYNACONF
and data
):
data = data.get("data", {})
if data and key:
value = parse_conf_data(
data.get(key), tomlfy=True, box_settings=obj
)
if value:
obj.set(key, value)
elif data:
obj.update(data, loader_identifier=IDENTIFIER, tomlfy=True)
except Exception:
if silent:
return False
raise
def write(obj, data=None, **kwargs):
"""Write a value in to loader source
:param obj: settings object
:param data: vars to be stored
:param kwargs: vars to be stored
:return:
"""
if obj.VAULT_ENABLED_FOR_DYNACONF is False:
raise RuntimeError(
"Vault is not configured \n"
"export VAULT_ENABLED_FOR_DYNACONF=true\n"
"and configure the VAULT_FOR_DYNACONF_* variables"
)
data = data or {}
data.update(kwargs)
if not data:
raise AttributeError("Data must be provided")
data = {"data": data}
client = get_client(obj)
if obj.VAULT_KV_VERSION_FOR_DYNACONF == 1:
mount_point = obj.VAULT_MOUNT_POINT_FOR_DYNACONF + "/data"
else:
mount_point = obj.VAULT_MOUNT_POINT_FOR_DYNACONF
path = "/".join([obj.VAULT_PATH_FOR_DYNACONF, obj.current_env.lower()])
client.secrets.kv.create_or_update_secret(
path, secret=data, mount_point=mount_point
)
load(obj)
def list_envs(obj, path=""):
"""
This function is a helper to get a list of all the existing envs in
the source of data. The use case is:
existing_envs = vault_loader.list_envs(settings)
for env in existing_envs:
with settings.using_env(env): # switch to the env
# do something with a key of that env
:param obj: settings object
:param path: path to the vault secrets
:return: list containing all the keys at the given path
"""
client = get_client(obj)
path = path or obj.get("VAULT_PATH_FOR_DYNACONF")
try:
return client.list(f"/secret/metadata/{path}")["data"]["keys"]
except TypeError:
return []

View File

@@ -0,0 +1,87 @@
from __future__ import annotations
import io
from pathlib import Path
from warnings import warn
from dynaconf import default_settings
from dynaconf.constants import YAML_EXTENSIONS
from dynaconf.loaders.base import BaseLoader
from dynaconf.utils import object_merge
from dynaconf.utils.parse_conf import try_to_encode
from dynaconf.vendor.ruamel import yaml
# Add support for Dynaconf Lazy values to YAML dumper
yaml.SafeDumper.yaml_representers[
None
] = lambda self, data: yaml.representer.SafeRepresenter.represent_str(
self, try_to_encode(data)
)
def load(obj, env=None, silent=True, key=None, filename=None):
"""
Reads and loads into "obj" a single key or all keys from source file.
:param obj: the settings instance
:param env: settings current env default='development'
:param silent: if load errors should be silenced
:param key: if defined load a single key, else load all in env
:param filename: Optional custom filename to load
:return: None
"""
# Resolve the loaders
# https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
# Possible values are `safe_load, full_load, unsafe_load, load`
yaml_reader = getattr(
yaml, obj.get("YAML_LOADER_FOR_DYNACONF"), yaml.safe_load
)
if yaml_reader.__name__ == "unsafe_load": # pragma: no cover
warn(
"yaml.unsafe_load is deprecated."
" Please read https://msg.pyyaml.org/load for full details."
" Try to use full_load or safe_load."
)
loader = BaseLoader(
obj=obj,
env=env,
identifier="yaml",
extensions=YAML_EXTENSIONS,
file_reader=yaml_reader,
string_reader=yaml_reader,
)
loader.load(
filename=filename,
key=key,
silent=silent,
)
def write(settings_path, settings_data, merge=True):
"""Write data to a settings file.
:param settings_path: the filepath
:param settings_data: a dictionary with data
:param merge: boolean if existing file should be merged with new data
"""
settings_path = Path(settings_path)
if settings_path.exists() and merge: # pragma: no cover
with open(
str(settings_path), encoding=default_settings.ENCODING_FOR_DYNACONF
) as open_file:
object_merge(yaml.safe_load(open_file), settings_data)
with open(
str(settings_path),
"w",
encoding=default_settings.ENCODING_FOR_DYNACONF,
) as open_file:
yaml.dump(
settings_data,
open_file,
Dumper=yaml.dumper.SafeDumper,
explicit_start=True,
indent=2,
default_flow_style=False,
)
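A sketch of the envless path through the loader above: with ENVIRONMENTS_FOR_DYNACONF set to False the document loads flat, and YAML_LOADER_FOR_DYNACONF selects the reader (DynaconfDict again stands in for a settings object):
from dynaconf.loaders import yaml_loader
from dynaconf.utils import DynaconfDict
mocked = DynaconfDict(
{"ENVIRONMENTS_FOR_DYNACONF": False, "YAML_LOADER_FOR_DYNACONF": "safe_load"}
)
yaml_loader.load(mocked, env="DEFAULT", key="PORT", filename="port: 6767")
assert mocked["PORT"] == 6767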

View File

@@ -0,0 +1,19 @@
from __future__ import annotations
from dynaconf.utils import upperfy
class PrefixFilter:
def __init__(self, prefix):
if not isinstance(prefix, str):
raise TypeError("`SETTINGS_FILE_PREFIX` must be str")
self.prefix = f"{upperfy(prefix)}_"
def __call__(self, data):
"""Filter incoming data by prefix"""
len_prefix = len(self.prefix)
return {
upperfy(key[len_prefix:]): value
for key, value in data.items()
if upperfy(key[:len_prefix]) == self.prefix
}
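A quick sketch of the filter defined above (`BAZARR` is an arbitrary example prefix):
f = PrefixFilter("bazarr")
assert f({"BAZARR_PORT": 6767, "OTHER": 1}) == {"PORT": 6767}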

View File

@@ -0,0 +1,8 @@
# pragma: no cover
from __future__ import annotations
TESTING = True
LOADERS_FOR_DYNACONF = [
"dynaconf.loaders.env_loader",
# 'dynaconf.loaders.redis_loader'
]

View File

@@ -0,0 +1,461 @@
from __future__ import annotations
import os
import warnings
from collections import defaultdict
from json import JSONDecoder
from typing import Any
from typing import Iterator
from typing import TYPE_CHECKING
if TYPE_CHECKING: # pragma: no cover
from dynaconf.utils.boxing import DynaBox
from dynaconf.base import LazySettings, Settings
BANNER = """
"""
if os.name == "nt": # pragma: no cover
# windows can't handle the above charmap
BANNER = "DYNACONF"
def object_merge(
old: Any, new: Any, unique: bool = False, full_path: list[str] = None
) -> Any:
"""
Recursively merge two data structures, new is mutated in-place.
:param old: The existing data.
:param new: The new data to get old values merged in to.
:param unique: When set to True, duplicated list items are not re-added.
:param full_path: Indicates the elements of a tree.
"""
if full_path is None:
full_path = []
if old == new or old is None or new is None:
# Nothing to merge
return new
if isinstance(old, list) and isinstance(new, list):
# 726: allow local_merge to override global merge on lists
if "dynaconf_merge_unique" in new:
new.remove("dynaconf_merge_unique")
unique = True
for item in old[::-1]:
if unique and item in new:
continue
new.insert(0, item)
if isinstance(old, dict) and isinstance(new, dict):
existing_value = recursive_get(old, full_path) # doesn't handle None
# Need to make every `None` on `_store` be a wrapped `LazyNone`
# data coming from source, in `new` can be mix case: KEY4|key4|Key4
# data existing on `old` object has the correct case: key4|KEY4|Key4
# So we need to ensure that new keys matches the existing keys
for new_key in list(new.keys()):
correct_case_key = find_the_correct_casing(new_key, old)
if correct_case_key:
new[correct_case_key] = new.pop(new_key)
for old_key, value in old.items():
# This is for when the dict exists internally
# but the new value on the end of full path is the same
if (
existing_value is not None
and old_key.lower() == full_path[-1].lower()
and existing_value is value
):
# Here Be The Dragons
# This comparison needs to be smarter
continue
if old_key not in new:
new[old_key] = value
else:
object_merge(
value,
new[old_key],
full_path=full_path[1:] if full_path else None,
)
handle_metavalues(old, new)
return new
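# A quick sketch of the contract: `new` is mutated in place and wins on
# conflicting keys, e.g.
#     new = {"b": 2}
#     object_merge({"a": 1, "b": 0}, new)  # -> new == {"a": 1, "b": 2}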
def recursive_get(
obj: DynaBox | dict[str, int] | dict[str, str | int],
names: list[str] | None,
) -> Any:
"""Given a dot accessible object and a list of names `foo.bar.zaz`
gets recursively all names one by one obj.foo.bar.zaz.
"""
if not names:
return
head, *tail = names
result = getattr(obj, head, None)
if not tail:
return result
return recursive_get(result, tail)
def handle_metavalues(
old: DynaBox | dict[str, int] | dict[str, str | int], new: Any
) -> None:
"""Cleanup of MetaValues on new dict"""
for key in list(new.keys()):
# MetaValue instances
if getattr(new[key], "_dynaconf_reset", False): # pragma: no cover
# a Reset on `new` triggers reassignment of existing data
new[key] = new[key].unwrap()
elif getattr(new[key], "_dynaconf_del", False):
# a Del on `new` triggers deletion of existing data
new.pop(key, None)
old.pop(key, None)
elif getattr(new[key], "_dynaconf_merge", False):
# a Merge on `new` triggers merge with existing data
new[key] = object_merge(
old.get(key), new[key].unwrap(), unique=new[key].unique
)
# Data structures containing merge tokens
if isinstance(new.get(key), (list, tuple)):
has_merge = "dynaconf_merge" in new[key]
has_merge_unique = "dynaconf_merge_unique" in new[key]
if has_merge or has_merge_unique:
value = list(new[key])
unique = False
try:
value.remove("dynaconf_merge")
except ValueError:
value.remove("dynaconf_merge_unique")
unique = True
for item in old.get(key)[::-1]:
if unique and item in value:
continue
value.insert(0, item)
new[key] = value
elif isinstance(new.get(key), dict):
local_merge = new[key].pop(
"dynaconf_merge", new[key].pop("dynaconf_merge_unique", None)
)
if local_merge not in (True, False, None) and not new[key]:
# In case `dynaconf_merge:` holds value not boolean - ref #241
new[key] = local_merge
if local_merge:
new[key] = object_merge(old.get(key), new[key])
class DynaconfDict(dict):
"""A dict representing en empty Dynaconf object
useful to run loaders in to a dict for testing"""
def __init__(self, *args, **kwargs):
self._fresh = False
self._loaded_envs = []
self._loaded_hooks = defaultdict(dict)
self._loaded_py_modules = []
self._loaded_files = []
self._deleted = set()
self._store = {}
self._env_cache = {}
self._loaded_by_loaders = {}
self._loaders = []
self._defaults = {}
self.environ = os.environ
self.SETTINGS_MODULE = None
self.filter_strategy = kwargs.get("filter_strategy", None)
self._not_installed_warnings = []
self._validate_only = kwargs.pop("validate_only", None)
self._validate_exclude = kwargs.pop("validate_exclude", None)
super().__init__(*args, **kwargs)
def set(self, key: str, value: str, *args, **kwargs) -> None:
self[key] = value
@staticmethod
def get_environ(key, default=None): # pragma: no cover
return os.environ.get(key, default)
def exists(self, key: str, **kwargs) -> bool:
return self.get(key, missing) is not missing
RENAMED_VARS = {
# old: new
"DYNACONF_NAMESPACE": "ENV_FOR_DYNACONF",
"NAMESPACE_FOR_DYNACONF": "ENV_FOR_DYNACONF",
"DYNACONF_SETTINGS_MODULE": "SETTINGS_FILE_FOR_DYNACONF",
"DYNACONF_SETTINGS": "SETTINGS_FILE_FOR_DYNACONF",
"SETTINGS_MODULE": "SETTINGS_FILE_FOR_DYNACONF",
"SETTINGS_MODULE_FOR_DYNACONF": "SETTINGS_FILE_FOR_DYNACONF",
"PROJECT_ROOT": "ROOT_PATH_FOR_DYNACONF",
"PROJECT_ROOT_FOR_DYNACONF": "ROOT_PATH_FOR_DYNACONF",
"DYNACONF_SILENT_ERRORS": "SILENT_ERRORS_FOR_DYNACONF",
"DYNACONF_ALWAYS_FRESH_VARS": "FRESH_VARS_FOR_DYNACONF",
"BASE_NAMESPACE_FOR_DYNACONF": "DEFAULT_ENV_FOR_DYNACONF",
"GLOBAL_ENV_FOR_DYNACONF": "ENVVAR_PREFIX_FOR_DYNACONF",
}
def compat_kwargs(kwargs: dict[str, Any]) -> None:
"""To keep backwards compat change the kwargs to new names"""
warn_deprecations(kwargs)
for old, new in RENAMED_VARS.items():
if old in kwargs:
kwargs[new] = kwargs[old]
# update cross references
for c_old, c_new in RENAMED_VARS.items():
if c_new == new:
kwargs[c_old] = kwargs[new]
class Missing:
"""
Sentinel value object/singleton used to differentiate between ambiguous
situations where `None` is a valid value.
"""
def __bool__(self) -> bool:
"""Respond to boolean duck-typing."""
return False
def __eq__(self, other: DynaBox | Missing) -> bool:
"""Equality check for a singleton."""
return isinstance(other, self.__class__)
# Ensure compatibility with Python 2.x
__nonzero__ = __bool__
def __repr__(self) -> str:
"""
Unambiguously identify this string-based representation of Missing,
used as a singleton.
"""
return "<dynaconf.missing>"
missing = Missing()
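# `missing` lets callers distinguish "key absent" from "key set to None":
# settings.get("FOO", missing) is missing  -> FOO was never defined
# settings.get("FOO") is None              -> FOO exists and holds None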
def deduplicate(list_object: list[str]) -> list[str]:
"""Rebuild `list_object` removing duplicated and keeping order"""
new = []
for item in list_object:
if item not in new:
new.append(item)
return new
def warn_deprecations(data: Any) -> None:
for old, new in RENAMED_VARS.items():
if old in data:
warnings.warn(
f"You are using {old} which is a deprecated settings "
f"replace it with {new}",
DeprecationWarning,
)
def trimmed_split(
s: str, seps: str | tuple[str, str] = (";", ",")
) -> list[str]:
"""Given a string s, split is by one of one of the seps."""
for sep in seps:
if sep not in s:
continue
data = [item.strip() for item in s.strip().split(sep)]
return data
return [s] # raw un-splitted
def ensure_a_list(data: Any) -> list[int] | list[str]:
"""Ensure data is a list or wrap it in a list"""
if not data:
return []
if isinstance(data, (list, tuple, set)):
return list(data)
if isinstance(data, str):
data = trimmed_split(data) # settings.toml,other.yaml
return data
return [data]
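# A quick sketch of the two helpers above:
#
#     assert trimmed_split("settings.toml, other.yaml") == ["settings.toml", "other.yaml"]
#     assert ensure_a_list("a;b") == ["a", "b"]
#     assert ensure_a_list(None) == []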
def build_env_list(obj: Settings | LazySettings, env: str | None) -> list[str]:
"""Build env list for loaders to iterate.
Arguments:
obj {LazySettings} -- A Dynaconf settings instance
env {str} -- The current env to be loaded
Returns:
[str] -- A list of string names of the envs to load.
"""
# add the [default] env
env_list = [(obj.get("DEFAULT_ENV_FOR_DYNACONF") or "default").lower()]
# compatibility with older versions that still uses [dynaconf] as
# [default] env
global_env = (obj.get("ENVVAR_PREFIX_FOR_DYNACONF") or "dynaconf").lower()
if global_env not in env_list:
env_list.append(global_env)
# add the current env
current_env = obj.current_env
if current_env and current_env.lower() not in env_list:
env_list.append(current_env.lower())
# add a manually set env
if env and env.lower() not in env_list:
env_list.append(env.lower())
# add the [global] env
env_list.append("global")
return env_list
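# A runnable sketch with a stub settings object (the stub is hypothetical;
# only `get` and `current_env` are exercised here):
#
#     class _FakeSettings(dict):
#         current_env = "PRODUCTION"
#
#     assert build_env_list(_FakeSettings(), "staging") == [
#         "default", "dynaconf", "production", "staging", "global"
#     ]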
def upperfy(key: str) -> str:
"""Receive a string key and returns its upper version.
Example:
input: foo
output: FOO
input: foo_bar
output: FOO_BAR
input: foo__bar__ZAZ
output: FOO__bar__ZAZ
Arguments:
key {str} -- A string key that may contain dunders `__`
Returns:
The key as upper case but keeping the nested elements.
"""
key = str(key)
if "__" in key:
parts = key.split("__")
return "__".join([parts[0].upper()] + parts[1:])
return key.upper()
def multi_replace(text: str, patterns: dict[str, str]) -> str:
"""Replaces multiple pairs in a string
Arguments:
text {str} -- A "string text"
patterns {dict} -- A dict of {"old text": "new text"}
Returns:
text -- str
"""
for old, new in patterns.items():
text = text.replace(old, new)
return text
def extract_json_objects(
text: str, decoder: JSONDecoder = JSONDecoder()
) -> Iterator[dict[str, int | dict[Any, Any]]]:
"""Find JSON objects in text, and yield the decoded JSON data
Does not attempt to look for JSON arrays, text, or other JSON types outside
of a parent JSON object.
"""
pos = 0
while True:
match = text.find("{", pos)
if match == -1:
break
try:
result, index = decoder.raw_decode(text[match:])
yield result
pos = match + index
except ValueError:
pos = match + 1
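# Only top-level JSON objects are yielded; surrounding text is skipped, e.g.:
#
#     assert list(extract_json_objects('pre {"x": 1} mid {"y": {"z": 2}}')) == [
#         {"x": 1}, {"y": {"z": 2}}
#     ]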
def recursively_evaluate_lazy_format(
value: Any, settings: Settings | LazySettings
) -> Any:
"""Given a value as a data structure, traverse all its members
to find Lazy values and evaluate it.
For example: Evaluate values inside lists and dicts
"""
if getattr(value, "_dynaconf_lazy_format", None):
value = value(settings)
if isinstance(value, list):
# Keep the original type, can be a BoxList
value = value.__class__(
[
recursively_evaluate_lazy_format(item, settings)
for item in value
]
)
return value
def isnamedtupleinstance(value):
"""Check if value is a namedtuple instance
stackoverflow.com/questions/2166818/
how-to-check-if-an-object-is-an-instance-of-a-namedtuple
"""
t = type(value)
b = t.__bases__
if len(b) != 1 or b[0] != tuple:
return False
f = getattr(t, "_fields", None)
if not isinstance(f, tuple):
return False
return all(type(n) == str for n in f)
def find_the_correct_casing(key: str, data: dict[str, Any]) -> str | None:
"""Given a key, find the proper casing in data
Arguments:
key {str} -- A key to be searched in data
data {dict} -- A dict to be searched
Returns:
str -- The proper casing of the key in data
"""
if key in data:
return key
for k in data.keys():
if k.lower() == key.lower():
return k
if k.replace(" ", "_").lower() == key.lower():
return k
return None

View File

@@ -0,0 +1,81 @@
from __future__ import annotations
import inspect
from functools import wraps
from dynaconf.utils import find_the_correct_casing
from dynaconf.utils import recursively_evaluate_lazy_format
from dynaconf.utils.functional import empty
from dynaconf.vendor.box import Box
def evaluate_lazy_format(f):
"""Marks a method on Dynabox instance to
lazily evaluate LazyFormat objects upon access."""
@wraps(f)
def evaluate(dynabox, item, *args, **kwargs):
value = f(dynabox, item, *args, **kwargs)
settings = dynabox._box_config["box_settings"]
if getattr(value, "_dynaconf_lazy_format", None):
dynabox._box_config[
f"raw_{item.lower()}"
] = f"@{value.formatter.token} {value.value}"
return recursively_evaluate_lazy_format(value, settings)
return evaluate
class DynaBox(Box):
"""Specialized Box for dynaconf
it allows items/attrs to be found both in upper or lower case"""
@evaluate_lazy_format
def __getattr__(self, item, *args, **kwargs):
try:
return super().__getattr__(item, *args, **kwargs)
except (AttributeError, KeyError):
n_item = find_the_correct_casing(item, self) or item
return super().__getattr__(n_item, *args, **kwargs)
@evaluate_lazy_format
def __getitem__(self, item, *args, **kwargs):
try:
return super().__getitem__(item, *args, **kwargs)
except (AttributeError, KeyError):
n_item = find_the_correct_casing(item, self) or item
return super().__getitem__(n_item, *args, **kwargs)
def __copy__(self):
return self.__class__(
super(Box, self).copy(),
box_settings=self._box_config.get("box_settings"),
)
def copy(self):
return self.__class__(
super(Box, self).copy(),
box_settings=self._box_config.get("box_settings"),
)
@evaluate_lazy_format
def get(self, item, default=None, *args, **kwargs):
n_item = find_the_correct_casing(item, self) or item
value = super().get(n_item, empty, *args, **kwargs)
return value if value is not empty else default
def __dir__(self):
keys = list(self.keys())
reserved = [
item[0]
for item in inspect.getmembers(DynaBox)
if not item[0].startswith("__")
]
return (
keys
+ [k.lower() for k in keys]
+ [k.upper() for k in keys]
+ reserved
)
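# A minimal sketch of the case-insensitive access DynaBox adds; passing an
# empty box_settings dict is enough for plain (non-lazy) values:
#
#     box = DynaBox({"SERVER": {"HOST": "0.0.0.0"}}, box_settings={})
#     assert box.server.host == box.SERVER.HOST == box["server"]["host"]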

View File

@@ -0,0 +1,112 @@
from __future__ import annotations
import inspect
import io
import os
from dynaconf.utils import deduplicate
def _walk_to_root(path, break_at=None):
"""
Directories starting from the given directory up to the root or break_at
"""
if not os.path.exists(path): # pragma: no cover
raise OSError("Starting path not found")
if os.path.isfile(path): # pragma: no cover
path = os.path.dirname(path)
last_dir = None
current_dir = os.path.abspath(path)
paths = []
while last_dir != current_dir:
paths.append(current_dir)
paths.append(os.path.join(current_dir, "config"))
if break_at and current_dir == os.path.abspath(break_at): # noqa
break
parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
last_dir, current_dir = current_dir, parent_dir
return paths
SEARCHTREE = []
def find_file(filename=".env", project_root=None, skip_files=None, **kwargs):
"""Search in increasingly higher folders for the given file
Returns path to the file if found, or an empty string otherwise.
This function will build a `search_tree` based on:
- Project_root if specified
- Invoked script location and its parents until root
- Current working directory
For each path in the `search_tree` it will also look for an
additional `./config` folder.
"""
search_tree = []
try:
work_dir = os.getcwd()
except FileNotFoundError:
return ""
skip_files = skip_files or []
# If filename is an absolute path and exists, just return it
# if the absolute path does not exist, return empty string so
# that it can be joined and avoid IoError
if os.path.isabs(filename):
return filename if os.path.exists(filename) else ""
if project_root is not None:
search_tree.extend(_walk_to_root(project_root, break_at=work_dir))
script_dir = os.path.dirname(os.path.abspath(inspect.stack()[-1].filename))
# Path to invoked script and recursively to root with its ./config dirs
search_tree.extend(_walk_to_root(script_dir))
# Path to where Python interpreter was invoked and recursively to root
search_tree.extend(_walk_to_root(work_dir))
# Don't look in the same place twice
search_tree = deduplicate(search_tree)
global SEARCHTREE
SEARCHTREE[:] = search_tree
for dirname in search_tree:
check_path = os.path.join(dirname, filename)
if check_path in skip_files:
continue
if os.path.exists(check_path):
return check_path # First found will return
# return empty string if not found so it can still be joined in os.path
return ""
def read_file(path, **kwargs):
content = ""
with open(path, **kwargs) as open_file:
content = open_file.read().strip()
return content
def get_local_filename(filename):
"""Takes a filename like `settings.toml` and returns `settings.local.toml`
Arguments:
filename {str} -- The filename or complete path
Returns:
[str] -- The same name or path with `.local.` added.
"""
name, _, extension = os.path.basename(str(filename)).rpartition(
os.path.extsep
)
return os.path.join(
os.path.dirname(str(filename)), f"{name}.local.{extension}"
)
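# A short sketch of these helpers:
#
#     find_file("settings.toml")  # first match walking up from script dir/CWD, or ""
#     get_local_filename("conf/settings.toml")
#     # -> os.path.join("conf", "settings.local.toml")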

View File

@@ -0,0 +1,136 @@
from __future__ import annotations
import copy
import operator
class Empty:
def __str__(self):
return "EMPTY"
empty = Empty()
def new_method_proxy(func):
def inner(self, *args):
if self._wrapped is empty:
self._setup()
return func(self._wrapped, *args)
return inner
class LazyObject:
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation.
"""
# Avoid infinite recursion when tracing __init__.
_wrapped = None
_kwargs = None
_django_override = False
def __init__(self):
# Note: if a subclass overrides __init__(), it will likely need to
# override __copy__() and __deepcopy__() as well.
self._wrapped = empty
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name in ["_wrapped", "_kwargs", "_warn_dynaconf_global_settings"]:
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__[name] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name in ["_wrapped", "_kwargs"]:
raise TypeError(f"can't delete {name}.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
raise NotImplementedError(
"subclasses of LazyObject must provide a _setup() method"
)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
# object to successfully pickle it, so we might as well just pickle the
# wrapped object since they're supposed to act the same way.
#
# Unfortunately, if we try to simply act like the wrapped object, the ruse
# will break down when pickle gets our id(). Thus we end up with pickle
# thinking, in effect, that we are a distinct object from the wrapped
# object, but with the same __dict__. This can cause problems (see #25389).
#
# So instead, we define our own __reduce__ method and custom unpickler. We
# pickle the wrapped object as the unpickler's argument, so that pickle
# will pickle it normally, and then the unpickler simply returns its
# argument.
def __reduce__(self):
if self._wrapped is empty:
self._setup()
return (unpickle_lazyobject, (self._wrapped,))
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use type(self), not
# self.__class__, because the latter is proxied.
return type(self)()
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__, because the
# latter is proxied.
result = type(self)()
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__bytes__ = new_method_proxy(bytes)
__str__ = new_method_proxy(str)
__bool__ = new_method_proxy(bool)
# Introspection support
__dir__ = new_method_proxy(dir)
# Need to pretend to be the wrapped class, for the sake of objects that
# care about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__lt__ = new_method_proxy(operator.lt)
__gt__ = new_method_proxy(operator.gt)
__ne__ = new_method_proxy(operator.ne)
__hash__ = new_method_proxy(hash)
# List/Tuple/Dictionary methods support
__getitem__ = new_method_proxy(operator.getitem)
__setitem__ = new_method_proxy(operator.setitem)
__delitem__ = new_method_proxy(operator.delitem)
__iter__ = new_method_proxy(iter)
__len__ = new_method_proxy(len)
__contains__ = new_method_proxy(operator.contains)
def unpickle_lazyobject(wrapped):
"""
Used to unpickle lazy objects. Just return its argument, which will be the
wrapped object.
"""
return wrapped
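# A minimal sketch of a LazyObject subclass; the wrapped target is illustrative:
#
#     class LazyList(LazyObject):
#         def _setup(self):
#             self._wrapped = [1, 2, 3]
#
#     lazy = LazyList()
#     assert len(lazy) == 3  # the first proxied call triggers _setup()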

View File

@@ -0,0 +1,401 @@
from __future__ import annotations
import json
import os
import re
import warnings
from functools import wraps
from dynaconf.utils import extract_json_objects
from dynaconf.utils import isnamedtupleinstance
from dynaconf.utils import multi_replace
from dynaconf.utils import recursively_evaluate_lazy_format
from dynaconf.utils.boxing import DynaBox
from dynaconf.utils.functional import empty
from dynaconf.vendor import toml
from dynaconf.vendor import tomllib
try:
from jinja2 import Environment
jinja_env = Environment()
for p_method in ("abspath", "realpath", "relpath", "dirname", "basename"):
jinja_env.filters[p_method] = getattr(os.path, p_method)
except ImportError: # pragma: no cover
jinja_env = None
true_values = ("t", "true", "enabled", "1", "on", "yes", "True")
false_values = ("f", "false", "disabled", "0", "off", "no", "False", "")
KV_PATTERN = re.compile(r"([a-zA-Z0-9 ]*=[a-zA-Z0-9\- :]*)")
"""matches `a=b, c=d, e=f` used on `VALUE='@merge foo=bar'` variables."""
class DynaconfParseError(Exception):
"""Error to raise when parsing @casts"""
class MetaValue:
"""A Marker to trigger specific actions on `set` and `object_merge`"""
_meta_value = True
def __init__(self, value, box_settings):
self.box_settings = box_settings
self.value = parse_conf_data(
value, tomlfy=True, box_settings=box_settings
)
def __repr__(self):
return f"{self.__class__.__name__}({self.value}) on {id(self)}"
def unwrap(self):
return self.value
class Reset(MetaValue):
"""Triggers an existing key to be reset to its value
NOTE: DEPRECATED on v3.0.0
"""
_dynaconf_reset = True
def __init__(self, value, box_settings):
self.box_settings = box_settings
self.value = parse_conf_data(
value, tomlfy=True, box_settings=self.box_settings
)
warnings.warn(f"{self.value} does not need `@reset` anymore.")
class Del(MetaValue):
"""Triggers an existing key to be deleted"""
_dynaconf_del = True
def unwrap(self):
raise ValueError("Del object has no value")
class Merge(MetaValue):
"""Triggers an existing key to be merged"""
_dynaconf_merge = True
def __init__(self, value, box_settings, unique=False):
if unique:
self._dynaconf_merge_unique = True
self.box_settings = box_settings
self.value = parse_conf_data(
value, tomlfy=True, box_settings=box_settings
)
if isinstance(self.value, (int, float, bool)):
# @merge 1, @merge 1.1, @merge False
self.value = [self.value]
elif isinstance(self.value, str):
# @merge {"valid": "json"}
json_object = list(
extract_json_objects(
multi_replace(
self.value,
{
": True": ": true",
":True": ": true",
": False": ": false",
":False": ": false",
": None": ": null",
":None": ": null",
},
)
)
)
if len(json_object) == 1:
self.value = json_object[0]
else:
matches = KV_PATTERN.findall(self.value)
# a=b, c=d
if matches:
self.value = {
k.strip(): parse_conf_data(
v, tomlfy=True, box_settings=box_settings
)
for k, v in (
match.strip().split("=") for match in matches
)
}
elif "," in self.value:
# @merge foo,bar
self.value = self.value.split(",")
else:
# @merge foo
self.value = [self.value]
self.unique = unique
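# A sketch of the inputs Merge accepts, tracing the branches above:
#
#     Merge("1", {}).value                  == [1]                    # scalars become lists
#     Merge("foo,bar", {}).value            == ["foo", "bar"]         # comma separated
#     Merge("a=b, c=d", {}).value           == {"a": "b", "c": "d"}   # key=value pairs
#     Merge('{"valid": "json"}', {}).value  == {"valid": "json"}      # embedded JSON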
class BaseFormatter:
def __init__(self, function, token):
self.function = function
self.token = token
def __call__(self, value, **context):
return self.function(value, **context)
def __str__(self):
return str(self.token)
def _jinja_formatter(value, **context):
if jinja_env is None: # pragma: no cover
raise ImportError(
"jinja2 must be installed to enable '@jinja' settings in dynaconf"
)
return jinja_env.from_string(value).render(**context)
class Formatters:
"""Dynaconf builtin formatters"""
python_formatter = BaseFormatter(str.format, "format")
jinja_formatter = BaseFormatter(_jinja_formatter, "jinja")
class Lazy:
"""Holds data to format lazily."""
_dynaconf_lazy_format = True
def __init__(
self, value=empty, formatter=Formatters.python_formatter, casting=None
):
self.value = value
self.formatter = formatter
self.casting = casting
@property
def context(self):
"""Builds a context for formatting."""
return {"env": os.environ, "this": self.settings}
def __call__(self, settings, validator_object=None):
"""LazyValue triggers format lazily."""
self.settings = settings
# build the context once: the `context` property returns a fresh dict on
# every access, so mutating `self.context` directly would be lost
context = self.context
context["_validator_object"] = validator_object
result = self.formatter(self.value, **context)
if self.casting is not None:
result = self.casting(result)
return result
def __str__(self):
"""Gives string representation for the object."""
return str(self.value)
def __repr__(self):
"""Give the quoted str representation"""
return f"'@{self.formatter} {self.value}'"
def _dynaconf_encode(self):
"""Encodes this object values to be serializable to json"""
return f"@{self.formatter} {self.value}"
def set_casting(self, casting):
"""Set the casting and return the instance."""
self.casting = casting
return self
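# e.g. '@format {env[HOME]}/logs' is stored as Lazy("{env[HOME]}/logs");
# nothing is rendered until a settings object evaluates it via __call__.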
def try_to_encode(value, callback=str):
"""Tries to encode a value by verifying existence of `_dynaconf_encode`"""
try:
return value._dynaconf_encode()
except (AttributeError, TypeError):
return callback(value)
def evaluate_lazy_format(f):
"""Marks a method on Settings instance to
lazily evaluate LazyFormat objects upon access."""
@wraps(f)
def evaluate(settings, *args, **kwargs):
value = f(settings, *args, **kwargs)
return recursively_evaluate_lazy_format(value, settings)
return evaluate
converters = {
"@str": lambda value: value.set_casting(str)
if isinstance(value, Lazy)
else str(value),
"@int": lambda value: value.set_casting(int)
if isinstance(value, Lazy)
else int(value),
"@float": lambda value: value.set_casting(float)
if isinstance(value, Lazy)
else float(value),
"@bool": lambda value: value.set_casting(
lambda x: str(x).lower() in true_values
)
if isinstance(value, Lazy)
else str(value).lower() in true_values,
"@json": lambda value: value.set_casting(
lambda x: json.loads(x.replace("'", '"'))
)
if isinstance(value, Lazy)
else json.loads(value),
"@format": lambda value: Lazy(value),
"@jinja": lambda value: Lazy(value, formatter=Formatters.jinja_formatter),
# Meta Values to trigger pre assignment actions
"@reset": Reset, # @reset is DEPRECATED on v3.0.0
"@del": Del,
"@merge": Merge,
"@merge_unique": lambda value, box_settings: Merge(
value, box_settings, unique=True
),
# Special markers to be used as placeholders e.g: in prefilled forms
# will always return None when evaluated
"@note": lambda value: None,
"@comment": lambda value: None,
"@null": lambda value: None,
"@none": lambda value: None,
"@empty": lambda value: empty,
}
def get_converter(converter_key, value, box_settings):
converter = converters[converter_key]
try:
converted_value = converter(value, box_settings=box_settings)
except TypeError:
converted_value = converter(value)
return converted_value
def parse_with_toml(data):
"""Uses TOML syntax to parse data"""
try: # try tomllib first
try:
return tomllib.loads(f"key={data}")["key"]
except (tomllib.TOMLDecodeError, KeyError):
return data
except UnicodeDecodeError: # pragma: no cover
# warn first: the returns below would otherwise make this unreachable
warnings.warn(
"TOML files should have only UTF-8 encoded characters. "
"starting on 4.0.0 dynaconf will stop allowing invalid chars.",
DeprecationWarning,
)
# fallback to toml (TBR in 4.0.0)
try:
return toml.loads(f"key={data}")["key"]
except (toml.TomlDecodeError, KeyError):
return data
def _parse_conf_data(data, tomlfy=False, box_settings=None):
"""
@int @bool @float @json (for lists and dicts)
strings do not need converters
export DYNACONF_DEFAULT_THEME='material'
export DYNACONF_DEBUG='@bool True'
export DYNACONF_DEBUG_TOOLBAR_ENABLED='@bool False'
export DYNACONF_PAGINATION_PER_PAGE='@int 20'
export DYNACONF_MONGODB_SETTINGS='@json {"DB": "quokka_db"}'
export DYNACONF_ALLOWED_EXTENSIONS='@json ["jpg", "png"]'
"""
# not enforced to not break backwards compatibility with custom loaders
box_settings = box_settings or {}
castenabled = box_settings.get("AUTO_CAST_FOR_DYNACONF", empty)
if castenabled is empty:
castenabled = (
os.environ.get("AUTO_CAST_FOR_DYNACONF", "true").lower()
not in false_values
)
if (
castenabled
and data
and isinstance(data, str)
and data.startswith(tuple(converters.keys()))
):
# Check combination token is used
comb_token = re.match(
f"^({'|'.join(converters.keys())}) @(jinja|format)",
data,
)
if comb_token:
tokens = comb_token.group(0)
converter_key_list = tokens.split(" ")
value = data.replace(tokens, "").strip()
else:
parts = data.partition(" ")
converter_key_list = [parts[0]]
value = parts[-1]
# Parse the converters iteratively
for converter_key in converter_key_list[::-1]:
value = get_converter(converter_key, value, box_settings)
else:
value = parse_with_toml(data) if tomlfy else data
if isinstance(value, dict):
value = DynaBox(value, box_settings=box_settings)
return value
def parse_conf_data(data, tomlfy=False, box_settings=None):
# fix for https://github.com/dynaconf/dynaconf/issues/595
if isnamedtupleinstance(data):
return data
# not enforced to not break backwards compatibility with custom loaders
box_settings = box_settings or {}
if isinstance(data, (tuple, list)):
# recursively parse each sequence item
return [
parse_conf_data(item, tomlfy=tomlfy, box_settings=box_settings)
for item in data
]
if isinstance(data, (dict, DynaBox)):
# recursively parse inner dict items
_parsed = {}
for k, v in data.items():
_parsed[k] = parse_conf_data(
v, tomlfy=tomlfy, box_settings=box_settings
)
return _parsed
# return parsed string value
return _parse_conf_data(data, tomlfy=tomlfy, box_settings=box_settings)
def unparse_conf_data(value):
if isinstance(value, bool):
return f"@bool {value}"
if isinstance(value, int):
return f"@int {value}"
if isinstance(value, float):
return f"@float {value}"
if isinstance(value, (list, dict)):
return f"@json {json.dumps(value)}"
if isinstance(value, Lazy):
return try_to_encode(value)
if value is None:
return "@none "
return value
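# A round-trip sketch of the casting markers, assuming auto-casting is
# enabled (the default):
#
#     assert parse_conf_data("@int 42", tomlfy=True) == 42
#     assert parse_conf_data("@bool off", tomlfy=True) is False
#     assert parse_conf_data('@json {"a": 1}', tomlfy=True) == {"a": 1}
#     assert unparse_conf_data(42) == "@int 42"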

498
libs/dynaconf/validator.py Normal file
View File

@@ -0,0 +1,498 @@
from __future__ import annotations
from collections import defaultdict
from itertools import chain
from types import MappingProxyType
from typing import Any
from typing import Callable
from typing import Sequence
from dynaconf import validator_conditions
from dynaconf.utils import ensure_a_list
from dynaconf.utils.functional import empty
EQUALITY_ATTRS = (
"names",
"must_exist",
"when",
"condition",
"operations",
"envs",
)
class ValidationError(Exception):
"""Raised when a validation fails"""
def __init__(self, message: str, *args, **kwargs):
self.details = kwargs.pop("details", [])
super().__init__(message, *args, **kwargs)
self.message = message
class Validator:
"""Validators are conditions attached to settings variables names
or patterns::
Validator('MESSAGE', must_exist=True, eq='Hello World')
The above ensure MESSAGE is available in default env and
is equal to 'Hello World'
`names` are a one (or more) names or patterns::
Validator('NAME')
Validator('NAME', 'OTHER_NAME', 'EVEN_OTHER')
Validator(r'^NAME', r'OTHER./*')
The `operations` are::
eq: value == other
ne: value != other
gt: value > other
lt: value < other
gte: value >= other
lte: value <= other
is_type_of: isinstance(value, type)
is_in: value in sequence
is_not_in: value not in sequence
identity: value is other
cont: other in value (value contains other)
len_eq: len(value) == other
len_ne: len(value) != other
len_min: len(value) >= other
len_max: len(value) <= other
`env` is the env to be checked; it can be a list. When not provided,
the default env is used.
`when` holds a validator and its return decides if validator runs or not::
Validator('NAME', must_exist=True, when=Validator('OTHER', eq=2))
# NAME is required only if OTHER is equal to 2
# `when` is the very first thing evaluated when passed.
# if no env is passed to `when` it is inherited
`must_exist` is an alias for the `required` requirement (evaluated after `when`)::
settings.get(name, empty) returns non empty
`condition` is a callable that is executed and must return a boolean::
Validator('NAME', condition=lambda x: x == 1)
# it is executed before operations.
"""
default_messages = MappingProxyType(
{
"must_exist_true": "{name} is required in env {env}",
"must_exist_false": "{name} cannot exists in env {env}",
"condition": "{name} invalid for {function}({value}) in env {env}",
"operations": (
"{name} must {operation} {op_value} "
"but it is {value} in env {env}"
),
"combined": "combined validators failed {errors}",
}
)
def __init__(
self,
*names: str,
must_exist: bool | None = None,
required: bool | None = None, # alias for `must_exist`
condition: Callable[[Any], bool] | None = None,
when: Validator | None = None,
env: str | Sequence[str] | None = None,
messages: dict[str, str] | None = None,
cast: Callable[[Any], Any] | None = None,
default: Any | Callable[[Any, Validator], Any] | None = empty,
description: str | None = None,
apply_default_on_none: bool | None = False,
**operations: Any,
) -> None:
# Copy immutable MappingProxyType as a mutable dict
self.messages = dict(self.default_messages)
if messages:
self.messages.update(messages)
if when is not None and not isinstance(when, Validator):
raise TypeError("when must be Validator instance")
if condition is not None and not callable(condition):
raise TypeError("condition must be callable")
self.names = names
self.must_exist = must_exist if must_exist is not None else required
self.condition = condition
self.when = when
self.cast = cast or (lambda value: value)
self.operations = operations
self.default = default
self.description = description
self.envs: Sequence[str] | None = None
self.apply_default_on_none = apply_default_on_none
# See #585
self.is_type_of = operations.get("is_type_of")
if isinstance(env, str):
self.envs = [env]
elif isinstance(env, (list, tuple)):
self.envs = env
def __or__(self, other: Validator) -> CombinedValidator:
return OrValidator(self, other, description=self.description)
def __and__(self, other: Validator) -> CombinedValidator:
return AndValidator(self, other, description=self.description)
def __eq__(self, other: object) -> bool:
if self is other:
return True
if type(self).__name__ != type(other).__name__:
return False
identical_attrs = (
getattr(self, attr) == getattr(other, attr)
for attr in EQUALITY_ATTRS
)
if all(identical_attrs):
return True
return False
def validate(
self,
settings: Any,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None:
"""Raise ValidationError if invalid"""
# If only or exclude are not set, this value always passes startswith
only = ensure_a_list(only or [""])
if only and not isinstance(only[0], str):
raise ValueError("'only' must be a string or list of strings.")
exclude = ensure_a_list(exclude)
if exclude and not isinstance(exclude[0], str):
raise ValueError("'exclude' must be a string or list of strings.")
if self.envs is None:
self.envs = [settings.current_env]
if self.when is not None:
try:
# inherit env if not defined
if self.when.envs is None:
self.when.envs = self.envs
self.when.validate(settings, only=only, exclude=exclude)
except ValidationError:
# if when is invalid, return canceling validation flow
return
if only_current_env:
if settings.current_env.upper() in map(
lambda s: s.upper(), self.envs
):
self._validate_items(
settings, settings.current_env, only=only, exclude=exclude
)
return
# If only using current_env, skip using_env decoration (reload)
if (
len(self.envs) == 1
and self.envs[0].upper() == settings.current_env.upper()
):
self._validate_items(
settings, settings.current_env, only=only, exclude=exclude
)
return
for env in self.envs:
self._validate_items(
settings.from_env(env), only=only, exclude=exclude
)
def _validate_items(
self,
settings: Any,
env: str | None = None,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
) -> None:
env = env or settings.current_env
for name in self.names:
# Skip if only is set and name isn't in the only list
if only and not any(name.startswith(sub) for sub in only):
continue
# Skip if exclude is set and name is in the exclude list
if exclude and any(name.startswith(sub) for sub in exclude):
continue
if self.default is not empty:
default_value = (
self.default(settings, self)
if callable(self.default)
else self.default
)
else:
default_value = empty
# THIS IS A FIX FOR #585 in contrast with #799
# toml considers signed strings "+-1" as integers
# however existing users are passing strings
# to default on validator (see #585)
# The solution we added on #667 introduced a new problem
# This fix here makes it to work for both cases.
if (
isinstance(default_value, str)
and default_value.startswith(("+", "-"))
and self.is_type_of is str
):
# avoid TOML from parsing "+-1" as integer
default_value = f"'{default_value}'"
value = settings.setdefault(
name,
default_value,
apply_default_on_none=self.apply_default_on_none,
)
# is name required but does not exist?
if self.must_exist is True and value is empty:
_message = self.messages["must_exist_true"].format(
name=name, env=env
)
raise ValidationError(_message, details=[(self, _message)])
if self.must_exist is False and value is not empty:
_message = self.messages["must_exist_false"].format(
name=name, env=env
)
raise ValidationError(_message, details=[(self, _message)])
if self.must_exist in (False, None) and value is empty:
continue
if self.cast:
# value or default value already set
# by settings.setdefault above
# however we need to cast it
# so we call .set again
value = self.cast(settings.get(name))
settings.set(name, value)
# is there a callable condition?
if self.condition is not None:
if not self.condition(value):
_message = self.messages["condition"].format(
name=name,
function=self.condition.__name__,
value=value,
env=env,
)
raise ValidationError(_message, details=[(self, _message)])
# operations
for op_name, op_value in self.operations.items():
op_function = getattr(validator_conditions, op_name)
if not op_function(value, op_value):
_message = self.messages["operations"].format(
name=name,
operation=op_function.__name__,
op_value=op_value,
value=value,
env=env,
)
raise ValidationError(_message, details=[(self, _message)])
class CombinedValidator(Validator):
def __init__(
self,
validator_a: Validator,
validator_b: Validator,
*args: Any,
**kwargs: Any,
) -> None:
"""Takes 2 validators and combines the validation"""
self.validators = (validator_a, validator_b)
super().__init__(*args, **kwargs)
for attr in EQUALITY_ATTRS:
if not getattr(self, attr, None):
value = tuple(
getattr(validator, attr) for validator in self.validators
)
setattr(self, attr, value)
def validate(
self,
settings: Any,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None: # pragma: no cover
raise NotImplementedError(
"subclasses OrValidator or AndValidator implements this method"
)
class OrValidator(CombinedValidator):
"""Evaluates on Validator() | Validator()"""
def validate(
self,
settings: Any,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None:
"""Ensure at least one of the validators are valid"""
errors = []
for validator in self.validators:
try:
validator.validate(
settings,
only=only,
exclude=exclude,
only_current_env=only_current_env,
)
except ValidationError as e:
errors.append(e)
continue
else:
return
_message = self.messages["combined"].format(
errors=" or ".join(
str(e).replace("combined validators failed ", "")
for e in errors
)
)
raise ValidationError(_message, details=[(self, _message)])
class AndValidator(CombinedValidator):
"""Evaluates on Validator() & Validator()"""
def validate(
self,
settings: Any,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None:
"""Ensure both the validators are valid"""
errors = []
for validator in self.validators:
try:
validator.validate(
settings,
only=only,
exclude=exclude,
only_current_env=only_current_env,
)
except ValidationError as e:
errors.append(e)
continue
if errors:
_message = self.messages["combined"].format(
errors=" and ".join(
str(e).replace("combined validators failed ", "")
for e in errors
)
)
raise ValidationError(_message, details=[(self, _message)])
class ValidatorList(list):
def __init__(
self,
settings: Any,
validators: Sequence[Validator] | None = None,
*args: Validator,
**kwargs: Any,
) -> None:
if isinstance(validators, (list, tuple)):
args = list(args) + list(validators) # type: ignore
self._only = kwargs.pop("validate_only", None)
self._exclude = kwargs.pop("validate_exclude", None)
super().__init__(args, **kwargs) # type: ignore
self.settings = settings
def register(self, *args: Validator, **kwargs: Validator):
validators: list[Validator] = list(
chain.from_iterable(kwargs.values()) # type: ignore
)
validators.extend(args)
for validator in validators:
if validator and validator not in self:
self.append(validator)
def descriptions(self, flat: bool = False) -> dict[str, str | list[str]]:
if flat:
descriptions: dict[str, str | list[str]] = {}
else:
descriptions = defaultdict(list)
for validator in self:
for name in validator.names:
if isinstance(name, tuple) and len(name) > 0:
name = name[0]
if flat:
descriptions.setdefault(name, validator.description)
else:
descriptions[name].append( # type: ignore
validator.description
)
return descriptions
def validate(
self,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None:
for validator in self:
validator.validate(
self.settings,
only=only,
exclude=exclude,
only_current_env=only_current_env,
)
def validate_all(
self,
only: str | Sequence | None = None,
exclude: str | Sequence | None = None,
only_current_env: bool = False,
) -> None:
errors = []
details = []
for validator in self:
try:
validator.validate(
self.settings,
only=only,
exclude=exclude,
only_current_env=only_current_env,
)
except ValidationError as e:
errors.append(e)
details.append((validator, str(e)))
continue
if errors:
raise ValidationError(
"; ".join(str(e) for e in errors), details=details
)
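# A minimal sketch of how validators are typically registered; the setting
# names are illustrative:
#
#     from dynaconf import Dynaconf, Validator
#
#     settings = Dynaconf(
#         validators=[
#             Validator("NAME", must_exist=True, default="bazarr"),
#             Validator("PORT", is_type_of=int, gte=1024) | Validator("PORT", eq=0),
#         ]
#     )
#     settings.validators.validate()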

View File

@@ -0,0 +1,90 @@
# pragma: no cover
"""
Implement basic assertions to be used in assertion action
"""
from __future__ import annotations
def eq(value, other):
"""Equal"""
return value == other
def ne(value, other):
"""Not equal"""
return value != other
def gt(value, other):
"""Greater than"""
return value > other
def lt(value, other):
"""Lower than"""
return value < other
def gte(value, other):
"""Greater than or equal"""
return value >= other
def lte(value, other):
"""Lower than or equal"""
return value <= other
def identity(value, other):
"""Identity check using ID"""
return value is other
def is_type_of(value, other):
"""Type check"""
return isinstance(value, other)
def is_in(value, other):
"""Existence"""
return value in other
def is_not_in(value, other):
"""Inexistence"""
return value not in other
def cont(value, other):
"""Contains"""
return other in value
def len_eq(value, other):
"""Length Equal"""
return len(value) == other
def len_ne(value, other):
"""Length Not equal"""
return len(value) != other
def len_min(value, other):
"""Minimum length"""
return len(value) >= other
def len_max(value, other):
"""Maximum length"""
return len(value) <= other
def startswith(value, term):
"""returns value.startswith(term) result"""
return value.startswith(term)
def endswith(value, term):
"""returns value.endswith(term) result"""
return value.endswith(term)
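# Each function maps one-to-one onto a Validator keyword argument, e.g.
# Validator("LOG_LEVEL", is_in=["DEBUG", "INFO"]) calls is_in(value, ["DEBUG", "INFO"]).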

0
libs/dynaconf/vendor/__init__.py vendored Normal file
View File

15
libs/dynaconf/vendor/box/__init__.py vendored Normal file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
__author__ = 'Chris Griffith'
__version__ = '4.2.3'
from .box import Box
from .box_list import BoxList
from .config_box import ConfigBox
from .shorthand_box import SBox
from .exceptions import BoxError, BoxKeyError
from .from_file import box_from_file

689
libs/dynaconf/vendor/box/box.py vendored Normal file
View File

@@ -0,0 +1,689 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2017-2020 - Chris Griffith - MIT License
"""
Improved dictionary access through dot notation with additional tools.
"""
import copy
import re
import string
import warnings
from collections.abc import Iterable, Mapping, Callable
from keyword import kwlist
from pathlib import Path
from typing import Any, Union, Tuple, List, Dict
from dynaconf.vendor import box
from .converters import (_to_json, _from_json, _from_toml, _to_toml, _from_yaml, _to_yaml, BOX_PARAMETERS)
from .exceptions import BoxError, BoxKeyError, BoxTypeError, BoxValueError, BoxWarning
__all__ = ['Box']
_first_cap_re = re.compile('(.)([A-Z][a-z]+)')
_all_cap_re = re.compile('([a-z0-9])([A-Z])')
_list_pos_re = re.compile(r'\[(\d+)\]')
# a sentinel object for indicating no default, in order to allow users
# to pass `None` as a valid default value
NO_DEFAULT = object()
def _camel_killer(attr):
"""
CamelKiller, qu'est-ce que c'est?
Taken from http://stackoverflow.com/a/1176023/3244542
"""
attr = str(attr)
s1 = _first_cap_re.sub(r'\1_\2', attr)
s2 = _all_cap_re.sub(r'\1_\2', s1)
return re.sub(' *_+', '_', s2.lower())
def _recursive_tuples(iterable, box_class, recreate_tuples=False, **kwargs):
out_list = []
for i in iterable:
if isinstance(i, dict):
out_list.append(box_class(i, **kwargs))
elif isinstance(i, list) or (recreate_tuples and isinstance(i, tuple)):
out_list.append(_recursive_tuples(i, box_class, recreate_tuples, **kwargs))
else:
out_list.append(i)
return tuple(out_list)
def _parse_box_dots(item):
for idx, char in enumerate(item):
if char == '[':
return item[:idx], item[idx:]
elif char == '.':
return item[:idx], item[idx + 1:]
raise BoxError('Could not split box dots properly')
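# e.g. _camel_killer("BoxList") -> "box_list", while _parse_box_dots splits on
# the first separator: "a.b[0]" -> ("a", "b[0]") and "a[0].b" -> ("a", "[0].b").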
def _get_box_config():
return {
# Internal use only
'__created': False,
'__safe_keys': {}
}
class Box(dict):
"""
Improved dictionary access through dot notation with additional tools.
:param default_box: Similar to defaultdict, return a default value
:param default_box_attr: Specify the default replacement.
WARNING: If this is not the default 'Box', it will not be recursive
:param default_box_none_transform: When using default_box, treat keys with none values as absent. True by default
:param frozen_box: After creation, the box cannot be modified
:param camel_killer_box: Convert CamelCase to snake_case
:param conversion_box: Check for near matching keys as attributes
:param modify_tuples_box: Recreate incoming tuples with dicts into Boxes
:param box_safe_prefix: Conversion box prefix for unsafe attributes
:param box_duplicates: "ignore", "error" or "warn" when duplicates exists in a conversion_box
:param box_intact_types: tuple of types to ignore converting
:param box_recast: cast certain keys to a specified type
:param box_dots: access nested Boxes by period separated keys in string
"""
_protected_keys = [
"to_dict",
"to_json",
"to_yaml",
"from_yaml",
"from_json",
"from_toml",
"to_toml",
"merge_update",
] + [attr for attr in dir({}) if not attr.startswith("_")]
def __new__(cls, *args: Any, box_settings: Any = None, default_box: bool = False, default_box_attr: Any = NO_DEFAULT,
default_box_none_transform: bool = True, frozen_box: bool = False, camel_killer_box: bool = False,
conversion_box: bool = True, modify_tuples_box: bool = False, box_safe_prefix: str = 'x',
box_duplicates: str = 'ignore', box_intact_types: Union[Tuple, List] = (),
box_recast: Dict = None, box_dots: bool = False, **kwargs: Any):
"""
Due to the way pickling works in python 3, we need to make sure
the box config is created as early as possible.
"""
obj = super(Box, cls).__new__(cls, *args, **kwargs)
obj._box_config = _get_box_config()
obj._box_config.update({
'default_box': default_box,
'default_box_attr': cls.__class__ if default_box_attr is NO_DEFAULT else default_box_attr,
'default_box_none_transform': default_box_none_transform,
'conversion_box': conversion_box,
'box_safe_prefix': box_safe_prefix,
'frozen_box': frozen_box,
'camel_killer_box': camel_killer_box,
'modify_tuples_box': modify_tuples_box,
'box_duplicates': box_duplicates,
'box_intact_types': tuple(box_intact_types),
'box_recast': box_recast,
'box_dots': box_dots,
'box_settings': box_settings or {}
})
return obj
def __init__(self, *args: Any, box_settings: Any = None, default_box: bool = False, default_box_attr: Any = NO_DEFAULT,
default_box_none_transform: bool = True, frozen_box: bool = False, camel_killer_box: bool = False,
conversion_box: bool = True, modify_tuples_box: bool = False, box_safe_prefix: str = 'x',
box_duplicates: str = 'ignore', box_intact_types: Union[Tuple, List] = (),
box_recast: Dict = None, box_dots: bool = False, **kwargs: Any):
super().__init__()
self._box_config = _get_box_config()
self._box_config.update({
'default_box': default_box,
'default_box_attr': self.__class__ if default_box_attr is NO_DEFAULT else default_box_attr,
'default_box_none_transform': default_box_none_transform,
'conversion_box': conversion_box,
'box_safe_prefix': box_safe_prefix,
'frozen_box': frozen_box,
'camel_killer_box': camel_killer_box,
'modify_tuples_box': modify_tuples_box,
'box_duplicates': box_duplicates,
'box_intact_types': tuple(box_intact_types),
'box_recast': box_recast,
'box_dots': box_dots,
'box_settings': box_settings or {}
})
if not self._box_config['conversion_box'] and self._box_config['box_duplicates'] != 'ignore':
raise BoxError('box_duplicates are only for conversion_boxes')
if len(args) == 1:
if isinstance(args[0], str):
raise BoxValueError('Cannot extrapolate Box from string')
if isinstance(args[0], Mapping):
for k, v in args[0].items():
if v is args[0]:
v = self
if v is None and self._box_config['default_box'] and self._box_config['default_box_none_transform']:
continue
self.__setitem__(k, v)
elif isinstance(args[0], Iterable):
for k, v in args[0]:
self.__setitem__(k, v)
else:
raise BoxValueError('First argument must be mapping or iterable')
elif args:
raise BoxTypeError(f'Box expected at most 1 argument, got {len(args)}')
for k, v in kwargs.items():
if args and isinstance(args[0], Mapping) and v is args[0]:
v = self
self.__setitem__(k, v)
self._box_config['__created'] = True
def __add__(self, other: dict):
new_box = self.copy()
if not isinstance(other, dict):
raise BoxTypeError('Box can only merge two boxes or a box and a dictionary.')
new_box.merge_update(other)
return new_box
def __hash__(self):
if self._box_config['frozen_box']:
hashing = 54321
for item in self.items():
hashing ^= hash(item)
return hashing
raise BoxTypeError('unhashable type: "Box"')
def __dir__(self):
allowed = string.ascii_letters + string.digits + '_'
items = set(super().__dir__())
# Only show items accessible by dot notation
for key in self.keys():
key = str(key)
if ' ' not in key and key[0] not in string.digits and key not in kwlist:
for letter in key:
if letter not in allowed:
break
else:
items.add(key)
for key in self.keys():
if key not in items:
if self._box_config['conversion_box']:
key = self._safe_attr(key)
if key:
items.add(key)
return list(items)
def get(self, key, default=NO_DEFAULT):
if key not in self:
if default is NO_DEFAULT:
if self._box_config['default_box'] and self._box_config['default_box_none_transform']:
return self.__get_default(key)
else:
return None
if isinstance(default, dict) and not isinstance(default, Box):
return Box(default, box_settings=self._box_config.get("box_settings"))
if isinstance(default, list) and not isinstance(default, box.BoxList):
return box.BoxList(default)
return default
return self[key]
def copy(self):
return Box(super().copy(), **self.__box_config())
def __copy__(self):
return Box(super().copy(), **self.__box_config())
def __deepcopy__(self, memodict=None):
frozen = self._box_config['frozen_box']
config = self.__box_config()
config['frozen_box'] = False
out = self.__class__(**config)
memodict = memodict or {}
memodict[id(self)] = out
for k, v in self.items():
out[copy.deepcopy(k, memodict)] = copy.deepcopy(v, memodict)
out._box_config['frozen_box'] = frozen
return out
def __setstate__(self, state):
self._box_config = state['_box_config']
self.__dict__.update(state)
def keys(self):
return super().keys()
def values(self):
return [self[x] for x in self.keys()]
def items(self):
return [(x, self[x]) for x in self.keys()]
def __get_default(self, item):
default_value = self._box_config['default_box_attr']
if default_value in (self.__class__, dict):
value = self.__class__(**self.__box_config())
elif isinstance(default_value, dict):
value = self.__class__(**self.__box_config(), **default_value)
elif isinstance(default_value, list):
value = box.BoxList(**self.__box_config())
elif isinstance(default_value, Callable):
value = default_value()
elif hasattr(default_value, 'copy'):
value = default_value.copy()
else:
value = default_value
self.__convert_and_store(item, value)
return value
def __box_config(self):
out = {}
for k, v in self._box_config.copy().items():
if not k.startswith('__'):
out[k] = v
return out
def __recast(self, item, value):
if self._box_config['box_recast'] and item in self._box_config['box_recast']:
try:
return self._box_config['box_recast'][item](value)
except ValueError:
raise BoxValueError(f'Cannot convert {value} to {self._box_config["box_recast"][item]}') from None
return value
def __convert_and_store(self, item, value):
if self._box_config['conversion_box']:
safe_key = self._safe_attr(item)
self._box_config['__safe_keys'][safe_key] = item
if isinstance(value, (int, float, str, bytes, bytearray, bool, complex, set, frozenset)):
return super().__setitem__(item, value)
# If the value has already been converted or should not be converted, return it as-is
if self._box_config['box_intact_types'] and isinstance(value, self._box_config['box_intact_types']):
return super().__setitem__(item, value)
# This is the magic sauce that makes sub dictionaries into new box objects
if isinstance(value, dict) and not isinstance(value, Box):
value = self.__class__(value, **self.__box_config())
elif isinstance(value, list) and not isinstance(value, box.BoxList):
if self._box_config['frozen_box']:
value = _recursive_tuples(value,
self.__class__,
recreate_tuples=self._box_config['modify_tuples_box'],
**self.__box_config())
else:
value = box.BoxList(value, box_class=self.__class__, **self.__box_config())
elif self._box_config['modify_tuples_box'] and isinstance(value, tuple):
value = _recursive_tuples(value, self.__class__, recreate_tuples=True, **self.__box_config())
super().__setitem__(item, value)
def __getitem__(self, item, _ignore_default=False):
try:
return super().__getitem__(item)
except KeyError as err:
if item == '_box_config':
raise BoxKeyError('_box_config should only exist as an attribute and is never defaulted') from None
if self._box_config['box_dots'] and isinstance(item, str) and ('.' in item or '[' in item):
first_item, children = _parse_box_dots(item)
if first_item in self.keys():
if hasattr(self[first_item], '__getitem__'):
return self[first_item][children]
if self._box_config['camel_killer_box'] and isinstance(item, str):
converted = _camel_killer(item)
if converted in self.keys():
return super().__getitem__(converted)
if self._box_config['default_box'] and not _ignore_default:
return self.__get_default(item)
raise BoxKeyError(str(err)) from None
def __getattr__(self, item):
try:
try:
value = self.__getitem__(item, _ignore_default=True)
except KeyError:
value = object.__getattribute__(self, item)
except AttributeError as err:
if item == '__getstate__':
raise BoxKeyError(item) from None
if item == '_box_config':
raise BoxError('_box_config key must exist') from None
if self._box_config['conversion_box']:
safe_key = self._safe_attr(item)
if safe_key in self._box_config['__safe_keys']:
return self.__getitem__(self._box_config['__safe_keys'][safe_key])
if self._box_config['default_box']:
return self.__get_default(item)
raise BoxKeyError(str(err)) from None
return value
def __setitem__(self, key, value):
if key != '_box_config' and self._box_config['__created'] and self._box_config['frozen_box']:
raise BoxError('Box is frozen')
if self._box_config['box_dots'] and isinstance(key, str) and '.' in key:
first_item, children = _parse_box_dots(key)
if first_item in self.keys():
if hasattr(self[first_item], '__setitem__'):
return self[first_item].__setitem__(children, value)
value = self.__recast(key, value)
if key not in self.keys() and self._box_config['camel_killer_box']:
if self._box_config['camel_killer_box'] and isinstance(key, str):
key = _camel_killer(key)
if self._box_config['conversion_box'] and self._box_config['box_duplicates'] != 'ignore':
self._conversion_checks(key)
self.__convert_and_store(key, value)
def __setattr__(self, key, value):
if key != '_box_config' and self._box_config['frozen_box'] and self._box_config['__created']:
raise BoxError('Box is frozen')
if key in self._protected_keys:
raise BoxKeyError(f'Key name "{key}" is protected')
if key == '_box_config':
return object.__setattr__(self, key, value)
value = self.__recast(key, value)
safe_key = self._safe_attr(key)
if safe_key in self._box_config['__safe_keys']:
key = self._box_config['__safe_keys'][safe_key]
self.__setitem__(key, value)
def __delitem__(self, key):
if self._box_config['frozen_box']:
raise BoxError('Box is frozen')
if key not in self.keys() and self._box_config['box_dots'] and isinstance(key, str) and '.' in key:
first_item, children = key.split('.', 1)
if first_item in self.keys() and isinstance(self[first_item], dict):
return self[first_item].__delitem__(children)
if key not in self.keys() and self._box_config['camel_killer_box']:
if self._box_config['camel_killer_box'] and isinstance(key, str):
for each_key in self:
if _camel_killer(key) == each_key:
key = each_key
break
super().__delitem__(key)
def __delattr__(self, item):
if self._box_config['frozen_box']:
raise BoxError('Box is frozen')
if item == '_box_config':
raise BoxError('"_box_config" is protected')
if item in self._protected_keys:
raise BoxKeyError(f'Key name "{item}" is protected')
try:
self.__delitem__(item)
except KeyError as err:
if self._box_config['conversion_box']:
safe_key = self._safe_attr(item)
if safe_key in self._box_config['__safe_keys']:
self.__delitem__(self._box_config['__safe_keys'][safe_key])
del self._box_config['__safe_keys'][safe_key]
return
raise BoxKeyError(err)
def pop(self, key, *args):
if args:
if len(args) != 1:
raise BoxError('pop() takes only one optional argument "default"')
try:
item = self[key]
except KeyError:
return args[0]
else:
del self[key]
return item
try:
item = self[key]
except KeyError:
raise BoxKeyError('{0}'.format(key)) from None
else:
del self[key]
return item
def clear(self):
super().clear()
self._box_config['__safe_keys'].clear()
def popitem(self):
try:
key = next(self.__iter__())
except StopIteration:
raise BoxKeyError('Empty box') from None
return key, self.pop(key)
def __repr__(self):
return f'<Box: {self.to_dict()}>'
def __str__(self):
return str(self.to_dict())
def __iter__(self):
for key in self.keys():
yield key
def __reversed__(self):
for key in reversed(list(self.keys())):
yield key
def to_dict(self):
"""
Turn the Box and sub Boxes back into a native python dictionary.
:return: python dictionary of this Box
"""
out_dict = dict(self)
for k, v in out_dict.items():
if v is self:
out_dict[k] = out_dict
elif isinstance(v, Box):
out_dict[k] = v.to_dict()
elif isinstance(v, box.BoxList):
out_dict[k] = v.to_list()
return out_dict
def update(self, __m=None, **kwargs):
if __m:
if hasattr(__m, 'keys'):
for k in __m:
self.__convert_and_store(k, __m[k])
else:
for k, v in __m:
self.__convert_and_store(k, v)
for k in kwargs:
self.__convert_and_store(k, kwargs[k])
def merge_update(self, __m=None, **kwargs):
def convert_and_set(k, v):
intact_type = (self._box_config['box_intact_types'] and isinstance(v, self._box_config['box_intact_types']))
if isinstance(v, dict) and not intact_type:
# Box objects must be created in case they are already
# in the `converted` box_config set
v = self.__class__(v, **self.__box_config())
if k in self and isinstance(self[k], dict):
if isinstance(self[k], Box):
self[k].merge_update(v)
else:
self[k].update(v)
return
if isinstance(v, list) and not intact_type:
v = box.BoxList(v, **self.__box_config())
self.__setitem__(k, v)
if __m:
if hasattr(__m, 'keys'):
for key in __m:
convert_and_set(key, __m[key])
else:
for key, value in __m:
convert_and_set(key, value)
for key in kwargs:
convert_and_set(key, kwargs[key])
def setdefault(self, item, default=None):
if item in self:
return self[item]
if isinstance(default, dict):
default = self.__class__(default, **self.__box_config())
if isinstance(default, list):
default = box.BoxList(default, box_class=self.__class__, **self.__box_config())
self[item] = default
return default
def _safe_attr(self, attr):
"""Convert a key into something that is accessible as an attribute"""
allowed = string.ascii_letters + string.digits + '_'
if isinstance(attr, tuple):
attr = "_".join([str(x) for x in attr])
attr = attr.decode('utf-8', 'ignore') if isinstance(attr, bytes) else str(attr)
if self.__box_config()['camel_killer_box']:
attr = _camel_killer(attr)
out = []
last_safe = 0
for i, character in enumerate(attr):
if character in allowed:
last_safe = i
out.append(character)
elif not out:
continue
else:
if last_safe == i - 1:
out.append('_')
out = "".join(out)[:last_safe + 1]
try:
int(out[0])
except (ValueError, IndexError):
pass
else:
out = f'{self.__box_config()["box_safe_prefix"]}{out}'
if out in kwlist:
out = f'{self.__box_config()["box_safe_prefix"]}{out}'
return out
def _conversion_checks(self, item):
"""
Internal use for checking if a duplicate safe attribute already exists
:param item: Item to see if a dup exists
"""
safe_item = self._safe_attr(item)
if safe_item in self._box_config['__safe_keys']:
dups = [f'{item}({safe_item})', f'{self._box_config["__safe_keys"][safe_item]}({safe_item})']
if self._box_config['box_duplicates'].startswith('warn'):
warnings.warn(f'Duplicate conversion attributes exist: {dups}', BoxWarning)
else:
raise BoxError(f'Duplicate conversion attributes exist: {dups}')
def to_json(self, filename: Union[str, Path] = None, encoding: str = 'utf-8', errors: str = 'strict',
**json_kwargs):
"""
Transform the Box object into a JSON string.
:param filename: If provided will save to file
:param encoding: File encoding
:param errors: How to handle encoding errors
:param json_kwargs: additional arguments to pass to json.dump(s)
:return: string of JSON (if no filename provided)
"""
return _to_json(self.to_dict(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
@classmethod
def from_json(cls, json_string: str = None, filename: Union[str, Path] = None, encoding: str = 'utf-8',
errors: str = 'strict', **kwargs):
"""
Transform a json object string into a Box object. If the incoming
json is a list, you must use BoxList.from_json.
:param json_string: string to pass to `json.loads`
:param filename: filename to open and pass to `json.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `Box()` or `json.loads`
:return: Box object from json data
"""
box_args = {}
for arg in kwargs.copy():
if arg in BOX_PARAMETERS:
box_args[arg] = kwargs.pop(arg)
data = _from_json(json_string, filename=filename, encoding=encoding, errors=errors, **kwargs)
if not isinstance(data, dict):
raise BoxError(f'json data not returned as a dictionary, but rather a {type(data).__name__}')
return cls(data, **box_args)
def to_yaml(self, filename: Union[str, Path] = None, default_flow_style: bool = False, encoding: str = 'utf-8',
errors: str = 'strict', **yaml_kwargs):
"""
Transform the Box object into a YAML string.
:param filename: If provided will save to file
:param default_flow_style: False will recursively dump dicts
:param encoding: File encoding
:param errors: How to handle encoding errors
:param yaml_kwargs: additional arguments to pass to yaml.dump
:return: string of YAML (if no filename provided)
"""
return _to_yaml(self.to_dict(), filename=filename, default_flow_style=default_flow_style,
encoding=encoding, errors=errors, **yaml_kwargs)
@classmethod
def from_yaml(cls, yaml_string: str = None, filename: Union[str, Path] = None, encoding: str = 'utf-8',
errors: str = 'strict', **kwargs):
"""
Transform a yaml object string into a Box object. By default will use SafeLoader.
:param yaml_string: string to pass to `yaml.load`
:param filename: filename to open and pass to `yaml.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `Box()` or `yaml.load`
:return: Box object from yaml data
"""
box_args = {}
for arg in kwargs.copy():
if arg in BOX_PARAMETERS:
box_args[arg] = kwargs.pop(arg)
data = _from_yaml(yaml_string=yaml_string, filename=filename, encoding=encoding, errors=errors, **kwargs)
if not isinstance(data, dict):
raise BoxError(f'yaml data not returned as a dictionary but rather a {type(data).__name__}')
return cls(data, **box_args)
def to_toml(self, filename: Union[str, Path] = None, encoding: str = 'utf-8', errors: str = 'strict'):
"""
Transform the Box object into a toml string.
:param filename: File to write the toml object to
:param encoding: File encoding
:param errors: How to handle encoding errors
:return: string of TOML (if no filename provided)
"""
return _to_toml(self.to_dict(), filename=filename, encoding=encoding, errors=errors)
@classmethod
def from_toml(cls, toml_string: str = None, filename: Union[str, Path] = None,
encoding: str = 'utf-8', errors: str = 'strict', **kwargs):
"""
Transforms a toml string or file into a Box object
:param toml_string: string to pass to `toml.load`
:param filename: filename to open and pass to `toml.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `Box()`
:return: Box object from toml data
"""
box_args = {}
for arg in kwargs.copy():
if arg in BOX_PARAMETERS:
box_args[arg] = kwargs.pop(arg)
data = _from_toml(toml_string=toml_string, filename=filename, encoding=encoding, errors=errors)
return cls(data, **box_args)
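
A hedged sketch (not part of the vendored file) of the serialization round-trip the methods above provide; values are illustrative:

from dynaconf.vendor.box.box import Box

b = Box({"server": {"host": "localhost", "port": 6767}})
text = b.to_yaml()            # nested dicts are dumped recursively
b2 = Box.from_yaml(text)      # SafeLoader by default; non-dict data raises BoxError
assert b2.server.port == 6767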

276
libs/dynaconf/vendor/box/box_list.py vendored Normal file
View File

@ -0,0 +1,276 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2017-2020 - Chris Griffith - MIT License
import copy
import re
from typing import Iterable, Optional
from dynaconf.vendor import box
from .converters import (_to_yaml, _from_yaml, _to_json, _from_json,
_to_toml, _from_toml, _to_csv, _from_csv, BOX_PARAMETERS)
from .exceptions import BoxError, BoxTypeError, BoxKeyError
_list_pos_re = re.compile(r'\[(\d+)\]')
DYNABOX_CLASS = None # a cache constant to avoid multiple imports
def get_dynabox_class_avoiding_circular_import():
"""
See dynaconf issue #462
"""
global DYNABOX_CLASS
if DYNABOX_CLASS is None:
from dynaconf.utils.boxing import DynaBox
DYNABOX_CLASS = DynaBox
return DYNABOX_CLASS
class BoxList(list):
"""
Drop in replacement of list, that converts added objects to Box or BoxList
objects as necessary.
"""
def __init__(self, iterable: Iterable = None, box_class: Optional[box.Box] = None, **box_options):
self.box_class = box_class or get_dynabox_class_avoiding_circular_import()
self.box_options = box_options
self.box_org_ref = id(iterable) if iterable else 0
if iterable:
for x in iterable:
self.append(x)
if box_options.get('frozen_box'):
def frozen(*args, **kwargs):
raise BoxError('BoxList is frozen')
for method in ['append', 'extend', 'insert', 'pop', 'remove', 'reverse', 'sort']:
self.__setattr__(method, frozen)
def __getitem__(self, item):
if self.box_options.get('box_dots') and isinstance(item, str) and item.startswith('['):
list_pos = _list_pos_re.search(item)
value = super(BoxList, self).__getitem__(int(list_pos.groups()[0]))
if len(list_pos.group()) == len(item):
return value
return value.__getitem__(item[len(list_pos.group()):].lstrip('.'))
return super(BoxList, self).__getitem__(item)
def __delitem__(self, key):
if self.box_options.get('frozen_box'):
raise BoxError('BoxList is frozen')
super(BoxList, self).__delitem__(key)
def __setitem__(self, key, value):
if self.box_options.get('frozen_box'):
raise BoxError('BoxList is frozen')
if self.box_options.get('box_dots') and isinstance(key, str) and key.startswith('['):
list_pos = _list_pos_re.search(key)
pos = int(list_pos.groups()[0])
if len(list_pos.group()) == len(key):
return super(BoxList, self).__setitem__(pos, value)
return super(BoxList, self).__getitem__(pos).__setitem__(key[len(list_pos.group()):].lstrip('.'), value)
super(BoxList, self).__setitem__(key, value)
def _is_intact_type(self, obj):
try:
if self.box_options.get('box_intact_types') and isinstance(obj, self.box_options['box_intact_types']):
return True
except AttributeError as err:
if 'box_options' in self.__dict__:
raise BoxKeyError(err)
return False
def append(self, p_object):
if isinstance(p_object, dict) and not self._is_intact_type(p_object):
try:
p_object = self.box_class(p_object, **self.box_options)
except AttributeError as err:
if 'box_class' in self.__dict__:
raise BoxKeyError(err)
elif isinstance(p_object, list) and not self._is_intact_type(p_object):
try:
p_object = (self if id(p_object) == self.box_org_ref else BoxList(p_object, **self.box_options))
except AttributeError as err:
if 'box_org_ref' in self.__dict__:
raise BoxKeyError(err)
super(BoxList, self).append(p_object)
def extend(self, iterable):
for item in iterable:
self.append(item)
def insert(self, index, p_object):
if isinstance(p_object, dict) and not self._is_intact_type(p_object):
p_object = self.box_class(p_object, **self.box_options)
elif isinstance(p_object, list) and not self._is_intact_type(p_object):
p_object = (self if id(p_object) == self.box_org_ref else BoxList(p_object, **self.box_options))
super(BoxList, self).insert(index, p_object)
def __repr__(self):
return f'<BoxList: {self.to_list()}>'
def __str__(self):
return str(self.to_list())
def __copy__(self):
return BoxList((x for x in self), self.box_class, **self.box_options)
def __deepcopy__(self, memo=None):
out = self.__class__()
memo = memo or {}
memo[id(self)] = out
for k in self:
out.append(copy.deepcopy(k, memo=memo))
return out
def __hash__(self):
if self.box_options.get('frozen_box'):
hashing = 98765
hashing ^= hash(tuple(self))
return hashing
raise BoxTypeError("unhashable type: 'BoxList'")
def to_list(self):
new_list = []
for x in self:
if x is self:
new_list.append(new_list)
elif isinstance(x, box.Box):
new_list.append(x.to_dict())
elif isinstance(x, BoxList):
new_list.append(x.to_list())
else:
new_list.append(x)
return new_list
def to_json(self, filename: str = None, encoding: str = 'utf-8', errors: str = 'strict',
multiline: bool = False, **json_kwargs):
"""
Transform the BoxList object into a JSON string.
:param filename: If provided will save to file
:param encoding: File encoding
:param errors: How to handle encoding errors
:param multiline: Put each item in the list onto its own line
:param json_kwargs: additional arguments to pass to json.dump(s)
:return: string of JSON or return of `json.dump`
"""
if filename and multiline:
lines = [_to_json(item, filename=False, encoding=encoding, errors=errors, **json_kwargs) for item in self]
with open(filename, 'w', encoding=encoding, errors=errors) as f:
f.write("\n".join(lines))
else:
return _to_json(self.to_list(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
@classmethod
def from_json(cls, json_string: str = None, filename: str = None, encoding: str = 'utf-8', errors: str = 'strict',
multiline: bool = False, **kwargs):
"""
Transform a json object string into a BoxList object. If the incoming
json is a dict, you must use Box.from_json.
:param json_string: string to pass to `json.loads`
:param filename: filename to open and pass to `json.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param multiline: One object per line
:param kwargs: parameters to pass to `Box()` or `json.loads`
:return: BoxList object from json data
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_json(json_string, filename=filename, encoding=encoding,
errors=errors, multiline=multiline, **kwargs)
if not isinstance(data, list):
raise BoxError(f'json data not returned as a list, but rather a {type(data).__name__}')
return cls(data, **bx_args)
def to_yaml(self, filename: str = None, default_flow_style: bool = False,
encoding: str = 'utf-8', errors: str = 'strict', **yaml_kwargs):
"""
Transform the BoxList object into a YAML string.
:param filename: If provided will save to file
:param default_flow_style: False will recursively dump dicts
:param encoding: File encoding
:param errors: How to handle encoding errors
:param yaml_kwargs: additional arguments to pass to yaml.dump
:return: string of YAML or return of `yaml.dump`
"""
return _to_yaml(self.to_list(), filename=filename, default_flow_style=default_flow_style,
encoding=encoding, errors=errors, **yaml_kwargs)
@classmethod
def from_yaml(cls, yaml_string: str = None, filename: str = None,
encoding: str = 'utf-8', errors: str = 'strict', **kwargs):
"""
Transform a yaml object string into a BoxList object.
:param yaml_string: string to pass to `yaml.load`
:param filename: filename to open and pass to `yaml.load`
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `BoxList()` or `yaml.load`
:return: BoxList object from yaml data
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_yaml(yaml_string=yaml_string, filename=filename, encoding=encoding, errors=errors, **kwargs)
if not isinstance(data, list):
raise BoxError(f'yaml data not returned as a list but rather a {type(data).__name__}')
return cls(data, **bx_args)
def to_toml(self, filename: str = None, key_name: str = 'toml', encoding: str = 'utf-8', errors: str = 'strict'):
"""
Transform the BoxList object into a toml string.
:param filename: File to write the toml object to
:param key_name: Specify the name of the key to store the string under
(cannot directly convert to toml)
:param encoding: File encoding
:param errors: How to handle encoding errors
:return: string of TOML (if no filename provided)
"""
return _to_toml({key_name: self.to_list()}, filename=filename, encoding=encoding, errors=errors)
@classmethod
def from_toml(cls, toml_string: str = None, filename: str = None, key_name: str = 'toml',
encoding: str = 'utf-8', errors: str = 'strict', **kwargs):
"""
Transforms a toml string or file into a BoxList object
:param toml_string: string to pass to `toml.load`
:param filename: filename to open and pass to `toml.load`
:param key_name: Specify the name of the key to pull the list from
(cannot directly convert from toml)
:param encoding: File encoding
:param errors: How to handle encoding errors
:param kwargs: parameters to pass to `Box()`
:return: BoxList object from toml data
"""
bx_args = {}
for arg in list(kwargs.keys()):
if arg in BOX_PARAMETERS:
bx_args[arg] = kwargs.pop(arg)
data = _from_toml(toml_string=toml_string, filename=filename, encoding=encoding, errors=errors)
if key_name not in data:
raise BoxError(f'{key_name} was not found.')
return cls(data[key_name], **bx_args)
def to_csv(self, filename, encoding: str = 'utf-8', errors: str = 'strict'):
_to_csv(self, filename=filename, encoding=encoding, errors=errors)
@classmethod
def from_csv(cls, filename, encoding: str = 'utf-8', errors: str = 'strict'):
return cls(_from_csv(filename=filename, encoding=encoding, errors=errors))
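
A minimal sketch (not part of the vendored file) of the conversion behaviour above; the default box_class resolves to DynaBox:

from dynaconf.vendor.box.box_list import BoxList

bl = BoxList([{"name": "first"}, [1, 2]])
bl.append({"name": "second"})
print(type(bl[0]).__name__)   # DynaBox, unless box_class was overridden
print(bl.to_list())           # back to plain dicts and lists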

133
libs/dynaconf/vendor/box/config_box.py vendored Normal file
View File

@ -0,0 +1,133 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from dynaconf.vendor.box.box import Box
class ConfigBox(Box):
"""
Modified box object to add object transforms.
Allows for built-in transforms like:
cns = ConfigBox(my_bool='yes', my_int='5', my_list='5,4,3,3,2')
cns.bool('my_bool') # True
cns.int('my_int') # 5
cns.list('my_list', mod=lambda x: int(x)) # [5, 4, 3, 3, 2]
"""
_protected_keys = dir(Box) + ['bool', 'int', 'float', 'list', 'getboolean', 'getfloat', 'getint']
def __getattr__(self, item):
"""
Config file keys are stored in lower case, so be a little more
loosey goosey about attribute lookups
"""
try:
return super().__getattr__(item)
except AttributeError:
return super().__getattr__(item.lower())
def __dir__(self):
return super().__dir__() + ['bool', 'int', 'float', 'list', 'getboolean', 'getfloat', 'getint']
def bool(self, item, default=None):
"""
Return value of key as a boolean
:param item: key of value to transform
:param default: value to return if item does not exist
:return: approximated bool of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
if isinstance(item, (bool, int)):
return bool(item)
if (isinstance(item, str)
and item.lower() in ('n', 'no', 'false', 'f', '0')):
return False
return bool(item)
def int(self, item, default=None):
"""
Return value of key as an int
:param item: key of value to transform
:param default: value to return if item does not exist
:return: int of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
return int(item)
def float(self, item, default=None):
"""
Return value of key as a float
:param item: key of value to transform
:param default: value to return if item does not exist
:return: float of value
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
return float(item)
def list(self, item, default=None, spliter=",", strip=True, mod=None):
"""
Return value of key as a list
:param item: key of value to transform
:param mod: function to map against list
:param default: value to return if item does not exist
:param spliter: character to split str on
:param strip: strip whitespace (and surrounding brackets) from each item
:return: list of items
"""
try:
item = self.__getattr__(item)
except AttributeError as err:
if default is not None:
return default
raise err
if strip:
item = item.lstrip('[').rstrip(']')
out = [x.strip() if strip else x for x in item.split(spliter)]
if mod:
return list(map(mod, out))
return out
# loose configparser compatibility
def getboolean(self, item, default=None):
return self.bool(item, default)
def getint(self, item, default=None):
return self.int(item, default)
def getfloat(self, item, default=None):
return self.float(item, default)
def __repr__(self):
return '<ConfigBox: {0}>'.format(str(self.to_dict()))
def copy(self):
return ConfigBox(super().copy())
def __copy__(self):
return ConfigBox(super().copy())
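
A hedged usage sketch (not part of the vendored file) of the transforms and the configparser-style aliases above:

from dynaconf.vendor.box.config_box import ConfigBox

cfg = ConfigBox(debug='no', port='8080', langs='en, fr,de')
assert cfg.bool('debug') is False        # 'no' is in the falsy string set
assert cfg.getint('port') == 8080        # alias for cfg.int('port')
assert cfg.list('langs') == ['en', 'fr', 'de']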

129
libs/dynaconf/vendor/box/converters.py vendored Normal file
View File

@ -0,0 +1,129 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Abstract converter functions for use in any Box class
import csv
import json
import sys
import warnings
from pathlib import Path
import dynaconf.vendor.ruamel.yaml as yaml
from dynaconf.vendor.box.exceptions import BoxError, BoxWarning
from dynaconf.vendor import tomllib as toml
BOX_PARAMETERS = ('default_box', 'default_box_attr', 'conversion_box',
'frozen_box', 'camel_killer_box',
'box_safe_prefix', 'box_duplicates', 'ordered_box',
'default_box_none_transform', 'box_dots', 'modify_tuples_box',
'box_intact_types', 'box_recast')
def _exists(filename, create=False):
path = Path(filename)
if create:
try:
path.touch(exist_ok=True)
except OSError as err:
raise BoxError(f'Could not create file {filename} - {err}')
else:
return
if not path.exists():
raise BoxError(f'File "{filename}" does not exist')
if not path.is_file():
raise BoxError(f'{filename} is not a file')
def _to_json(obj, filename=None, encoding="utf-8", errors="strict", **json_kwargs):
json_dump = json.dumps(obj, ensure_ascii=False, **json_kwargs)
if filename:
_exists(filename, create=True)
with open(filename, 'w', encoding=encoding, errors=errors) as f:
f.write(json_dump)  # json.dumps always returns str on Python 3
else:
return json_dump
def _from_json(json_string=None, filename=None, encoding="utf-8", errors="strict", multiline=False, **kwargs):
if filename:
_exists(filename)
with open(filename, 'r', encoding=encoding, errors=errors) as f:
if multiline:
data = [json.loads(line.strip(), **kwargs) for line in f
if line.strip() and not line.strip().startswith("#")]
else:
data = json.load(f, **kwargs)
elif json_string:
data = json.loads(json_string, **kwargs)
else:
raise BoxError('from_json requires a string or filename')
return data
def _to_yaml(obj, filename=None, default_flow_style=False, encoding="utf-8", errors="strict", **yaml_kwargs):
if filename:
_exists(filename, create=True)
with open(filename, 'w',
encoding=encoding, errors=errors) as f:
yaml.dump(obj, stream=f, default_flow_style=default_flow_style, **yaml_kwargs)
else:
return yaml.dump(obj, default_flow_style=default_flow_style, **yaml_kwargs)
def _from_yaml(yaml_string=None, filename=None, encoding="utf-8", errors="strict", **kwargs):
if 'Loader' not in kwargs:
kwargs['Loader'] = yaml.SafeLoader
if filename:
_exists(filename)
with open(filename, 'r', encoding=encoding, errors=errors) as f:
data = yaml.load(f, **kwargs)
elif yaml_string:
data = yaml.load(yaml_string, **kwargs)
else:
raise BoxError('from_yaml requires a string or filename')
return data
def _to_toml(obj, filename=None, encoding="utf-8", errors="strict"):
if filename:
_exists(filename, create=True)
with open(filename, 'w', encoding=encoding, errors=errors) as f:
toml.dump(obj, f)
else:
return toml.dumps(obj)
def _from_toml(toml_string=None, filename=None, encoding="utf-8", errors="strict"):
if filename:
_exists(filename)
with open(filename, 'r', encoding=encoding, errors=errors) as f:
data = toml.load(f)
elif toml_string:
data = toml.loads(toml_string)
else:
raise BoxError('from_toml requires a string or filename')
return data
def _to_csv(box_list, filename, encoding="utf-8", errors="strict"):
csv_column_names = list(box_list[0].keys())
for row in box_list:
if list(row.keys()) != csv_column_names:
raise BoxError('BoxList must contain the same dictionary structure for every item to convert to csv')
if filename:
_exists(filename, create=True)
with open(filename, 'w', encoding=encoding, errors=errors, newline='') as csv_file:
writer = csv.DictWriter(csv_file, fieldnames=csv_column_names)
writer.writeheader()
for data in box_list:
writer.writerow(data)
def _from_csv(filename, encoding="utf-8", errors="strict"):
_exists(filename)
with open(filename, 'r', encoding=encoding, errors=errors, newline='') as f:
reader = csv.DictReader(f)
return [row for row in reader]
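
A sketch (not part of the vendored file) of the JSON-lines path handled by _from_json above; 'items.jsonl' is a hypothetical file:

from dynaconf.vendor.box.box_list import BoxList

with open('items.jsonl', 'w', encoding='utf-8') as f:
    f.write('{"id": 1}\n# comment lines are skipped\n{"id": 2}\n')
items = BoxList.from_json(filename='items.jsonl', multiline=True)
assert [item.id for item in items] == [1, 2]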

22
libs/dynaconf/vendor/box/exceptions.py vendored Normal file
View File

@ -0,0 +1,22 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
class BoxError(Exception):
"""Non standard dictionary exceptions"""
class BoxKeyError(BoxError, KeyError, AttributeError):
"""Key does not exist"""
class BoxTypeError(BoxError, TypeError):
"""Cannot handle that instance's type"""
class BoxValueError(BoxError, ValueError):
"""Issue doing something with that value"""
class BoxWarning(UserWarning):
"""Here be dragons"""

73
libs/dynaconf/vendor/box/from_file.py vendored Normal file
View File

@ -0,0 +1,73 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from json import JSONDecodeError
from pathlib import Path
from typing import Union
from dynaconf.vendor.tomllib import TOMLDecodeError
from dynaconf.vendor.ruamel.yaml import YAMLError
from .exceptions import BoxError
from .box import Box
from .box_list import BoxList
__all__ = ['box_from_file']
def _to_json(data):
try:
return Box.from_json(data)
except JSONDecodeError:
raise BoxError('File is not JSON as expected')
except BoxError:
return BoxList.from_json(data)
def _to_yaml(data):
try:
return Box.from_yaml(data)
except YAMLError:
raise BoxError('File is not YAML as expected')
except BoxError:
return BoxList.from_yaml(data)
def _to_toml(data):
try:
return Box.from_toml(data)
except TOMLDecodeError:
raise BoxError('File is not TOML as expected')
def box_from_file(file: Union[str, Path], file_type: str = None,
encoding: str = "utf-8", errors: str = "strict") -> Union[Box, BoxList]:
"""
Loads the provided file and tries to parse it into a Box or BoxList object as appropriate.
:param file: Location of file
:param encoding: File encoding
:param errors: How to handle encoding errors
:param file_type: manually specify file type: json, toml or yaml
:return: Box or BoxList
"""
if not isinstance(file, Path):
file = Path(file)
if not file.exists():
raise BoxError(f'file "{file}" does not exist')
data = file.read_text(encoding=encoding, errors=errors)
if file_type:
if file_type.lower() == 'json':
return _to_json(data)
if file_type.lower() == 'yaml':
return _to_yaml(data)
if file_type.lower() == 'toml':
return _to_toml(data)
raise BoxError(f'"{file_type}" is an unknown type, please use either toml, yaml or json')
if file.suffix in ('.json', '.jsn'):
return _to_json(data)
if file.suffix in ('.yaml', '.yml'):
return _to_yaml(data)
if file.suffix in ('.tml', '.toml'):
return _to_toml(data)
raise BoxError('Could not determine file type based on extension, please provide file_type')
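
A hedged sketch (not part of the vendored file); the file names are hypothetical and the parser is chosen by suffix unless file_type overrides it:

from dynaconf.vendor.box.from_file import box_from_file

settings = box_from_file('config.yaml')                      # inferred from suffix
settings = box_from_file('settings.txt', file_type='toml')   # explicit override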

38
libs/dynaconf/vendor/box/shorthand_box.py vendored Normal file
View File

@ -0,0 +1,38 @@
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from dynaconf.vendor.box.box import Box
class SBox(Box):
"""
ShorthandBox (SBox) allows for
property access of `dict`, `json` and `yaml`
"""
_protected_keys = dir({}) + ['to_dict', 'to_json', 'to_yaml', 'json', 'yaml', 'from_yaml', 'from_json',
'dict', 'toml', 'from_toml', 'to_toml']
@property
def dict(self):
return self.to_dict()
@property
def json(self):
return self.to_json()
@property
def yaml(self):
return self.to_yaml()
@property
def toml(self):
return self.to_toml()
def __repr__(self):
return '<ShorthandBox: {0}>'.format(str(self.to_dict()))
def copy(self):
return SBox(super(SBox, self).copy())
def __copy__(self):
return SBox(super(SBox, self).copy())
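
A brief sketch (not part of the vendored file); the module path is assumed from box's usual layout:

from dynaconf.vendor.box.shorthand_box import SBox

sb = SBox(greeting='hello')
print(sb.json)   # property shorthand for sb.to_json()
print(sb.yaml)   # property shorthand for sb.to_yaml()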

75
libs/dynaconf/vendor/click/__init__.py vendored Normal file
View File

@ -0,0 +1,75 @@
"""
Click is a simple Python module inspired by the stdlib optparse to make
writing command line scripts fun. Unlike other modules, it's based
around a simple API that does not come with too much magic and is
composable.
"""
from .core import Argument
from .core import BaseCommand
from .core import Command
from .core import CommandCollection
from .core import Context
from .core import Group
from .core import MultiCommand
from .core import Option
from .core import Parameter
from .decorators import argument
from .decorators import command
from .decorators import confirmation_option
from .decorators import group
from .decorators import help_option
from .decorators import make_pass_decorator
from .decorators import option
from .decorators import pass_context
from .decorators import pass_obj
from .decorators import password_option
from .decorators import version_option
from .exceptions import Abort
from .exceptions import BadArgumentUsage
from .exceptions import BadOptionUsage
from .exceptions import BadParameter
from .exceptions import ClickException
from .exceptions import FileError
from .exceptions import MissingParameter
from .exceptions import NoSuchOption
from .exceptions import UsageError
from .formatting import HelpFormatter
from .formatting import wrap_text
from .globals import get_current_context
from .parser import OptionParser
from .termui import clear
from .termui import confirm
from .termui import echo_via_pager
from .termui import edit
from .termui import get_terminal_size
from .termui import getchar
from .termui import launch
from .termui import pause
from .termui import progressbar
from .termui import prompt
from .termui import secho
from .termui import style
from .termui import unstyle
from .types import BOOL
from .types import Choice
from .types import DateTime
from .types import File
from .types import FLOAT
from .types import FloatRange
from .types import INT
from .types import IntRange
from .types import ParamType
from .types import Path
from .types import STRING
from .types import Tuple
from .types import UNPROCESSED
from .types import UUID
from .utils import echo
from .utils import format_filename
from .utils import get_app_dir
from .utils import get_binary_stream
from .utils import get_os_args
from .utils import get_text_stream
from .utils import open_file
__version__ = "8.0.0.dev"

371
libs/dynaconf/vendor/click/_bashcomplete.py vendored Normal file
View File

@ -0,0 +1,371 @@
import copy
import os
import re
from collections import abc
from .core import Argument
from .core import MultiCommand
from .core import Option
from .parser import split_arg_string
from .types import Choice
from .utils import echo
WORDBREAK = "="
# Note, only BASH version 4.4 and later have the nosort option.
COMPLETION_SCRIPT_BASH = """
%(complete_func)s() {
local IFS=$'\n'
COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \\
COMP_CWORD=$COMP_CWORD \\
%(autocomplete_var)s=complete $1 ) )
return 0
}
%(complete_func)setup() {
local COMPLETION_OPTIONS=""
local BASH_VERSION_ARR=(${BASH_VERSION//./ })
# Only BASH version 4.4 and later have the nosort option.
if [ ${BASH_VERSION_ARR[0]} -gt 4 ] || ([ ${BASH_VERSION_ARR[0]} -eq 4 ] \
&& [ ${BASH_VERSION_ARR[1]} -ge 4 ]); then
COMPLETION_OPTIONS="-o nosort"
fi
complete $COMPLETION_OPTIONS -F %(complete_func)s %(script_names)s
}
%(complete_func)setup
"""
COMPLETION_SCRIPT_ZSH = """
#compdef %(script_names)s
%(complete_func)s() {
local -a completions
local -a completions_with_descriptions
local -a response
(( ! $+commands[%(script_names)s] )) && return 1
response=("${(@f)$( env COMP_WORDS=\"${words[*]}\" \\
COMP_CWORD=$((CURRENT-1)) \\
%(autocomplete_var)s=\"complete_zsh\" \\
%(script_names)s )}")
for key descr in ${(kv)response}; do
if [[ "$descr" == "_" ]]; then
completions+=("$key")
else
completions_with_descriptions+=("$key":"$descr")
fi
done
if [ -n "$completions_with_descriptions" ]; then
_describe -V unsorted completions_with_descriptions -U
fi
if [ -n "$completions" ]; then
compadd -U -V unsorted -a completions
fi
compstate[insert]="automenu"
}
compdef %(complete_func)s %(script_names)s
"""
COMPLETION_SCRIPT_FISH = (
"complete --no-files --command %(script_names)s --arguments"
' "(env %(autocomplete_var)s=complete_fish'
" COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t)"
' %(script_names)s)"'
)
_completion_scripts = {
"bash": COMPLETION_SCRIPT_BASH,
"zsh": COMPLETION_SCRIPT_ZSH,
"fish": COMPLETION_SCRIPT_FISH,
}
_invalid_ident_char_re = re.compile(r"[^a-zA-Z0-9_]")
def get_completion_script(prog_name, complete_var, shell):
cf_name = _invalid_ident_char_re.sub("", prog_name.replace("-", "_"))
script = _completion_scripts.get(shell, COMPLETION_SCRIPT_BASH)
return (
script
% {
"complete_func": f"_{cf_name}_completion",
"script_names": prog_name,
"autocomplete_var": complete_var,
}
).strip() + ";"
def resolve_ctx(cli, prog_name, args):
"""Parse into a hierarchy of contexts. Contexts are connected
through the parent variable.
:param cli: command definition
:param prog_name: the program that is running
:param args: full list of args
:return: the final context/command parsed
"""
ctx = cli.make_context(prog_name, args, resilient_parsing=True)
args = ctx.protected_args + ctx.args
while args:
if isinstance(ctx.command, MultiCommand):
if not ctx.command.chain:
cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
if cmd is None:
return ctx
ctx = cmd.make_context(
cmd_name, args, parent=ctx, resilient_parsing=True
)
args = ctx.protected_args + ctx.args
else:
# Walk chained subcommand contexts saving the last one.
while args:
cmd_name, cmd, args = ctx.command.resolve_command(ctx, args)
if cmd is None:
return ctx
sub_ctx = cmd.make_context(
cmd_name,
args,
parent=ctx,
allow_extra_args=True,
allow_interspersed_args=False,
resilient_parsing=True,
)
args = sub_ctx.args
ctx = sub_ctx
args = sub_ctx.protected_args + sub_ctx.args
else:
break
return ctx
def start_of_option(param_str):
"""
:param param_str: param_str to check
:return: whether or not this is the start of an option declaration
(i.e. starts "-" or "--")
"""
return param_str and param_str[:1] == "-"
def is_incomplete_option(all_args, cmd_param):
"""
:param all_args: the full original list of args supplied
:param cmd_param: the current command parameter
:return: whether or not the last option declaration (i.e. starts
"-" or "--") is incomplete and corresponds to this cmd_param. In
other words whether this cmd_param option can still accept
values
"""
if not isinstance(cmd_param, Option):
return False
if cmd_param.is_flag:
return False
last_option = None
for index, arg_str in enumerate(
reversed([arg for arg in all_args if arg != WORDBREAK])
):
if index + 1 > cmd_param.nargs:
break
if start_of_option(arg_str):
last_option = arg_str
return True if last_option and last_option in cmd_param.opts else False
def is_incomplete_argument(current_params, cmd_param):
"""
:param current_params: the current params and values for this
argument as already entered
:param cmd_param: the current command parameter
:return: whether or not the last argument is incomplete and
corresponds to this cmd_param. In other words whether or not
this cmd_param argument can still accept values
"""
if not isinstance(cmd_param, Argument):
return False
current_param_values = current_params[cmd_param.name]
if current_param_values is None:
return True
if cmd_param.nargs == -1:
return True
if (
isinstance(current_param_values, abc.Iterable)
and cmd_param.nargs > 1
and len(current_param_values) < cmd_param.nargs
):
return True
return False
def get_user_autocompletions(ctx, args, incomplete, cmd_param):
"""
:param ctx: context associated with the parsed command
:param args: full list of args
:param incomplete: the incomplete text to autocomplete
:param cmd_param: command definition
:return: all the possible user-specified completions for the param
"""
results = []
if isinstance(cmd_param.type, Choice):
# Choices don't support descriptions.
results = [
(c, None) for c in cmd_param.type.choices if str(c).startswith(incomplete)
]
elif cmd_param.autocompletion is not None:
dynamic_completions = cmd_param.autocompletion(
ctx=ctx, args=args, incomplete=incomplete
)
results = [
c if isinstance(c, tuple) else (c, None) for c in dynamic_completions
]
return results
def get_visible_commands_starting_with(ctx, starts_with):
"""
:param ctx: context associated with the parsed command
:param starts_with: string that visible commands must start with.
:return: all visible (not hidden) commands that start with starts_with.
"""
for c in ctx.command.list_commands(ctx):
if c.startswith(starts_with):
command = ctx.command.get_command(ctx, c)
if not command.hidden:
yield command
def add_subcommand_completions(ctx, incomplete, completions_out):
# Add subcommand completions.
if isinstance(ctx.command, MultiCommand):
completions_out.extend(
[
(c.name, c.get_short_help_str())
for c in get_visible_commands_starting_with(ctx, incomplete)
]
)
# Walk up the context list and add any other completion
# possibilities from chained commands
while ctx.parent is not None:
ctx = ctx.parent
if isinstance(ctx.command, MultiCommand) and ctx.command.chain:
remaining_commands = [
c
for c in get_visible_commands_starting_with(ctx, incomplete)
if c.name not in ctx.protected_args
]
completions_out.extend(
[(c.name, c.get_short_help_str()) for c in remaining_commands]
)
def get_choices(cli, prog_name, args, incomplete):
"""
:param cli: command definition
:param prog_name: the program that is running
:param args: full list of args
:param incomplete: the incomplete text to autocomplete
:return: all the possible completions for the incomplete
"""
all_args = copy.deepcopy(args)
ctx = resolve_ctx(cli, prog_name, args)
if ctx is None:
return []
has_double_dash = "--" in all_args
# In newer versions of bash long opts with '='s are partitioned, but
# it's easier to parse without the '='
if start_of_option(incomplete) and WORDBREAK in incomplete:
partition_incomplete = incomplete.partition(WORDBREAK)
all_args.append(partition_incomplete[0])
incomplete = partition_incomplete[2]
elif incomplete == WORDBREAK:
incomplete = ""
completions = []
if not has_double_dash and start_of_option(incomplete):
# completions for partial options
for param in ctx.command.params:
if isinstance(param, Option) and not param.hidden:
param_opts = [
param_opt
for param_opt in param.opts + param.secondary_opts
if param_opt not in all_args or param.multiple
]
completions.extend(
[(o, param.help) for o in param_opts if o.startswith(incomplete)]
)
return completions
# completion for option values from user supplied values
for param in ctx.command.params:
if is_incomplete_option(all_args, param):
return get_user_autocompletions(ctx, all_args, incomplete, param)
# completion for argument values from user supplied values
for param in ctx.command.params:
if is_incomplete_argument(ctx.params, param):
return get_user_autocompletions(ctx, all_args, incomplete, param)
add_subcommand_completions(ctx, incomplete, completions)
# Sort before returning so that proper ordering can be enforced in custom types.
return sorted(completions)
def do_complete(cli, prog_name, include_descriptions):
cwords = split_arg_string(os.environ["COMP_WORDS"])
cword = int(os.environ["COMP_CWORD"])
args = cwords[1:cword]
try:
incomplete = cwords[cword]
except IndexError:
incomplete = ""
for item in get_choices(cli, prog_name, args, incomplete):
echo(item[0])
if include_descriptions:
# ZSH has trouble dealing with empty array parameters when
# returned from commands, use '_' to indicate no description
# is present.
echo(item[1] if item[1] else "_")
return True
def do_complete_fish(cli, prog_name):
cwords = split_arg_string(os.environ["COMP_WORDS"])
incomplete = os.environ["COMP_CWORD"]
args = cwords[1:]
for item in get_choices(cli, prog_name, args, incomplete):
if item[1]:
echo(f"{item[0]}\t{item[1]}")
else:
echo(item[0])
return True
def bashcomplete(cli, prog_name, complete_var, complete_instr):
if "_" in complete_instr:
command, shell = complete_instr.split("_", 1)
else:
command = complete_instr
shell = "bash"
if command == "source":
echo(get_completion_script(prog_name, complete_var, shell))
return True
elif command == "complete":
if shell == "fish":
return do_complete_fish(cli, prog_name)
elif shell in {"bash", "zsh"}:
return do_complete(cli, prog_name, shell == "zsh")
return False
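
A quick sketch (not part of the vendored file) of the name mangling get_completion_script applies; the program name is illustrative:

script = get_completion_script('my-cli', '_MY_CLI_COMPLETE', 'bash')
# -> a bash snippet defining `_my_cli_completion` and registering it with
#    `complete ... my-cli`; the _MY_CLI_COMPLETE env var drives completion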

611
libs/dynaconf/vendor/click/_compat.py vendored Normal file
View File

@ -0,0 +1,611 @@
import codecs
import io
import os
import re
import sys
from weakref import WeakKeyDictionary
CYGWIN = sys.platform.startswith("cygwin")
MSYS2 = sys.platform.startswith("win") and ("GCC" in sys.version)
# Determine local App Engine environment, per Google's own suggestion
APP_ENGINE = "APPENGINE_RUNTIME" in os.environ and "Development/" in os.environ.get(
"SERVER_SOFTWARE", ""
)
WIN = sys.platform.startswith("win") and not APP_ENGINE and not MSYS2
DEFAULT_COLUMNS = 80
auto_wrap_for_ansi = None
colorama = None
get_winterm_size = None
_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
def get_filesystem_encoding():
return sys.getfilesystemencoding() or sys.getdefaultencoding()
def _make_text_stream(
stream, encoding, errors, force_readable=False, force_writable=False
):
if encoding is None:
encoding = get_best_encoding(stream)
if errors is None:
errors = "replace"
return _NonClosingTextIOWrapper(
stream,
encoding,
errors,
line_buffering=True,
force_readable=force_readable,
force_writable=force_writable,
)
def is_ascii_encoding(encoding):
"""Checks if a given encoding is ascii."""
try:
return codecs.lookup(encoding).name == "ascii"
except LookupError:
return False
def get_best_encoding(stream):
"""Returns the default stream encoding if not found."""
rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
if is_ascii_encoding(rv):
return "utf-8"
return rv
class _NonClosingTextIOWrapper(io.TextIOWrapper):
def __init__(
self,
stream,
encoding,
errors,
force_readable=False,
force_writable=False,
**extra,
):
self._stream = stream = _FixupStream(stream, force_readable, force_writable)
super().__init__(stream, encoding, errors, **extra)
def __del__(self):
try:
self.detach()
except Exception:
pass
def isatty(self):
# https://bitbucket.org/pypy/pypy/issue/1803
return self._stream.isatty()
class _FixupStream:
"""The new io interface needs more from streams than streams
traditionally implement. As such, this fix-up code is necessary in
some circumstances.
The forcing of the readable and writable flags is there because some
tools put badly patched objects on sys (one such offender being certain
versions of jupyter notebook).
"""
def __init__(self, stream, force_readable=False, force_writable=False):
self._stream = stream
self._force_readable = force_readable
self._force_writable = force_writable
def __getattr__(self, name):
return getattr(self._stream, name)
def read1(self, size):
f = getattr(self._stream, "read1", None)
if f is not None:
return f(size)
return self._stream.read(size)
def readable(self):
if self._force_readable:
return True
x = getattr(self._stream, "readable", None)
if x is not None:
return x()
try:
self._stream.read(0)
except Exception:
return False
return True
def writable(self):
if self._force_writable:
return True
x = getattr(self._stream, "writable", None)
if x is not None:
return x()
try:
self._stream.write("")
except Exception:
try:
self._stream.write(b"")
except Exception:
return False
return True
def seekable(self):
x = getattr(self._stream, "seekable", None)
if x is not None:
return x()
try:
self._stream.seek(self._stream.tell())
except Exception:
return False
return True
def is_bytes(x):
return isinstance(x, (bytes, memoryview, bytearray))
def _is_binary_reader(stream, default=False):
try:
return isinstance(stream.read(0), bytes)
except Exception:
return default
# This happens in some cases where the stream was already
# closed. In this case, we assume the default.
def _is_binary_writer(stream, default=False):
try:
stream.write(b"")
except Exception:
try:
stream.write("")
return False
except Exception:
pass
return default
return True
def _find_binary_reader(stream):
# We need to figure out if the given stream is already binary.
# This can happen because the official docs recommend detaching
# the streams to get binary streams. Some code might do this, so
# we need to deal with this case explicitly.
if _is_binary_reader(stream, False):
return stream
buf = getattr(stream, "buffer", None)
# Same situation here; this time we assume that the buffer is
# actually binary in case it's closed.
if buf is not None and _is_binary_reader(buf, True):
return buf
def _find_binary_writer(stream):
# We need to figure out if the given stream is already binary.
# This can happen because the official docs recommend detaching
# the streams to get binary streams. Some code might do this, so
# we need to deal with this case explicitly.
if _is_binary_writer(stream, False):
return stream
buf = getattr(stream, "buffer", None)
# Same situation here; this time we assume that the buffer is
# actually binary in case it's closed.
if buf is not None and _is_binary_writer(buf, True):
return buf
def _stream_is_misconfigured(stream):
"""A stream is misconfigured if its encoding is ASCII."""
# If the stream does not have an encoding set, we assume it's set
# to ASCII. This appears to happen in certain unittest
# environments. It's not quite clear what the correct behavior is
# but this at least will force Click to recover somehow.
return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
def _is_compat_stream_attr(stream, attr, value):
"""A stream attribute is compatible if it is equal to the
desired value or the desired value is unset and the attribute
has a value.
"""
stream_value = getattr(stream, attr, None)
return stream_value == value or (value is None and stream_value is not None)
def _is_compatible_text_stream(stream, encoding, errors):
"""Check if a stream's encoding and errors attributes are
compatible with the desired values.
"""
return _is_compat_stream_attr(
stream, "encoding", encoding
) and _is_compat_stream_attr(stream, "errors", errors)
def _force_correct_text_stream(
text_stream,
encoding,
errors,
is_binary,
find_binary,
force_readable=False,
force_writable=False,
):
if is_binary(text_stream, False):
binary_reader = text_stream
else:
# If the stream looks compatible, and won't default to a
# misconfigured ascii encoding, return it as-is.
if _is_compatible_text_stream(text_stream, encoding, errors) and not (
encoding is None and _stream_is_misconfigured(text_stream)
):
return text_stream
# Otherwise, get the underlying binary reader.
binary_reader = find_binary(text_stream)
# If that's not possible, silently use the original reader
# and get mojibake instead of exceptions.
if binary_reader is None:
return text_stream
# Default errors to replace instead of strict in order to get
# something that works.
if errors is None:
errors = "replace"
# Wrap the binary stream in a text stream with the correct
# encoding parameters.
return _make_text_stream(
binary_reader,
encoding,
errors,
force_readable=force_readable,
force_writable=force_writable,
)
def _force_correct_text_reader(text_reader, encoding, errors, force_readable=False):
return _force_correct_text_stream(
text_reader,
encoding,
errors,
_is_binary_reader,
_find_binary_reader,
force_readable=force_readable,
)
def _force_correct_text_writer(text_writer, encoding, errors, force_writable=False):
return _force_correct_text_stream(
text_writer,
encoding,
errors,
_is_binary_writer,
_find_binary_writer,
force_writable=force_writable,
)
def get_binary_stdin():
reader = _find_binary_reader(sys.stdin)
if reader is None:
raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
return reader
def get_binary_stdout():
writer = _find_binary_writer(sys.stdout)
if writer is None:
raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
return writer
def get_binary_stderr():
writer = _find_binary_writer(sys.stderr)
if writer is None:
raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
return writer
def get_text_stdin(encoding=None, errors=None):
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
if rv is not None:
return rv
return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
def get_text_stdout(encoding=None, errors=None):
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
if rv is not None:
return rv
return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
def get_text_stderr(encoding=None, errors=None):
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
if rv is not None:
return rv
return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
def filename_to_ui(value):
if isinstance(value, bytes):
value = value.decode(get_filesystem_encoding(), "replace")
else:
value = value.encode("utf-8", "surrogateescape").decode("utf-8", "replace")
return value
def get_strerror(e, default=None):
if hasattr(e, "strerror"):
msg = e.strerror
else:
if default is not None:
msg = default
else:
msg = str(e)
if isinstance(msg, bytes):
msg = msg.decode("utf-8", "replace")
return msg
def _wrap_io_open(file, mode, encoding, errors):
"""Handles not passing ``encoding`` and ``errors`` in binary mode."""
if "b" in mode:
return open(file, mode)
return open(file, mode, encoding=encoding, errors=errors)
def open_stream(filename, mode="r", encoding=None, errors="strict", atomic=False):
binary = "b" in mode
# Standard streams first. These are simple because they don't need
# special handling for the atomic flag. It's entirely ignored.
if filename == "-":
if any(m in mode for m in ["w", "a", "x"]):
if binary:
return get_binary_stdout(), False
return get_text_stdout(encoding=encoding, errors=errors), False
if binary:
return get_binary_stdin(), False
return get_text_stdin(encoding=encoding, errors=errors), False
# Non-atomic writes directly go out through the regular open functions.
if not atomic:
return _wrap_io_open(filename, mode, encoding, errors), True
# Some usability stuff for atomic writes
if "a" in mode:
raise ValueError(
"Appending to an existing file is not supported, because that"
" would involve an expensive `copy`-operation to a temporary"
" file. Open the file in normal `w`-mode and copy explicitly"
" if that's what you're after."
)
if "x" in mode:
raise ValueError("Use the `overwrite`-parameter instead.")
if "w" not in mode:
raise ValueError("Atomic writes only make sense with `w`-mode.")
# Atomic writes are more complicated. They work by opening a file
# as a proxy in the same folder and then using the fdopen
# functionality to wrap it in a Python file. Then we wrap it in an
# atomic file that moves the file over on close.
import errno
import random
try:
perm = os.stat(filename).st_mode
except OSError:
perm = None
flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
if binary:
flags |= getattr(os, "O_BINARY", 0)
while True:
tmp_filename = os.path.join(
os.path.dirname(filename),
f".__atomic-write{random.randrange(1 << 32):08x}",
)
try:
fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
break
except OSError as e:
if e.errno == errno.EEXIST or (
os.name == "nt"
and e.errno == errno.EACCES
and os.path.isdir(e.filename)
and os.access(e.filename, os.W_OK)
):
continue
raise
if perm is not None:
os.chmod(tmp_filename, perm) # in case perm includes bits in umask
f = _wrap_io_open(fd, mode, encoding, errors)
return _AtomicFile(f, tmp_filename, os.path.realpath(filename)), True
class _AtomicFile:
def __init__(self, f, tmp_filename, real_filename):
self._f = f
self._tmp_filename = tmp_filename
self._real_filename = real_filename
self.closed = False
@property
def name(self):
return self._real_filename
def close(self, delete=False):
if self.closed:
return
self._f.close()
os.replace(self._tmp_filename, self._real_filename)
self.closed = True
def __getattr__(self, name):
return getattr(self._f, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close(delete=exc_type is not None)
def __repr__(self):
return repr(self._f)
def strip_ansi(value):
return _ansi_re.sub("", value)
def _is_jupyter_kernel_output(stream):
if WIN:
# TODO: Couldn't test on Windows, shouldn't try to support until
# someone tests the details wrt colorama.
return
while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
stream = stream._stream
return stream.__class__.__module__.startswith("ipykernel.")
def should_strip_ansi(stream=None, color=None):
if color is None:
if stream is None:
stream = sys.stdin
return not isatty(stream) and not _is_jupyter_kernel_output(stream)
return not color
# If we're on Windows, we provide transparent integration through
# colorama. This will make ANSI colors through the echo function
# work automatically.
if WIN:
# Windows has a smaller terminal
DEFAULT_COLUMNS = 79
from ._winconsole import _get_windows_console_stream
def _get_argv_encoding():
import locale
return locale.getpreferredencoding()
try:
import colorama
except ImportError:
pass
else:
_ansi_stream_wrappers = WeakKeyDictionary()
def auto_wrap_for_ansi(stream, color=None):
"""This function wraps a stream so that calls through colorama
are issued to the win32 console API to recolor on demand. It
also ensures to reset the colors if a write call is interrupted
to not destroy the console afterwards.
"""
try:
cached = _ansi_stream_wrappers.get(stream)
except Exception:
cached = None
if cached is not None:
return cached
strip = should_strip_ansi(stream, color)
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
rv = ansi_wrapper.stream
_write = rv.write
def _safe_write(s):
try:
return _write(s)
except BaseException:
ansi_wrapper.reset_all()
raise
rv.write = _safe_write
try:
_ansi_stream_wrappers[stream] = rv
except Exception:
pass
return rv
def get_winterm_size():
win = colorama.win32.GetConsoleScreenBufferInfo(
colorama.win32.STDOUT
).srWindow
return win.Right - win.Left, win.Bottom - win.Top
else:
def _get_argv_encoding():
return getattr(sys.stdin, "encoding", None) or get_filesystem_encoding()
def _get_windows_console_stream(f, encoding, errors):
return None
def term_len(x):
return len(strip_ansi(x))
def isatty(stream):
try:
return stream.isatty()
except Exception:
return False
def _make_cached_stream_func(src_func, wrapper_func):
cache = WeakKeyDictionary()
def func():
stream = src_func()
try:
rv = cache.get(stream)
except Exception:
rv = None
if rv is not None:
return rv
rv = wrapper_func()
try:
stream = src_func() # In case wrapper_func() modified the stream
cache[stream] = rv
except Exception:
pass
return rv
return func
_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
binary_streams = {
"stdin": get_binary_stdin,
"stdout": get_binary_stdout,
"stderr": get_binary_stderr,
}
text_streams = {
"stdin": get_text_stdin,
"stdout": get_text_stdout,
"stderr": get_text_stderr,
}
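
A hedged sketch (not part of the vendored file) of the atomic-write branch of open_stream above; 'out.txt' is hypothetical:

f, should_close = open_stream('out.txt', mode='w', atomic=True)
try:
    f.write('hello')
finally:
    if should_close:
        f.close()   # _AtomicFile.close() moves the temp file into place via os.replace()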

667
libs/dynaconf/vendor/click/_termui_impl.py vendored Normal file
View File

@ -0,0 +1,667 @@
"""
This module contains implementations for the termui module. To keep the
import time of Click down, some infrequently used functionality is
placed in this module and only imported as needed.
"""
import contextlib
import math
import os
import sys
import time
from ._compat import _default_text_stdout
from ._compat import CYGWIN
from ._compat import get_best_encoding
from ._compat import isatty
from ._compat import open_stream
from ._compat import strip_ansi
from ._compat import term_len
from ._compat import WIN
from .exceptions import ClickException
from .utils import echo
if os.name == "nt":
BEFORE_BAR = "\r"
AFTER_BAR = "\n"
else:
BEFORE_BAR = "\r\033[?25l"
AFTER_BAR = "\033[?25h\n"
def _length_hint(obj):
"""Returns the length hint of an object."""
try:
return len(obj)
except (AttributeError, TypeError):
try:
get_hint = type(obj).__length_hint__
except AttributeError:
return None
try:
hint = get_hint(obj)
except TypeError:
return None
if hint is NotImplemented or not isinstance(hint, int) or hint < 0:
return None
return hint
class ProgressBar:
def __init__(
self,
iterable,
length=None,
fill_char="#",
empty_char=" ",
bar_template="%(bar)s",
info_sep=" ",
show_eta=True,
show_percent=None,
show_pos=False,
item_show_func=None,
label=None,
file=None,
color=None,
width=30,
):
self.fill_char = fill_char
self.empty_char = empty_char
self.bar_template = bar_template
self.info_sep = info_sep
self.show_eta = show_eta
self.show_percent = show_percent
self.show_pos = show_pos
self.item_show_func = item_show_func
self.label = label or ""
if file is None:
file = _default_text_stdout()
self.file = file
self.color = color
self.width = width
self.autowidth = width == 0
if length is None:
length = _length_hint(iterable)
if iterable is None:
if length is None:
raise TypeError("iterable or length is required")
iterable = range(length)
self.iter = iter(iterable)
self.length = length
self.length_known = length is not None
self.pos = 0
self.avg = []
self.start = self.last_eta = time.time()
self.eta_known = False
self.finished = False
self.max_width = None
self.entered = False
self.current_item = None
self.is_hidden = not isatty(self.file)
self._last_line = None
self.short_limit = 0.5
def __enter__(self):
self.entered = True
self.render_progress()
return self
def __exit__(self, exc_type, exc_value, tb):
self.render_finish()
def __iter__(self):
if not self.entered:
raise RuntimeError("You need to use progress bars in a with block.")
self.render_progress()
return self.generator()
def __next__(self):
# Iteration is defined in terms of a generator function,
# returned by iter(self); use that to define next(). This works
# because `self.iter` is an iterable consumed by that generator,
# so it is re-entry safe. Calling `next(self.generator())`
# twice works and does "what you want".
return next(iter(self))
def is_fast(self):
return time.time() - self.start <= self.short_limit
def render_finish(self):
if self.is_hidden or self.is_fast():
return
self.file.write(AFTER_BAR)
self.file.flush()
@property
def pct(self):
if self.finished:
return 1.0
return min(self.pos / (float(self.length) or 1), 1.0)
@property
def time_per_iteration(self):
if not self.avg:
return 0.0
return sum(self.avg) / float(len(self.avg))
@property
def eta(self):
if self.length_known and not self.finished:
return self.time_per_iteration * (self.length - self.pos)
return 0.0
def format_eta(self):
if self.eta_known:
t = int(self.eta)
seconds = t % 60
t //= 60
minutes = t % 60
t //= 60
hours = t % 24
t //= 24
if t > 0:
return f"{t}d {hours:02}:{minutes:02}:{seconds:02}"
else:
return f"{hours:02}:{minutes:02}:{seconds:02}"
return ""
def format_pos(self):
pos = str(self.pos)
if self.length_known:
pos += f"/{self.length}"
return pos
def format_pct(self):
return f"{int(self.pct * 100): 4}%"[1:]
def format_bar(self):
if self.length_known:
bar_length = int(self.pct * self.width)
bar = self.fill_char * bar_length
bar += self.empty_char * (self.width - bar_length)
elif self.finished:
bar = self.fill_char * self.width
else:
bar = list(self.empty_char * (self.width or 1))
if self.time_per_iteration != 0:
bar[
int(
(math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5)
* self.width
)
] = self.fill_char
bar = "".join(bar)
return bar
def format_progress_line(self):
show_percent = self.show_percent
info_bits = []
if self.length_known and show_percent is None:
show_percent = not self.show_pos
if self.show_pos:
info_bits.append(self.format_pos())
if show_percent:
info_bits.append(self.format_pct())
if self.show_eta and self.eta_known and not self.finished:
info_bits.append(self.format_eta())
if self.item_show_func is not None:
item_info = self.item_show_func(self.current_item)
if item_info is not None:
info_bits.append(item_info)
return (
self.bar_template
% {
"label": self.label,
"bar": self.format_bar(),
"info": self.info_sep.join(info_bits),
}
).rstrip()
def render_progress(self):
from .termui import get_terminal_size
if self.is_hidden:
return
buf = []
# Update width in case the terminal has been resized
if self.autowidth:
old_width = self.width
self.width = 0
clutter_length = term_len(self.format_progress_line())
new_width = max(0, get_terminal_size()[0] - clutter_length)
if new_width < old_width:
buf.append(BEFORE_BAR)
buf.append(" " * self.max_width)
self.max_width = new_width
self.width = new_width
clear_width = self.width
if self.max_width is not None:
clear_width = self.max_width
buf.append(BEFORE_BAR)
line = self.format_progress_line()
line_len = term_len(line)
if self.max_width is None or self.max_width < line_len:
self.max_width = line_len
buf.append(line)
buf.append(" " * (clear_width - line_len))
line = "".join(buf)
# Render the line only if it changed.
if line != self._last_line and not self.is_fast():
self._last_line = line
echo(line, file=self.file, color=self.color, nl=False)
self.file.flush()
def make_step(self, n_steps):
self.pos += n_steps
if self.length_known and self.pos >= self.length:
self.finished = True
if (time.time() - self.last_eta) < 1.0:
return
self.last_eta = time.time()
# self.avg is a rolling list of length <= 7 of steps where steps are
# defined as time elapsed divided by the total progress through
# self.length.
if self.pos:
step = (time.time() - self.start) / self.pos
else:
step = time.time() - self.start
self.avg = self.avg[-6:] + [step]
self.eta_known = self.length_known
def update(self, n_steps, current_item=None):
"""Update the progress bar by advancing a specified number of
steps, and optionally set the ``current_item`` for this new
position.
:param n_steps: Number of steps to advance.
:param current_item: Optional item to set as ``current_item``
for the updated position.
.. versionadded:: 8.0
Added the ``current_item`` optional parameter.
"""
self.make_step(n_steps)
if current_item is not None:
self.current_item = current_item
self.render_progress()
def finish(self):
self.eta_known = 0
self.current_item = None
self.finished = True
def generator(self):
"""Return a generator which yields the items added to the bar
during construction, and updates the progress bar *after* the
yielded block returns.
"""
# WARNING: the iterator interface for `ProgressBar` relies on
# this and only works because this is a simple generator which
# doesn't create or manage additional state. If this function
# changes, the impact should be evaluated both against
# `iter(bar)` and `next(bar)`. `next()` in particular may call
# `self.generator()` repeatedly, and this must remain safe in
# order for that interface to work.
if not self.entered:
raise RuntimeError("You need to use progress bars in a with block.")
if self.is_hidden:
yield from self.iter
else:
for rv in self.iter:
self.current_item = rv
yield rv
self.update(1)
self.finish()
self.render_progress()
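# Illustrative sketch (assumes upstream click is importable as `click`; not
# part of this vendored module): the public click.progressbar() helper wraps
# this class, and generator() above is why it must be driven from inside a
# with block.
if __name__ == "__main__":
    import click
    with click.progressbar(range(100), label="Processing") as bar:
        for item in bar:
            pass  # per-item work goes here; the bar advances after each yield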
def pager(generator, color=None):
"""Decide what method to use for paging through text."""
stdout = _default_text_stdout()
if not isatty(sys.stdin) or not isatty(stdout):
return _nullpager(stdout, generator, color)
pager_cmd = (os.environ.get("PAGER", None) or "").strip()
if pager_cmd:
if WIN:
return _tempfilepager(generator, pager_cmd, color)
return _pipepager(generator, pager_cmd, color)
if os.environ.get("TERM") in ("dumb", "emacs"):
return _nullpager(stdout, generator, color)
if WIN or sys.platform.startswith("os2"):
return _tempfilepager(generator, "more <", color)
if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0:
return _pipepager(generator, "less", color)
import tempfile
fd, filename = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, "system") and os.system(f'more "{filename}"') == 0:
return _pipepager(generator, "more", color)
return _nullpager(stdout, generator, color)
finally:
os.unlink(filename)
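# Illustrative sketch (assumes upstream click is importable as `click`; not
# part of this vendored module): click.echo_via_pager() is the public entry
# point into pager() above, and the PAGER environment variable picks the
# branch that gets taken.
if __name__ == "__main__":
    import os
    import click
    os.environ["PAGER"] = "less -R"  # forces the _pipepager branch on POSIX
    click.echo_via_pager(f"line {i}\n" for i in range(500))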
def _pipepager(generator, cmd, color):
"""Page through text by feeding it to another program. Invoking a
pager through this might support colors.
"""
import subprocess
env = dict(os.environ)
    # If we're piping to less we might support colors under the
    # condition that the LESS flags enable raw control characters
    # (-r/-R); when no flags are set at all we default LESS to -R.
cmd_detail = cmd.rsplit("/", 1)[-1].split()
if color is None and cmd_detail[0] == "less":
less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}"
if not less_flags:
env["LESS"] = "-R"
color = True
elif "r" in less_flags or "R" in less_flags:
color = True
c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env)
encoding = get_best_encoding(c.stdin)
try:
for text in generator:
if not color:
text = strip_ansi(text)
c.stdin.write(text.encode(encoding, "replace"))
except (OSError, KeyboardInterrupt):
pass
else:
c.stdin.close()
# Less doesn't respect ^C, but catches it for its own UI purposes (aborting
# search or other commands inside less).
#
# That means when the user hits ^C, the parent process (click) terminates,
# but less is still alive, paging the output and messing up the terminal.
#
# If the user wants to make the pager exit on ^C, they should set
# `LESS='-K'`. It's not our decision to make.
while True:
try:
c.wait()
except KeyboardInterrupt:
pass
else:
break
def _tempfilepager(generator, cmd, color):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
# TODO: This never terminates if the passed generator never terminates.
text = "".join(generator)
if not color:
text = strip_ansi(text)
encoding = get_best_encoding(sys.stdout)
with open_stream(filename, "wb")[0] as f:
f.write(text.encode(encoding))
try:
os.system(f'{cmd} "{filename}"')
finally:
os.unlink(filename)
def _nullpager(stream, generator, color):
"""Simply print unformatted text. This is the ultimate fallback."""
for text in generator:
if not color:
text = strip_ansi(text)
stream.write(text)
class Editor:
def __init__(self, editor=None, env=None, require_save=True, extension=".txt"):
self.editor = editor
self.env = env
self.require_save = require_save
self.extension = extension
def get_editor(self):
if self.editor is not None:
return self.editor
for key in "VISUAL", "EDITOR":
rv = os.environ.get(key)
if rv:
return rv
if WIN:
return "notepad"
for editor in "sensible-editor", "vim", "nano":
if os.system(f"which {editor} >/dev/null 2>&1") == 0:
return editor
return "vi"
def edit_file(self, filename):
import subprocess
editor = self.get_editor()
if self.env:
environ = os.environ.copy()
environ.update(self.env)
else:
environ = None
try:
c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True)
exit_code = c.wait()
if exit_code != 0:
raise ClickException(f"{editor}: Editing failed!")
except OSError as e:
raise ClickException(f"{editor}: Editing failed: {e}")
def edit(self, text):
import tempfile
text = text or ""
binary_data = type(text) in [bytes, bytearray]
if not binary_data and text and not text.endswith("\n"):
text += "\n"
fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension)
try:
if not binary_data:
if WIN:
encoding = "utf-8-sig"
text = text.replace("\n", "\r\n")
else:
encoding = "utf-8"
text = text.encode(encoding)
f = os.fdopen(fd, "wb")
f.write(text)
f.close()
timestamp = os.path.getmtime(name)
self.edit_file(name)
if self.require_save and os.path.getmtime(name) == timestamp:
return None
f = open(name, "rb")
try:
rv = f.read()
finally:
f.close()
if binary_data:
return rv
else:
return rv.decode("utf-8-sig").replace("\r\n", "\n")
finally:
os.unlink(name)
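# Illustrative sketch (assumes upstream click is importable as `click`; not
# part of this vendored module): the public click.edit() helper wraps the
# Editor class above.
if __name__ == "__main__":
    import click
    new_text = click.edit("# Edit me, then save and quit\n", extension=".md")
    if new_text is None:  # require_save: the user quit without saving
        click.echo("No changes made.")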
def open_url(url, wait=False, locate=False):
import subprocess
    def _unquote_file(url):
        from urllib.parse import unquote
        if url.startswith("file://"):
            url = unquote(url[7:])
        return url
if sys.platform == "darwin":
args = ["open"]
if wait:
args.append("-W")
if locate:
args.append("-R")
args.append(_unquote_file(url))
null = open("/dev/null", "w")
try:
return subprocess.Popen(args, stderr=null).wait()
finally:
null.close()
elif WIN:
if locate:
url = _unquote_file(url.replace('"', ""))
args = f'explorer /select,"{url}"'
else:
url = url.replace('"', "")
wait = "/WAIT" if wait else ""
args = f'start {wait} "" "{url}"'
return os.system(args)
elif CYGWIN:
if locate:
url = os.path.dirname(_unquote_file(url).replace('"', ""))
args = f'cygstart "{url}"'
else:
url = url.replace('"', "")
wait = "-w" if wait else ""
args = f'cygstart {wait} "{url}"'
return os.system(args)
try:
if locate:
url = os.path.dirname(_unquote_file(url)) or "."
else:
url = _unquote_file(url)
c = subprocess.Popen(["xdg-open", url])
if wait:
return c.wait()
return 0
except OSError:
if url.startswith(("http://", "https://")) and not locate and not wait:
import webbrowser
webbrowser.open(url)
return 0
return 1
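# Illustrative sketch (assumes upstream click is importable as `click`; not
# part of this vendored module): the public click.launch() helper wraps
# open_url() above.
if __name__ == "__main__":
    import click
    click.launch("https://example.com")           # open in the default browser
    click.launch("/tmp/report.txt", locate=True)  # reveal in the file manager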
def _translate_ch_to_exc(ch):
if ch == "\x03":
raise KeyboardInterrupt()
if ch == "\x04" and not WIN: # Unix-like, Ctrl+D
raise EOFError()
if ch == "\x1a" and WIN: # Windows, Ctrl+Z
raise EOFError()
if WIN:
import msvcrt
@contextlib.contextmanager
def raw_terminal():
yield
def getchar(echo):
# The function `getch` will return a bytes object corresponding to
# the pressed character. Since Windows 10 build 1803, it will also
# return \x00 when called a second time after pressing a regular key.
#
# `getwch` does not share this probably-bugged behavior. Moreover, it
# returns a Unicode object by default, which is what we want.
#
# Either of these functions will return \x00 or \xe0 to indicate
# a special key, and you need to call the same function again to get
# the "rest" of the code. The fun part is that \u00e0 is
# "latin small letter a with grave", so if you type that on a French
# keyboard, you _also_ get a \xe0.
# E.g., consider the Up arrow. This returns \xe0 and then \x48. The
# resulting Unicode string reads as "a with grave" + "capital H".
# This is indistinguishable from when the user actually types
# "a with grave" and then "capital H".
#
# When \xe0 is returned, we assume it's part of a special-key sequence
# and call `getwch` again, but that means that when the user types
# the \u00e0 character, `getchar` doesn't return until a second
# character is typed.
# The alternative is returning immediately, but that would mess up
# cross-platform handling of arrow keys and others that start with
# \xe0. Another option is using `getch`, but then we can't reliably
# read non-ASCII characters, because return values of `getch` are
# limited to the current 8-bit codepage.
#
# Anyway, Click doesn't claim to do this Right(tm), and using `getwch`
# is doing the right thing in more situations than with `getch`.
if echo:
func = msvcrt.getwche
else:
func = msvcrt.getwch
rv = func()
if rv in ("\x00", "\xe0"):
# \x00 and \xe0 are control characters that indicate special key,
# see above.
rv += func()
_translate_ch_to_exc(rv)
return rv
else:
import tty
import termios
@contextlib.contextmanager
def raw_terminal():
if not isatty(sys.stdin):
f = open("/dev/tty")
fd = f.fileno()
else:
fd = sys.stdin.fileno()
f = None
try:
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(fd)
yield fd
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
sys.stdout.flush()
if f is not None:
f.close()
except termios.error:
pass
def getchar(echo):
with raw_terminal() as fd:
ch = os.read(fd, 32)
ch = ch.decode(get_best_encoding(sys.stdin), "replace")
if echo and isatty(sys.stdout):
sys.stdout.write(ch)
_translate_ch_to_exc(ch)
return ch
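# Illustrative sketch (assumes upstream click is importable as `click`; not
# part of this vendored module): the public click.getchar() wraps the
# platform-specific implementations above.
if __name__ == "__main__":
    import click
    click.echo("Continue? [y/n] ", nl=False)
    ch = click.getchar()
    click.echo(ch)
    if ch.lower() != "y":
        raise SystemExit(1)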

37
libs/dynaconf/vendor/click/_textwrap.py vendored Normal file
View File

@ -0,0 +1,37 @@
import textwrap
from contextlib import contextmanager
class TextWrapper(textwrap.TextWrapper):
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
space_left = max(width - cur_len, 1)
if self.break_long_words:
last = reversed_chunks[-1]
cut = last[:space_left]
res = last[space_left:]
cur_line.append(cut)
reversed_chunks[-1] = res
elif not cur_line:
cur_line.append(reversed_chunks.pop())
@contextmanager
def extra_indent(self, indent):
old_initial_indent = self.initial_indent
old_subsequent_indent = self.subsequent_indent
self.initial_indent += indent
self.subsequent_indent += indent
try:
yield
finally:
self.initial_indent = old_initial_indent
self.subsequent_indent = old_subsequent_indent
def indent_only(self, text):
rv = []
for idx, line in enumerate(text.splitlines()):
indent = self.initial_indent
if idx > 0:
indent = self.subsequent_indent
rv.append(f"{indent}{line}")
return "\n".join(rv)

82
libs/dynaconf/vendor/click/_unicodefun.py vendored Normal file
View File

@ -0,0 +1,82 @@
import codecs
import os
def _verify_python_env():
"""Ensures that the environment is good for Unicode."""
try:
import locale
fs_enc = codecs.lookup(locale.getpreferredencoding()).name
except Exception:
fs_enc = "ascii"
if fs_enc != "ascii":
return
extra = ""
if os.name == "posix":
import subprocess
try:
rv = subprocess.Popen(
["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
).communicate()[0]
except OSError:
rv = b""
good_locales = set()
has_c_utf8 = False
# Make sure we're operating on text here.
if isinstance(rv, bytes):
rv = rv.decode("ascii", "replace")
for line in rv.splitlines():
locale = line.strip()
if locale.lower().endswith((".utf-8", ".utf8")):
good_locales.add(locale)
if locale.lower() in ("c.utf8", "c.utf-8"):
has_c_utf8 = True
extra += "\n\n"
if not good_locales:
extra += (
"Additional information: on this system no suitable"
" UTF-8 locales were discovered. This most likely"
" requires resolving by reconfiguring the locale"
" system."
)
elif has_c_utf8:
extra += (
"This system supports the C.UTF-8 locale which is"
" recommended. You might be able to resolve your issue"
" by exporting the following environment variables:\n\n"
" export LC_ALL=C.UTF-8\n"
" export LANG=C.UTF-8"
)
else:
extra += (
"This system lists some UTF-8 supporting locales that"
" you can pick from. The following suitable locales"
f" were discovered: {', '.join(sorted(good_locales))}"
)
bad_locale = None
for locale in os.environ.get("LC_ALL"), os.environ.get("LANG"):
if locale and locale.lower().endswith((".utf-8", ".utf8")):
bad_locale = locale
if locale is not None:
break
if bad_locale is not None:
extra += (
"\n\nClick discovered that you exported a UTF-8 locale"
" but the locale system could not pick up from it"
" because it does not exist. The exported locale is"
f" {bad_locale!r} but it is not supported"
)
raise RuntimeError(
"Click will abort further execution because Python was"
" configured to use ASCII as encoding for the environment."
" Consult https://click.palletsprojects.com/unicode-support/"
f" for mitigation steps.{extra}"
)
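# Illustrative sketch (stdlib only; not part of this module): the check above
# reduces to this probe of the preferred locale encoding.
if __name__ == "__main__":
    import codecs
    import locale
    fs_enc = codecs.lookup(locale.getpreferredencoding()).name
    print(fs_enc)  # anything other than "ascii" passes _verify_python_env()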

308
libs/dynaconf/vendor/click/_winconsole.py vendored Normal file
View File

@ -0,0 +1,308 @@
# This module is based on the excellent work by Adam Bartoš who
# provided a lot of what went into the implementation here in
# the discussion to issue1602 in the Python bug tracker.
#
# There are some general differences in regards to how this works
# compared to the original patches as we do not need to patch
# the entire interpreter but just work in our little world of
# echo and prompt.
import ctypes
import io
import time
from ctypes import byref
from ctypes import c_char
from ctypes import c_char_p
from ctypes import c_int
from ctypes import c_ssize_t
from ctypes import c_ulong
from ctypes import c_void_p
from ctypes import POINTER
from ctypes import py_object
from ctypes import windll
from ctypes import WINFUNCTYPE
from ctypes.wintypes import DWORD
from ctypes.wintypes import HANDLE
from ctypes.wintypes import LPCWSTR
from ctypes.wintypes import LPWSTR
import msvcrt
from ._compat import _NonClosingTextIOWrapper
try:
from ctypes import pythonapi
except ImportError:
pythonapi = None
else:
PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
PyBuffer_Release = pythonapi.PyBuffer_Release
c_ssize_p = POINTER(c_ssize_t)
kernel32 = windll.kernel32
GetStdHandle = kernel32.GetStdHandle
ReadConsoleW = kernel32.ReadConsoleW
WriteConsoleW = kernel32.WriteConsoleW
GetConsoleMode = kernel32.GetConsoleMode
GetLastError = kernel32.GetLastError
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32))
CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))(
("CommandLineToArgvW", windll.shell32)
)
LocalFree = WINFUNCTYPE(ctypes.c_void_p, ctypes.c_void_p)(
("LocalFree", windll.kernel32)
)
STDIN_HANDLE = GetStdHandle(-10)
STDOUT_HANDLE = GetStdHandle(-11)
STDERR_HANDLE = GetStdHandle(-12)
PyBUF_SIMPLE = 0
PyBUF_WRITABLE = 1
ERROR_SUCCESS = 0
ERROR_NOT_ENOUGH_MEMORY = 8
ERROR_OPERATION_ABORTED = 995
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
EOF = b"\x1a"
MAX_BYTES_WRITTEN = 32767
class Py_buffer(ctypes.Structure):
_fields_ = [
("buf", c_void_p),
("obj", py_object),
("len", c_ssize_t),
("itemsize", c_ssize_t),
("readonly", c_int),
("ndim", c_int),
("format", c_char_p),
("shape", c_ssize_p),
("strides", c_ssize_p),
("suboffsets", c_ssize_p),
("internal", c_void_p),
]
# On PyPy we cannot get buffers so our ability to operate here is
# severely limited.
if pythonapi is None:
get_buffer = None
else:
def get_buffer(obj, writable=False):
buf = Py_buffer()
flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE
PyObject_GetBuffer(py_object(obj), byref(buf), flags)
try:
buffer_type = c_char * buf.len
return buffer_type.from_address(buf.buf)
finally:
PyBuffer_Release(byref(buf))
class _WindowsConsoleRawIOBase(io.RawIOBase):
def __init__(self, handle):
self.handle = handle
def isatty(self):
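        # Delegating to the base class first means a ValueError is still
        # raised when the stream is closed; a console handle is otherwise
        # always interactive.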
io.RawIOBase.isatty(self)
return True
class _WindowsConsoleReader(_WindowsConsoleRawIOBase):
def readable(self):
return True
def readinto(self, b):
bytes_to_be_read = len(b)
if not bytes_to_be_read:
return 0
elif bytes_to_be_read % 2:
raise ValueError(
"cannot read odd number of bytes from UTF-16-LE encoded console"
)
buffer = get_buffer(b, writable=True)
code_units_to_be_read = bytes_to_be_read // 2
code_units_read = c_ulong()
rv = ReadConsoleW(
HANDLE(self.handle),
buffer,
code_units_to_be_read,
byref(code_units_read),
None,
)
if GetLastError() == ERROR_OPERATION_ABORTED:
# wait for KeyboardInterrupt
time.sleep(0.1)
if not rv:
raise OSError(f"Windows error: {GetLastError()}")
if buffer[0] == EOF:
return 0
return 2 * code_units_read.value
class _WindowsConsoleWriter(_WindowsConsoleRawIOBase):
def writable(self):
return True
@staticmethod
def _get_error_message(errno):
if errno == ERROR_SUCCESS:
return "ERROR_SUCCESS"
elif errno == ERROR_NOT_ENOUGH_MEMORY:
return "ERROR_NOT_ENOUGH_MEMORY"
return f"Windows error {errno}"
def write(self, b):
bytes_to_be_written = len(b)
buf = get_buffer(b)
code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
code_units_written = c_ulong()
WriteConsoleW(
HANDLE(self.handle),
buf,
code_units_to_be_written,
byref(code_units_written),
None,
)
bytes_written = 2 * code_units_written.value
if bytes_written == 0 and bytes_to_be_written > 0:
raise OSError(self._get_error_message(GetLastError()))
return bytes_written
class ConsoleStream:
def __init__(self, text_stream, byte_stream):
self._text_stream = text_stream
self.buffer = byte_stream
@property
def name(self):
return self.buffer.name
def write(self, x):
if isinstance(x, str):
return self._text_stream.write(x)
try:
self.flush()
except Exception:
pass
return self.buffer.write(x)
def writelines(self, lines):
for line in lines:
self.write(line)
def __getattr__(self, name):
return getattr(self._text_stream, name)
def isatty(self):
return self.buffer.isatty()
def __repr__(self):
return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>"
class WindowsChunkedWriter:
"""
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()' which we wrap to write in
limited chunks due to a Windows limitation on binary console streams.
"""
def __init__(self, wrapped):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
total_to_write = len(text)
written = 0
while written < total_to_write:
to_write = min(total_to_write - written, MAX_BYTES_WRITTEN)
self.__wrapped.write(text[written : written + to_write])
written += to_write
def _get_text_stdin(buffer_stream):
text_stream = _NonClosingTextIOWrapper(
io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
"utf-16-le",
"strict",
line_buffering=True,
)
return ConsoleStream(text_stream, buffer_stream)
def _get_text_stdout(buffer_stream):
text_stream = _NonClosingTextIOWrapper(
io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)),
"utf-16-le",
"strict",
line_buffering=True,
)
return ConsoleStream(text_stream, buffer_stream)
def _get_text_stderr(buffer_stream):
text_stream = _NonClosingTextIOWrapper(
io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)),
"utf-16-le",
"strict",
line_buffering=True,
)
return ConsoleStream(text_stream, buffer_stream)
_stream_factories = {
0: _get_text_stdin,
1: _get_text_stdout,
2: _get_text_stderr,
}
def _is_console(f):
if not hasattr(f, "fileno"):
return False
try:
fileno = f.fileno()
except OSError:
return False
handle = msvcrt.get_osfhandle(fileno)
return bool(GetConsoleMode(handle, byref(DWORD())))
def _get_windows_console_stream(f, encoding, errors):
if (
get_buffer is not None
and encoding in {"utf-16-le", None}
and errors in {"strict", None}
and _is_console(f)
):
func = _stream_factories.get(f.fileno())
if func is not None:
f = getattr(f, "buffer", None)
if f is None:
return None
return func(f)
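# Illustrative sketch (stdlib only; not part of this module): why readinto()
# above rejects odd byte counts: the console exchanges UTF-16-LE code units,
# which are two bytes each.
if __name__ == "__main__":
    s = "héllo"
    b = s.encode("utf-16-le")
    assert len(b) % 2 == 0 and len(b) // 2 == len(s)  # BMP characters only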

2070
libs/dynaconf/vendor/click/core.py vendored Normal file

File diff suppressed because it is too large

331
libs/dynaconf/vendor/click/decorators.py vendored Normal file
View File

@ -0,0 +1,331 @@
import inspect
import sys
from functools import update_wrapper
from .core import Argument
from .core import Command
from .core import Group
from .core import Option
from .globals import get_current_context
from .utils import echo
def pass_context(f):
"""Marks a callback as wanting to receive the current context
object as first argument.
"""
def new_func(*args, **kwargs):
return f(get_current_context(), *args, **kwargs)
return update_wrapper(new_func, f)
def pass_obj(f):
"""Similar to :func:`pass_context`, but only pass the object on the
context onwards (:attr:`Context.obj`). This is useful if that object
represents the state of a nested system.
"""
def new_func(*args, **kwargs):
return f(get_current_context().obj, *args, **kwargs)
return update_wrapper(new_func, f)
def make_pass_decorator(object_type, ensure=False):
"""Given an object type this creates a decorator that will work
similar to :func:`pass_obj` but instead of passing the object of the
current context, it will find the innermost context of type
:func:`object_type`.
This generates a decorator that works roughly like this::
from functools import update_wrapper
def decorator(f):
@pass_context
def new_func(ctx, *args, **kwargs):
obj = ctx.find_object(object_type)
return ctx.invoke(f, obj, *args, **kwargs)
return update_wrapper(new_func, f)
return decorator
:param object_type: the type of the object to pass.
:param ensure: if set to `True`, a new object will be created and
remembered on the context if it's not there yet.
"""
def decorator(f):
def new_func(*args, **kwargs):
ctx = get_current_context()
if ensure:
obj = ctx.ensure_object(object_type)
else:
obj = ctx.find_object(object_type)
if obj is None:
raise RuntimeError(
"Managed to invoke callback without a context"
f" object of type {object_type.__name__!r}"
" existing."
)
return ctx.invoke(f, obj, *args, **kwargs)
return update_wrapper(new_func, f)
return decorator
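# Illustrative usage sketch (assumes upstream click is importable as `click`;
# `Repo` is a hypothetical state class, not part of this module):
if __name__ == "__main__":
    import click
    class Repo:
        def __init__(self):
            self.home = "."
    pass_repo = click.make_pass_decorator(Repo, ensure=True)
    @click.command()
    @pass_repo
    def status(repo):
        click.echo(f"repo home: {repo.home}")
    status([], standalone_mode=False)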
def _make_command(f, name, attrs, cls):
if isinstance(f, Command):
raise TypeError("Attempted to convert a callback into a command twice.")
try:
params = f.__click_params__
params.reverse()
del f.__click_params__
except AttributeError:
params = []
help = attrs.get("help")
if help is None:
help = inspect.getdoc(f)
if isinstance(help, bytes):
help = help.decode("utf-8")
else:
help = inspect.cleandoc(help)
attrs["help"] = help
return cls(
name=name or f.__name__.lower().replace("_", "-"),
callback=f,
params=params,
**attrs,
)
def command(name=None, cls=None, **attrs):
r"""Creates a new :class:`Command` and uses the decorated function as
callback. This will also automatically attach all decorated
:func:`option`\s and :func:`argument`\s as parameters to the command.
The name of the command defaults to the name of the function with
underscores replaced by dashes. If you want to change that, you can
pass the intended name as the first argument.
All keyword arguments are forwarded to the underlying command class.
Once decorated the function turns into a :class:`Command` instance
that can be invoked as a command line utility or be attached to a
command :class:`Group`.
:param name: the name of the command. This defaults to the function
name with underscores replaced by dashes.
:param cls: the command class to instantiate. This defaults to
:class:`Command`.
"""
if cls is None:
cls = Command
def decorator(f):
cmd = _make_command(f, name, attrs, cls)
cmd.__doc__ = f.__doc__
return cmd
return decorator
def group(name=None, **attrs):
"""Creates a new :class:`Group` with a function as callback. This
works otherwise the same as :func:`command` just that the `cls`
parameter is set to :class:`Group`.
"""
attrs.setdefault("cls", Group)
return command(name, **attrs)
def _param_memo(f, param):
if isinstance(f, Command):
f.params.append(param)
else:
if not hasattr(f, "__click_params__"):
f.__click_params__ = []
f.__click_params__.append(param)
def argument(*param_decls, **attrs):
"""Attaches an argument to the command. All positional arguments are
passed as parameter declarations to :class:`Argument`; all keyword
arguments are forwarded unchanged (except ``cls``).
This is equivalent to creating an :class:`Argument` instance manually
and attaching it to the :attr:`Command.params` list.
:param cls: the argument class to instantiate. This defaults to
:class:`Argument`.
"""
def decorator(f):
ArgumentClass = attrs.pop("cls", Argument)
_param_memo(f, ArgumentClass(param_decls, **attrs))
return f
return decorator
def option(*param_decls, **attrs):
"""Attaches an option to the command. All positional arguments are
passed as parameter declarations to :class:`Option`; all keyword
arguments are forwarded unchanged (except ``cls``).
This is equivalent to creating an :class:`Option` instance manually
and attaching it to the :attr:`Command.params` list.
:param cls: the option class to instantiate. This defaults to
:class:`Option`.
"""
def decorator(f):
# Issue 926, copy attrs, so pre-defined options can re-use the same cls=
option_attrs = attrs.copy()
if "help" in option_attrs:
option_attrs["help"] = inspect.cleandoc(option_attrs["help"])
OptionClass = option_attrs.pop("cls", Option)
_param_memo(f, OptionClass(param_decls, **option_attrs))
return f
return decorator
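# Illustrative usage sketch (assumes upstream click is importable as `click`;
# not part of this vendored module): the decorators above compose into a
# complete command.
if __name__ == "__main__":
    import click
    @click.command()
    @click.argument("name")
    @click.option("--count", default=1, help="Number of greetings.")
    def hello(name, count):
        """Greet NAME a COUNT of times."""
        for _ in range(count):
            click.echo(f"Hello, {name}!")
    hello(["World", "--count", "2"], standalone_mode=False)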
def confirmation_option(*param_decls, **attrs):
"""Shortcut for confirmation prompts that can be ignored by passing
``--yes`` as parameter.
This is equivalent to decorating a function with :func:`option` with
the following parameters::
def callback(ctx, param, value):
if not value:
ctx.abort()
@click.command()
@click.option('--yes', is_flag=True, callback=callback,
expose_value=False, prompt='Do you want to continue?')
def dropdb():
pass
"""
def decorator(f):
def callback(ctx, param, value):
if not value:
ctx.abort()
attrs.setdefault("is_flag", True)
attrs.setdefault("callback", callback)
attrs.setdefault("expose_value", False)
attrs.setdefault("prompt", "Do you want to continue?")
attrs.setdefault("help", "Confirm the action without prompting.")
return option(*(param_decls or ("--yes",)), **attrs)(f)
return decorator
def password_option(*param_decls, **attrs):
"""Shortcut for password prompts.
This is equivalent to decorating a function with :func:`option` with
the following parameters::
@click.command()
@click.option('--password', prompt=True, confirmation_prompt=True,
hide_input=True)
def changeadmin(password):
pass
"""
def decorator(f):
attrs.setdefault("prompt", True)
attrs.setdefault("confirmation_prompt", True)
attrs.setdefault("hide_input", True)
return option(*(param_decls or ("--password",)), **attrs)(f)
return decorator
def version_option(version=None, *param_decls, **attrs):
"""Adds a ``--version`` option which immediately ends the program
printing out the version number. This is implemented as an eager
option that prints the version and exits the program in the callback.
:param version: the version number to show. If not provided Click
attempts an auto discovery via setuptools.
:param prog_name: the name of the program (defaults to autodetection)
:param message: custom message to show instead of the default
(``'%(prog)s, version %(version)s'``)
:param others: everything else is forwarded to :func:`option`.
"""
if version is None:
if hasattr(sys, "_getframe"):
module = sys._getframe(1).f_globals.get("__name__")
else:
module = ""
def decorator(f):
prog_name = attrs.pop("prog_name", None)
message = attrs.pop("message", "%(prog)s, version %(version)s")
def callback(ctx, param, value):
if not value or ctx.resilient_parsing:
return
prog = prog_name
if prog is None:
prog = ctx.find_root().info_name
ver = version
if ver is None:
try:
import pkg_resources
except ImportError:
pass
else:
for dist in pkg_resources.working_set:
scripts = dist.get_entry_map().get("console_scripts") or {}
for entry_point in scripts.values():
if entry_point.module_name == module:
ver = dist.version
break
if ver is None:
raise RuntimeError("Could not determine version")
echo(message % {"prog": prog, "version": ver}, color=ctx.color)
ctx.exit()
attrs.setdefault("is_flag", True)
attrs.setdefault("expose_value", False)
attrs.setdefault("is_eager", True)
attrs.setdefault("help", "Show the version and exit.")
attrs["callback"] = callback
return option(*(param_decls or ("--version",)), **attrs)(f)
return decorator
def help_option(*param_decls, **attrs):
"""Adds a ``--help`` option which immediately ends the program
printing out the help page. This is usually unnecessary to add as
this is added by default to all commands unless suppressed.
Like :func:`version_option`, this is implemented as eager option that
prints in the callback and exits.
All arguments are forwarded to :func:`option`.
"""
def decorator(f):
def callback(ctx, param, value):
if value and not ctx.resilient_parsing:
echo(ctx.get_help(), color=ctx.color)
ctx.exit()
attrs.setdefault("is_flag", True)
attrs.setdefault("expose_value", False)
attrs.setdefault("help", "Show this message and exit.")
attrs.setdefault("is_eager", True)
attrs["callback"] = callback
return option(*(param_decls or ("--help",)), **attrs)(f)
return decorator
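# Illustrative usage sketch (assumes upstream click is importable as `click`;
# not part of this vendored module): the eager options above attach
# ready-made --version and --yes flags.
if __name__ == "__main__":
    import click
    @click.command()
    @click.version_option(version="1.2.3", prog_name="demo")
    @click.confirmation_option(prompt="Proceed with the demo?")
    def demo():
        click.echo("running")
    demo(["--version"], standalone_mode=False)  # prints "demo, version 1.2.3"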

233
libs/dynaconf/vendor/click/exceptions.py vendored Normal file
View File

@ -0,0 +1,233 @@
from ._compat import filename_to_ui
from ._compat import get_text_stderr
from .utils import echo
def _join_param_hints(param_hint):
if isinstance(param_hint, (tuple, list)):
return " / ".join(repr(x) for x in param_hint)
return param_hint
class ClickException(Exception):
"""An exception that Click can handle and show to the user."""
#: The exit code for this exception.
exit_code = 1
def __init__(self, message):
super().__init__(message)
self.message = message
def format_message(self):
return self.message
def __str__(self):
return self.message
def show(self, file=None):
if file is None:
file = get_text_stderr()
echo(f"Error: {self.format_message()}", file=file)
class UsageError(ClickException):
"""An internal exception that signals a usage error. This typically
aborts any further handling.
:param message: the error message to display.
:param ctx: optionally the context that caused this error. Click will
fill in the context automatically in some situations.
"""
exit_code = 2
def __init__(self, message, ctx=None):
ClickException.__init__(self, message)
self.ctx = ctx
self.cmd = self.ctx.command if self.ctx else None
def show(self, file=None):
if file is None:
file = get_text_stderr()
color = None
hint = ""
if self.cmd is not None and self.cmd.get_help_option(self.ctx) is not None:
hint = (
f"Try '{self.ctx.command_path}"
f" {self.ctx.help_option_names[0]}' for help.\n"
)
if self.ctx is not None:
color = self.ctx.color
echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color)
echo(f"Error: {self.format_message()}", file=file, color=color)
class BadParameter(UsageError):
"""An exception that formats out a standardized error message for a
bad parameter. This is useful when thrown from a callback or type as
Click will attach contextual information to it (for instance, which
parameter it is).
.. versionadded:: 2.0
:param param: the parameter object that caused this error. This can
be left out, and Click will attach this info itself
if possible.
:param param_hint: a string that shows up as parameter name. This
can be used as alternative to `param` in cases
where custom validation should happen. If it is
a string it's used as such, if it's a list then
each item is quoted and separated.
"""
def __init__(self, message, ctx=None, param=None, param_hint=None):
UsageError.__init__(self, message, ctx)
self.param = param
self.param_hint = param_hint
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.get_error_hint(self.ctx)
else:
return f"Invalid value: {self.message}"
param_hint = _join_param_hints(param_hint)
return f"Invalid value for {param_hint}: {self.message}"
class MissingParameter(BadParameter):
"""Raised if click required an option or argument but it was not
provided when invoking the script.
.. versionadded:: 4.0
:param param_type: a string that indicates the type of the parameter.
The default is to inherit the parameter type from
the given `param`. Valid values are ``'parameter'``,
``'option'`` or ``'argument'``.
"""
def __init__(
self, message=None, ctx=None, param=None, param_hint=None, param_type=None
):
BadParameter.__init__(self, message, ctx, param, param_hint)
self.param_type = param_type
def format_message(self):
if self.param_hint is not None:
param_hint = self.param_hint
elif self.param is not None:
param_hint = self.param.get_error_hint(self.ctx)
else:
param_hint = None
param_hint = _join_param_hints(param_hint)
param_type = self.param_type
if param_type is None and self.param is not None:
param_type = self.param.param_type_name
msg = self.message
if self.param is not None:
msg_extra = self.param.type.get_missing_message(self.param)
if msg_extra:
if msg:
msg += f". {msg_extra}"
else:
msg = msg_extra
hint_str = f" {param_hint}" if param_hint else ""
return f"Missing {param_type}{hint_str}.{' ' if msg else ''}{msg or ''}"
def __str__(self):
if self.message is None:
param_name = self.param.name if self.param else None
return f"missing parameter: {param_name}"
else:
return self.message
class NoSuchOption(UsageError):
"""Raised if click attempted to handle an option that does not
exist.
.. versionadded:: 4.0
"""
def __init__(self, option_name, message=None, possibilities=None, ctx=None):
if message is None:
message = f"no such option: {option_name}"
UsageError.__init__(self, message, ctx)
self.option_name = option_name
self.possibilities = possibilities
def format_message(self):
bits = [self.message]
if self.possibilities:
if len(self.possibilities) == 1:
bits.append(f"Did you mean {self.possibilities[0]}?")
else:
possibilities = sorted(self.possibilities)
bits.append(f"(Possible options: {', '.join(possibilities)})")
return " ".join(bits)
class BadOptionUsage(UsageError):
"""Raised if an option is generally supplied but the use of the option
was incorrect. This is for instance raised if the number of arguments
for an option is not correct.
.. versionadded:: 4.0
:param option_name: the name of the option being used incorrectly.
"""
def __init__(self, option_name, message, ctx=None):
UsageError.__init__(self, message, ctx)
self.option_name = option_name
class BadArgumentUsage(UsageError):
"""Raised if an argument is generally supplied but the use of the argument
was incorrect. This is for instance raised if the number of values
for an argument is not correct.
.. versionadded:: 6.0
"""
def __init__(self, message, ctx=None):
UsageError.__init__(self, message, ctx)
class FileError(ClickException):
"""Raised if a file cannot be opened."""
def __init__(self, filename, hint=None):
ui_filename = filename_to_ui(filename)
if hint is None:
hint = "unknown error"
ClickException.__init__(self, hint)
self.ui_filename = ui_filename
self.filename = filename
def format_message(self):
return f"Could not open file {self.ui_filename}: {self.message}"
class Abort(RuntimeError):
"""An internal signalling exception that signals Click to abort."""
class Exit(RuntimeError):
"""An exception that indicates that the application should exit with some
status code.
:param code: the status code to exit with.
"""
__slots__ = ("exit_code",)
def __init__(self, code=0):
self.exit_code = code
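# Illustrative usage sketch (assumes upstream click is importable as `click`;
# not part of this vendored module): raising ClickException from a command
# prints "Error: ..." and exits with exit_code 1, per the class above.
if __name__ == "__main__":
    import click
    @click.command()
    def deploy():
        # Shown to the user as "Error: credentials missing".
        raise click.ClickException("credentials missing")
    deploy()  # standalone mode prints the error and calls sys.exit(1)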

279
libs/dynaconf/vendor/click/formatting.py vendored Normal file
View File

@ -0,0 +1,279 @@
from contextlib import contextmanager
from ._compat import term_len
from .parser import split_opt
from .termui import get_terminal_size
# Can force a width. This is used by the test system
FORCED_WIDTH = None
def measure_table(rows):
widths = {}
for row in rows:
for idx, col in enumerate(row):
widths[idx] = max(widths.get(idx, 0), term_len(col))
return tuple(y for x, y in sorted(widths.items()))
def iter_rows(rows, col_count):
for row in rows:
row = tuple(row)
yield row + ("",) * (col_count - len(row))
def wrap_text(
text, width=78, initial_indent="", subsequent_indent="", preserve_paragraphs=False
):
"""A helper function that intelligently wraps text. By default, it
assumes that it operates on a single paragraph of text but if the
`preserve_paragraphs` parameter is provided it will intelligently
handle paragraphs (defined by two empty lines).
If paragraphs are handled, a paragraph can be prefixed with an empty
line containing the ``\\b`` character (``\\x08``) to indicate that
no rewrapping should happen in that block.
:param text: the text that should be rewrapped.
:param width: the maximum width for the text.
:param initial_indent: the initial indent that should be placed on the
first line as a string.
:param subsequent_indent: the indent string that should be placed on
each consecutive line.
:param preserve_paragraphs: if this flag is set then the wrapping will
intelligently handle paragraphs.
"""
from ._textwrap import TextWrapper
text = text.expandtabs()
wrapper = TextWrapper(
width,
initial_indent=initial_indent,
subsequent_indent=subsequent_indent,
replace_whitespace=False,
)
if not preserve_paragraphs:
return wrapper.fill(text)
p = []
buf = []
indent = None
def _flush_par():
if not buf:
return
if buf[0].strip() == "\b":
p.append((indent or 0, True, "\n".join(buf[1:])))
else:
p.append((indent or 0, False, " ".join(buf)))
del buf[:]
for line in text.splitlines():
if not line:
_flush_par()
indent = None
else:
if indent is None:
orig_len = term_len(line)
line = line.lstrip()
indent = orig_len - term_len(line)
buf.append(line)
_flush_par()
rv = []
for indent, raw, text in p:
with wrapper.extra_indent(" " * indent):
if raw:
rv.append(wrapper.indent_only(text))
else:
rv.append(wrapper.fill(text))
return "\n\n".join(rv)
class HelpFormatter:
"""This class helps with formatting text-based help pages. It's
usually just needed for very special internal cases, but it's also
exposed so that developers can write their own fancy outputs.
At present, it always writes into memory.
:param indent_increment: the additional increment for each level.
:param width: the width for the text. This defaults to the terminal
width clamped to a maximum of 78.
"""
def __init__(self, indent_increment=2, width=None, max_width=None):
self.indent_increment = indent_increment
if max_width is None:
max_width = 80
if width is None:
width = FORCED_WIDTH
if width is None:
width = max(min(get_terminal_size()[0], max_width) - 2, 50)
self.width = width
self.current_indent = 0
self.buffer = []
def write(self, string):
"""Writes a unicode string into the internal buffer."""
self.buffer.append(string)
def indent(self):
"""Increases the indentation."""
self.current_indent += self.indent_increment
def dedent(self):
"""Decreases the indentation."""
self.current_indent -= self.indent_increment
def write_usage(self, prog, args="", prefix="Usage: "):
"""Writes a usage line into the buffer.
:param prog: the program name.
:param args: whitespace separated list of arguments.
:param prefix: the prefix for the first line.
"""
usage_prefix = f"{prefix:>{self.current_indent}}{prog} "
text_width = self.width - self.current_indent
if text_width >= (term_len(usage_prefix) + 20):
# The arguments will fit to the right of the prefix.
indent = " " * term_len(usage_prefix)
self.write(
wrap_text(
args,
text_width,
initial_indent=usage_prefix,
subsequent_indent=indent,
)
)
else:
# The prefix is too long, put the arguments on the next line.
self.write(usage_prefix)
self.write("\n")
indent = " " * (max(self.current_indent, term_len(prefix)) + 4)
self.write(
wrap_text(
args, text_width, initial_indent=indent, subsequent_indent=indent
)
)
self.write("\n")
def write_heading(self, heading):
"""Writes a heading into the buffer."""
self.write(f"{'':>{self.current_indent}}{heading}:\n")
def write_paragraph(self):
"""Writes a paragraph into the buffer."""
if self.buffer:
self.write("\n")
def write_text(self, text):
"""Writes re-indented text into the buffer. This rewraps and
preserves paragraphs.
"""
text_width = max(self.width - self.current_indent, 11)
indent = " " * self.current_indent
self.write(
wrap_text(
text,
text_width,
initial_indent=indent,
subsequent_indent=indent,
preserve_paragraphs=True,
)
)
self.write("\n")
def write_dl(self, rows, col_max=30, col_spacing=2):
"""Writes a definition list into the buffer. This is how options
and commands are usually formatted.
:param rows: a list of two item tuples for the terms and values.
:param col_max: the maximum width of the first column.
:param col_spacing: the number of spaces between the first and
second column.
"""
rows = list(rows)
widths = measure_table(rows)
if len(widths) != 2:
raise TypeError("Expected two columns for definition list")
first_col = min(widths[0], col_max) + col_spacing
for first, second in iter_rows(rows, len(widths)):
self.write(f"{'':>{self.current_indent}}{first}")
if not second:
self.write("\n")
continue
if term_len(first) <= first_col - col_spacing:
self.write(" " * (first_col - term_len(first)))
else:
self.write("\n")
self.write(" " * (first_col + self.current_indent))
text_width = max(self.width - first_col - 2, 10)
wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True)
lines = wrapped_text.splitlines()
if lines:
self.write(f"{lines[0]}\n")
for line in lines[1:]:
self.write(f"{'':>{first_col + self.current_indent}}{line}\n")
if len(lines) > 1:
# separate long help from next option
self.write("\n")
else:
self.write("\n")
@contextmanager
def section(self, name):
"""Helpful context manager that writes a paragraph, a heading,
and the indents.
:param name: the section name that is written as heading.
"""
self.write_paragraph()
self.write_heading(name)
self.indent()
try:
yield
finally:
self.dedent()
@contextmanager
def indentation(self):
"""A context manager that increases the indentation."""
self.indent()
try:
yield
finally:
self.dedent()
def getvalue(self):
"""Returns the buffer contents."""
return "".join(self.buffer)
def join_options(options):
"""Given a list of option strings this joins them in the most appropriate
way and returns them in the form ``(formatted_string,
any_prefix_is_slash)`` where the second item in the tuple is a flag that
indicates if any of the option prefixes was a slash.
"""
rv = []
any_prefix_is_slash = False
for opt in options:
prefix = split_opt(opt)[0]
if prefix == "/":
any_prefix_is_slash = True
rv.append((len(prefix), opt))
rv.sort(key=lambda x: x[0])
rv = ", ".join(x[1] for x in rv)
return rv, any_prefix_is_slash
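# Illustrative usage sketch (HelpFormatter above is also exported as
# click.HelpFormatter in upstream click; not part of this vendored module):
if __name__ == "__main__":
    import click
    formatter = click.HelpFormatter(width=60)
    formatter.write_usage("mytool", "[OPTIONS] SRC DST")
    with formatter.section("Options"):
        formatter.write_dl([
            ("--verbose", "Enable chatty output."),
            ("--help", "Show this message and exit."),
        ])
    print(formatter.getvalue())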

47
libs/dynaconf/vendor/click/globals.py vendored Normal file
View File

@ -0,0 +1,47 @@
from threading import local
_local = local()
def get_current_context(silent=False):
"""Returns the current click context. This can be used as a way to
access the current context object from anywhere. This is a more implicit
alternative to the :func:`pass_context` decorator. This function is
primarily useful for helpers such as :func:`echo` which might be
interested in changing its behavior based on the current context.
To push the current context, :meth:`Context.scope` can be used.
.. versionadded:: 5.0
:param silent: if set to `True` the return value is `None` if no context
is available. The default behavior is to raise a
:exc:`RuntimeError`.
"""
try:
return _local.stack[-1]
except (AttributeError, IndexError):
if not silent:
raise RuntimeError("There is no active click context.")
def push_context(ctx):
"""Pushes a new context to the current stack."""
_local.__dict__.setdefault("stack", []).append(ctx)
def pop_context():
"""Removes the top level from the stack."""
_local.stack.pop()
def resolve_color_default(color=None):
""""Internal helper to get the default value of the color flag. If a
value is passed it's returned unchanged, otherwise it's looked up from
the current context.
"""
if color is not None:
return color
ctx = get_current_context(silent=True)
if ctx is not None:
return ctx.color
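# Illustrative usage sketch (assumes upstream click is importable as `click`;
# not part of this vendored module): any code running under a command can
# reach its context without threading it through arguments.
if __name__ == "__main__":
    import click
    @click.command()
    def sync():
        ctx = click.get_current_context()
        click.echo(f"invoked as {ctx.info_name}")
    sync([], standalone_mode=False)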

431
libs/dynaconf/vendor/click/parser.py vendored Normal file
View File

@ -0,0 +1,431 @@
"""
This module started out as largely a copy paste from the stdlib's
optparse module with the features removed that we do not need from
optparse because we implement them in Click on a higher level (for
instance type handling, help formatting and a lot more).
The plan is to remove more and more from here over time.
The reason this is a different module and not optparse from the stdlib
is that there are differences in 2.x and 3.x about the error messages
generated and optparse in the stdlib uses gettext for no good reason
and might cause us issues.
Click uses parts of optparse written by Gregory P. Ward and maintained
by the Python Software Foundation. This is limited to code in parser.py.
Copyright 2001-2006 Gregory P. Ward. All rights reserved.
Copyright 2002-2006 Python Software Foundation. All rights reserved.
"""
# This code uses parts of optparse written by Gregory P. Ward and
# maintained by the Python Software Foundation.
# Copyright 2001-2006 Gregory P. Ward
# Copyright 2002-2006 Python Software Foundation
import re
from collections import deque
from .exceptions import BadArgumentUsage
from .exceptions import BadOptionUsage
from .exceptions import NoSuchOption
from .exceptions import UsageError
def _unpack_args(args, nargs_spec):
"""Given an iterable of arguments and an iterable of nargs specifications,
it returns a tuple with all the unpacked arguments at the first index
and all remaining arguments as the second.
The nargs specification is the number of arguments that should be consumed
or `-1` to indicate that this position should eat up all the remainders.
Missing items are filled with `None`.
"""
args = deque(args)
nargs_spec = deque(nargs_spec)
rv = []
spos = None
def _fetch(c):
try:
if spos is None:
return c.popleft()
else:
return c.pop()
except IndexError:
return None
while nargs_spec:
nargs = _fetch(nargs_spec)
if nargs == 1:
rv.append(_fetch(args))
elif nargs > 1:
x = [_fetch(args) for _ in range(nargs)]
# If we're reversed, we're pulling in the arguments in reverse,
# so we need to turn them around.
if spos is not None:
x.reverse()
rv.append(tuple(x))
elif nargs < 0:
if spos is not None:
raise TypeError("Cannot have two nargs < 0")
spos = len(rv)
rv.append(None)
# spos is the position of the wildcard (star). If it's not `None`,
# we fill it with the remainder.
if spos is not None:
rv[spos] = tuple(args)
args = []
rv[spos + 1 :] = reversed(rv[spos + 1 :])
return tuple(rv), list(args)
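# Illustrative worked example (runs alongside _unpack_args above; in upstream
# click this module is importable as click.parser): nargs=-1 greedily absorbs
# whatever the fixed positions on either side leave over.
if __name__ == "__main__":
    rv, rest = _unpack_args(["a", "b", "c", "d"], [1, -1, 1])
    assert rv == ("a", ("b", "c"), "d") and rest == []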
def _error_opt_args(nargs, opt):
if nargs == 1:
raise BadOptionUsage(opt, f"{opt} option requires an argument")
raise BadOptionUsage(opt, f"{opt} option requires {nargs} arguments")
def split_opt(opt):
first = opt[:1]
if first.isalnum():
return "", opt
if opt[1:2] == first:
return opt[:2], opt[2:]
return first, opt[1:]
def normalize_opt(opt, ctx):
if ctx is None or ctx.token_normalize_func is None:
return opt
prefix, opt = split_opt(opt)
return f"{prefix}{ctx.token_normalize_func(opt)}"
def split_arg_string(string):
"""Given an argument string this attempts to split it into small parts."""
rv = []
for match in re.finditer(
r"('([^'\\]*(?:\\.[^'\\]*)*)'|\"([^\"\\]*(?:\\.[^\"\\]*)*)\"|\S+)\s*",
string,
re.S,
):
arg = match.group().strip()
if arg[:1] == arg[-1:] and arg[:1] in "\"'":
arg = arg[1:-1].encode("ascii", "backslashreplace").decode("unicode-escape")
try:
arg = type(string)(arg)
except UnicodeError:
pass
rv.append(arg)
return rv
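# Illustrative worked example (runs alongside split_arg_string above): quoted
# segments survive as single arguments.
if __name__ == "__main__":
    assert split_arg_string('a "b c" d') == ["a", "b c", "d"]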
class Option:
def __init__(self, opts, dest, action=None, nargs=1, const=None, obj=None):
self._short_opts = []
self._long_opts = []
self.prefixes = set()
for opt in opts:
prefix, value = split_opt(opt)
if not prefix:
raise ValueError(f"Invalid start character for option ({opt})")
self.prefixes.add(prefix[0])
if len(prefix) == 1 and len(value) == 1:
self._short_opts.append(opt)
else:
self._long_opts.append(opt)
self.prefixes.add(prefix)
if action is None:
action = "store"
self.dest = dest
self.action = action
self.nargs = nargs
self.const = const
self.obj = obj
@property
def takes_value(self):
return self.action in ("store", "append")
def process(self, value, state):
if self.action == "store":
state.opts[self.dest] = value
elif self.action == "store_const":
state.opts[self.dest] = self.const
elif self.action == "append":
state.opts.setdefault(self.dest, []).append(value)
elif self.action == "append_const":
state.opts.setdefault(self.dest, []).append(self.const)
elif self.action == "count":
state.opts[self.dest] = state.opts.get(self.dest, 0) + 1
else:
raise ValueError(f"unknown action '{self.action}'")
state.order.append(self.obj)
class Argument:
def __init__(self, dest, nargs=1, obj=None):
self.dest = dest
self.nargs = nargs
self.obj = obj
def process(self, value, state):
if self.nargs > 1:
holes = sum(1 for x in value if x is None)
if holes == len(value):
value = None
elif holes != 0:
raise BadArgumentUsage(
f"argument {self.dest} takes {self.nargs} values"
)
state.opts[self.dest] = value
state.order.append(self.obj)
class ParsingState:
def __init__(self, rargs):
self.opts = {}
self.largs = []
self.rargs = rargs
self.order = []
class OptionParser:
"""The option parser is an internal class that is ultimately used to
parse options and arguments. It's modelled after optparse and brings
a similar but vastly simplified API. It should generally not be used
directly as the high level Click classes wrap it for you.
It's not nearly as extensible as optparse or argparse as it does not
implement features that are implemented on a higher level (such as
types or defaults).
    :param ctx: optionally the :class:`~click.Context` that this parser
        belongs to.
"""
def __init__(self, ctx=None):
#: The :class:`~click.Context` for this parser. This might be
#: `None` for some advanced use cases.
self.ctx = ctx
#: This controls how the parser deals with interspersed arguments.
#: If this is set to `False`, the parser will stop on the first
#: non-option. Click uses this to implement nested subcommands
#: safely.
self.allow_interspersed_args = True
#: This tells the parser how to deal with unknown options. By
#: default it will error out (which is sensible), but there is a
#: second mode where it will ignore it and continue processing
#: after shifting all the unknown options into the resulting args.
self.ignore_unknown_options = False
if ctx is not None:
self.allow_interspersed_args = ctx.allow_interspersed_args
self.ignore_unknown_options = ctx.ignore_unknown_options
self._short_opt = {}
self._long_opt = {}
self._opt_prefixes = {"-", "--"}
self._args = []
def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None):
"""Adds a new option named `dest` to the parser. The destination
is not inferred (unlike with optparse) and needs to be explicitly
provided. Action can be any of ``store``, ``store_const``,
        ``append``, ``append_const`` or ``count``.
The `obj` can be used to identify the option in the order list
that is returned from the parser.
"""
if obj is None:
obj = dest
opts = [normalize_opt(opt, self.ctx) for opt in opts]
option = Option(opts, dest, action=action, nargs=nargs, const=const, obj=obj)
self._opt_prefixes.update(option.prefixes)
for opt in option._short_opts:
self._short_opt[opt] = option
for opt in option._long_opts:
self._long_opt[opt] = option
def add_argument(self, dest, nargs=1, obj=None):
"""Adds a positional argument named `dest` to the parser.
The `obj` can be used to identify the option in the order list
that is returned from the parser.
"""
if obj is None:
obj = dest
self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
def parse_args(self, args):
"""Parses positional arguments and returns ``(values, args, order)``
for the parsed options and arguments as well as the leftover
arguments if there are any. The order is a list of objects as they
appear on the command line. If arguments appear multiple times they
will be memorized multiple times as well.
"""
state = ParsingState(args)
try:
self._process_args_for_options(state)
self._process_args_for_args(state)
except UsageError:
if self.ctx is None or not self.ctx.resilient_parsing:
raise
return state.opts, state.largs, state.order
def _process_args_for_args(self, state):
pargs, args = _unpack_args(
state.largs + state.rargs, [x.nargs for x in self._args]
)
for idx, arg in enumerate(self._args):
arg.process(pargs[idx], state)
state.largs = args
state.rargs = []
def _process_args_for_options(self, state):
while state.rargs:
arg = state.rargs.pop(0)
arglen = len(arg)
# Double dashes always handled explicitly regardless of what
# prefixes are valid.
if arg == "--":
return
elif arg[:1] in self._opt_prefixes and arglen > 1:
self._process_opts(arg, state)
elif self.allow_interspersed_args:
state.largs.append(arg)
else:
state.rargs.insert(0, arg)
return
# Say this is the original argument list:
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
# ^
# (we are about to process arg(i)).
#
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
# [arg0, ..., arg(i-1)] (any options and their arguments will have
# been removed from largs).
#
# The while loop will usually consume 1 or more arguments per pass.
# If it consumes 1 (eg. arg is an option that takes no arguments),
# then after _process_arg() is done the situation is:
#
# largs = subset of [arg0, ..., arg(i)]
# rargs = [arg(i+1), ..., arg(N-1)]
#
# If allow_interspersed_args is false, largs will always be
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
# not a very interesting subset!
def _match_long_opt(self, opt, explicit_value, state):
if opt not in self._long_opt:
possibilities = [word for word in self._long_opt if word.startswith(opt)]
raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx)
option = self._long_opt[opt]
if option.takes_value:
# At this point it's safe to modify rargs by injecting the
# explicit value, because no exception is raised in this
# branch. This means that the inserted value will be fully
# consumed.
if explicit_value is not None:
state.rargs.insert(0, explicit_value)
nargs = option.nargs
if len(state.rargs) < nargs:
_error_opt_args(nargs, opt)
elif nargs == 1:
value = state.rargs.pop(0)
else:
value = tuple(state.rargs[:nargs])
del state.rargs[:nargs]
elif explicit_value is not None:
raise BadOptionUsage(opt, f"{opt} option does not take a value")
else:
value = None
option.process(value, state)
def _match_short_opt(self, arg, state):
stop = False
i = 1
prefix = arg[0]
unknown_options = []
for ch in arg[1:]:
opt = normalize_opt(f"{prefix}{ch}", self.ctx)
option = self._short_opt.get(opt)
i += 1
if not option:
if self.ignore_unknown_options:
unknown_options.append(ch)
continue
raise NoSuchOption(opt, ctx=self.ctx)
if option.takes_value:
# Any characters left in arg? Pretend they're the
# next arg, and stop consuming characters of arg.
if i < len(arg):
state.rargs.insert(0, arg[i:])
stop = True
nargs = option.nargs
if len(state.rargs) < nargs:
_error_opt_args(nargs, opt)
elif nargs == 1:
value = state.rargs.pop(0)
else:
value = tuple(state.rargs[:nargs])
del state.rargs[:nargs]
else:
value = None
option.process(value, state)
if stop:
break
# If we got any unknown options we re-combinate the string of the
# remaining options and re-attach the prefix, then report that
# to the state as new larg. This way there is basic combinatorics
# that can be achieved while still ignoring unknown arguments.
if self.ignore_unknown_options and unknown_options:
state.largs.append(f"{prefix}{''.join(unknown_options)}")
def _process_opts(self, arg, state):
explicit_value = None
# Long option handling happens in two parts. The first part is
# supporting explicitly attached values. In any case, we will try
# to long match the option first.
if "=" in arg:
long_opt, explicit_value = arg.split("=", 1)
else:
long_opt = arg
norm_long_opt = normalize_opt(long_opt, self.ctx)
# At this point we will match the (assumed) long option through
# the long option matching code. Note that this allows options
# like "-foo" to be matched as long options.
try:
self._match_long_opt(norm_long_opt, explicit_value, state)
except NoSuchOption:
# At this point the long option matching failed, and we need
# to try with short options. However there is a special rule
# which says, that if we have a two character options prefix
# (applies to "--foo" for instance), we do not dispatch to the
# short option code and will instead raise the no option
# error.
if arg[:2] not in self._opt_prefixes:
return self._match_short_opt(arg, state)
if not self.ignore_unknown_options:
raise
state.largs.append(arg)
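# Illustrative usage sketch (runs alongside the OptionParser class above):
# short options can be bundled, and counts accumulate per occurrence.
if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option(["-v", "--verbose"], dest="verbose", action="count")
    parser.add_argument("src", nargs=1)
    opts, largs, order = parser.parse_args(["-vv", "input.txt"])
    assert opts == {"verbose": 2, "src": "input.txt"} and largs == []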

688
libs/dynaconf/vendor/click/termui.py vendored Normal file
View File

@ -0,0 +1,688 @@
import inspect
import io
import itertools
import os
import struct
import sys
from ._compat import DEFAULT_COLUMNS
from ._compat import get_winterm_size
from ._compat import isatty
from ._compat import strip_ansi
from ._compat import WIN
from .exceptions import Abort
from .exceptions import UsageError
from .globals import resolve_color_default
from .types import Choice
from .types import convert_type
from .types import Path
from .utils import echo
from .utils import LazyFile
# The prompt functions to use. The doc tools currently override these
# functions to customize how they work.
visible_prompt_func = input
_ansi_colors = {
"black": 30,
"red": 31,
"green": 32,
"yellow": 33,
"blue": 34,
"magenta": 35,
"cyan": 36,
"white": 37,
"reset": 39,
"bright_black": 90,
"bright_red": 91,
"bright_green": 92,
"bright_yellow": 93,
"bright_blue": 94,
"bright_magenta": 95,
"bright_cyan": 96,
"bright_white": 97,
}
_ansi_reset_all = "\033[0m"
def hidden_prompt_func(prompt):
import getpass
return getpass.getpass(prompt)
def _build_prompt(
text, suffix, show_default=False, default=None, show_choices=True, type=None
):
prompt = text
if type is not None and show_choices and isinstance(type, Choice):
prompt += f" ({', '.join(map(str, type.choices))})"
if default is not None and show_default:
prompt = f"{prompt} [{_format_default(default)}]"
return f"{prompt}{suffix}"
def _format_default(default):
if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
return default.name
return default
def prompt(
text,
default=None,
hide_input=False,
confirmation_prompt=False,
type=None,
value_proc=None,
prompt_suffix=": ",
show_default=True,
err=False,
show_choices=True,
):
"""Prompts a user for input. This is a convenience function that can
be used to prompt a user for input later.
If the user aborts the input by sending an interrupt signal, this
function will catch it and raise a :exc:`Abort` exception.
.. versionadded:: 7.0
Added the show_choices parameter.
.. versionadded:: 6.0
Added unicode support for cmd.exe on Windows.
.. versionadded:: 4.0
Added the `err` parameter.
:param text: the text to show for the prompt.
:param default: the default value to use if no input happens. If this
is not given it will prompt until it's aborted.
:param hide_input: if this is set to true then the input value will
be hidden.
:param confirmation_prompt: asks for confirmation for the value.
:param type: the type to use to check the value against.
:param value_proc: if this parameter is provided it's a function that
is invoked instead of the type conversion to
convert a value.
:param prompt_suffix: a suffix that should be added to the prompt.
:param show_default: shows or hides the default value in the prompt.
:param err: if set to true the file defaults to ``stderr`` instead of
``stdout``, the same as with echo.
:param show_choices: Show or hide choices if the passed type is a Choice.
For example if type is a Choice of either day or week,
show_choices is true and text is "Group by" then the
prompt will be "Group by (day, week): ".
"""
result = None
def prompt_func(text):
f = hidden_prompt_func if hide_input else visible_prompt_func
try:
# Write the prompt separately so that we get nice
# coloring through colorama on Windows
echo(text, nl=False, err=err)
return f("")
except (KeyboardInterrupt, EOFError):
# getpass doesn't print a newline if the user aborts input with ^C.
# Allegedly this behavior is inherited from getpass(3).
# A doc bug has been filed at https://bugs.python.org/issue24711
if hide_input:
echo(None, err=err)
raise Abort()
if value_proc is None:
value_proc = convert_type(type, default)
prompt = _build_prompt(
text, prompt_suffix, show_default, default, show_choices, type
)
while 1:
while 1:
value = prompt_func(prompt)
if value:
break
elif default is not None:
if isinstance(value_proc, Path):
# validate Path default value (exists, dir_okay, etc.)
value = default
break
return default
try:
result = value_proc(value)
except UsageError as e:
echo(f"Error: {e.message}", err=err) # noqa: B306
continue
if not confirmation_prompt:
return result
while 1:
value2 = prompt_func("Repeat for confirmation: ")
if value2:
break
if value == value2:
return result
echo("Error: the two entered values do not match", err=err)
def confirm(
text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False
):
"""Prompts for confirmation (yes/no question).
If the user aborts the input by sending an interrupt signal, this
function will catch it and raise a :exc:`Abort` exception.
.. versionadded:: 4.0
Added the `err` parameter.
:param text: the question to ask.
:param default: the default for the prompt.
:param abort: if this is set to `True` a negative answer aborts the
              prompt by raising :exc:`Abort`.
:param prompt_suffix: a suffix that should be added to the prompt.
:param show_default: shows or hides the default value in the prompt.
:param err: if set to true the file defaults to ``stderr`` instead of
``stdout``, the same as with echo.
"""
prompt = _build_prompt(
text, prompt_suffix, show_default, "Y/n" if default else "y/N"
)
while 1:
try:
# Write the prompt separately so that we get nice
# coloring through colorama on Windows
echo(prompt, nl=False, err=err)
value = visible_prompt_func("").lower().strip()
except (KeyboardInterrupt, EOFError):
raise Abort()
if value in ("y", "yes"):
rv = True
elif value in ("n", "no"):
rv = False
elif value == "":
rv = default
else:
echo("Error: invalid input", err=err)
continue
break
if abort and not rv:
raise Abort()
return rv
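# Editor's note: a usage sketch for confirm() (illustration only; the prompt
# strings are hypothetical). With abort=True, a negative answer raises Abort.
def _example_confirm_usage():
    if confirm("Delete all subtitles?", default=False):
        echo("deleting...")
    confirm("Continue anyway?", abort=True)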
def get_terminal_size():
"""Returns the current size of the terminal as tuple in the form
``(width, height)`` in columns and rows, as a tuple.
"""
import shutil
if hasattr(shutil, "get_terminal_size"):
return shutil.get_terminal_size()
# We provide a sensible default for get_winterm_size() when being invoked
# inside a subprocess. Without this, it would not provide a useful input.
if get_winterm_size is not None:
size = get_winterm_size()
if size == (0, 0):
return (79, 24)
else:
return size
def ioctl_gwinsz(fd):
try:
import fcntl
import termios
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
except Exception:
return
return cr
cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
try:
cr = ioctl_gwinsz(fd)
finally:
os.close(fd)
except Exception:
pass
if not cr or not cr[0] or not cr[1]:
cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS))
return int(cr[1]), int(cr[0])
def echo_via_pager(text_or_generator, color=None):
"""This function takes a text and shows it via an environment specific
pager on stdout.
.. versionchanged:: 3.0
Added the `color` flag.
:param text_or_generator: the text to page, or alternatively, a
generator emitting the text to page.
:param color: controls if the pager supports ANSI colors or not. The
default is autodetection.
"""
color = resolve_color_default(color)
if inspect.isgeneratorfunction(text_or_generator):
i = text_or_generator()
elif isinstance(text_or_generator, str):
i = [text_or_generator]
else:
i = iter(text_or_generator)
# convert every element of i to a text type if necessary
text_generator = (el if isinstance(el, str) else str(el) for el in i)
from ._termui_impl import pager
return pager(itertools.chain(text_generator, "\n"), color)
def progressbar(
iterable=None,
length=None,
label=None,
show_eta=True,
show_percent=None,
show_pos=False,
item_show_func=None,
fill_char="#",
empty_char="-",
bar_template="%(label)s [%(bar)s] %(info)s",
info_sep=" ",
width=36,
file=None,
color=None,
):
"""This function creates an iterable context manager that can be used
to iterate over something while showing a progress bar. It will
either iterate over the `iterable` or `length` items (that are counted
up). While iteration happens, this function will print a rendered
progress bar to the given `file` (defaults to stdout) and will attempt
to calculate remaining time and more. By default, this progress bar
will not be rendered if the file is not a terminal.
The context manager creates the progress bar. When the context
manager is entered the progress bar is already created. With every
iteration over the progress bar, the iterable passed to the bar is
advanced and the bar is updated. When the context manager exits,
a newline is printed and the progress bar is finalized on screen.
Note: The progress bar is currently designed for use cases where the
total progress can be expected to take at least several seconds.
Because of this, the ProgressBar class object won't display
progress that is considered too fast, and progress where the time
between steps is less than a second.
No other printing must happen while the bar is active, or the progress
bar will be unintentionally destroyed.
Example usage::
with progressbar(items) as bar:
for item in bar:
do_something_with(item)
Alternatively, if no iterable is specified, one can manually update the
progress bar through the `update()` method instead of directly
iterating over the progress bar. The update method accepts the number
of steps to increment the bar with::
with progressbar(length=chunks.total_bytes) as bar:
for chunk in chunks:
process_chunk(chunk)
bar.update(chunks.bytes)
The ``update()`` method also takes an optional value specifying the
``current_item`` at the new position. This is useful when used
together with ``item_show_func`` to customize the output for each
manual step::
with click.progressbar(
length=total_size,
label='Unzipping archive',
item_show_func=lambda a: a.filename
) as bar:
for archive in zip_file:
archive.extract()
bar.update(archive.size, archive)
.. versionadded:: 2.0
.. versionadded:: 4.0
Added the `color` parameter. Added a `update` method to the
progressbar object.
:param iterable: an iterable to iterate over. If not provided the length
is required.
:param length: the number of items to iterate over. By default the
progressbar will attempt to ask the iterator about its
length, which might or might not work. If an iterable is
also provided this parameter can be used to override the
length. If an iterable is not provided the progress bar
will iterate over a range of that length.
:param label: the label to show next to the progress bar.
:param show_eta: enables or disables the estimated time display. This is
automatically disabled if the length cannot be
determined.
:param show_percent: enables or disables the percentage display. The
default is `True` if the iterable has a length or
`False` if not.
:param show_pos: enables or disables the absolute position display. The
default is `False`.
:param item_show_func: a function called with the current item which
can return a string to show the current item
next to the progress bar. Note that the current
item can be `None`!
:param fill_char: the character to use to show the filled part of the
progress bar.
:param empty_char: the character to use to show the non-filled part of
the progress bar.
:param bar_template: the format string to use as template for the bar.
The parameters in it are ``label`` for the label,
``bar`` for the progress bar and ``info`` for the
info section.
:param info_sep: the separator between multiple info items (eta etc.)
:param width: the width of the progress bar in characters, 0 means full
terminal width
:param file: the file to write to. If this is not a terminal then
only the label is printed.
:param color: controls if the terminal supports ANSI colors or not. The
default is autodetection. This is only needed if ANSI
codes are included anywhere in the progress bar output
which is not the case by default.
"""
from ._termui_impl import ProgressBar
color = resolve_color_default(color)
return ProgressBar(
iterable=iterable,
length=length,
show_eta=show_eta,
show_percent=show_percent,
show_pos=show_pos,
item_show_func=item_show_func,
fill_char=fill_char,
empty_char=empty_char,
bar_template=bar_template,
info_sep=info_sep,
file=file,
label=label,
width=width,
color=color,
)
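# Editor's note: a usage sketch for progressbar() (illustration only),
# covering both an iterable-driven bar and one driven manually via update().
def _example_progressbar_usage():
    with progressbar(range(100), label="Processing") as bar:
        for _item in bar:
            pass  # work happens here; the bar advances per iteration
    with progressbar(length=1000, label="Copying") as bar:
        for chunk_size in (250, 250, 500):
            bar.update(chunk_size)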
def clear():
"""Clears the terminal screen. This will have the effect of clearing
the whole visible space of the terminal and moving the cursor to the
top left. This does not do anything if not connected to a terminal.
.. versionadded:: 2.0
"""
if not isatty(sys.stdout):
return
# If we're on Windows and we don't have colorama available, then we
# clear the screen by shelling out. Otherwise we can use an escape
# sequence.
if WIN:
os.system("cls")
else:
sys.stdout.write("\033[2J\033[1;1H")
def style(
text,
fg=None,
bg=None,
bold=None,
dim=None,
underline=None,
blink=None,
reverse=None,
reset=True,
):
"""Styles a text with ANSI styles and returns the new string. By
default the styling is self contained which means that at the end
of the string a reset code is issued. This can be prevented by
passing ``reset=False``.
Examples::
click.echo(click.style('Hello World!', fg='green'))
click.echo(click.style('ATTENTION!', blink=True))
click.echo(click.style('Some things', reverse=True, fg='cyan'))
Supported color names:
* ``black`` (might be a gray)
* ``red``
* ``green``
* ``yellow`` (might be an orange)
* ``blue``
* ``magenta``
* ``cyan``
* ``white`` (might be light gray)
* ``bright_black``
* ``bright_red``
* ``bright_green``
* ``bright_yellow``
* ``bright_blue``
* ``bright_magenta``
* ``bright_cyan``
* ``bright_white``
* ``reset`` (reset the color code only)
.. versionadded:: 2.0
.. versionadded:: 7.0
Added support for bright colors.
:param text: the string to style with ansi codes.
:param fg: if provided this will become the foreground color.
:param bg: if provided this will become the background color.
:param bold: if provided this will enable or disable bold mode.
:param dim: if provided this will enable or disable dim mode. This is
badly supported.
:param underline: if provided this will enable or disable underline.
:param blink: if provided this will enable or disable blinking.
:param reverse: if provided this will enable or disable inverse
rendering (foreground becomes background and the
other way round).
:param reset: by default a reset-all code is added at the end of the
string which means that styles do not carry over. This
can be disabled to compose styles.
"""
bits = []
if fg:
try:
bits.append(f"\033[{_ansi_colors[fg]}m")
except KeyError:
raise TypeError(f"Unknown color {fg!r}")
if bg:
try:
bits.append(f"\033[{_ansi_colors[bg] + 10}m")
except KeyError:
raise TypeError(f"Unknown color {bg!r}")
if bold is not None:
bits.append(f"\033[{1 if bold else 22}m")
if dim is not None:
bits.append(f"\033[{2 if dim else 22}m")
if underline is not None:
bits.append(f"\033[{4 if underline else 24}m")
if blink is not None:
bits.append(f"\033[{5 if blink else 25}m")
if reverse is not None:
bits.append(f"\033[{7 if reverse else 27}m")
bits.append(text)
if reset:
bits.append(_ansi_reset_all)
return "".join(bits)
def unstyle(text):
"""Removes ANSI styling information from a string. Usually it's not
necessary to use this function as Click's echo function will
automatically remove styling if necessary.
.. versionadded:: 2.0
:param text: the text to remove style information from.
"""
return strip_ansi(text)
def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
"""This function combines :func:`echo` and :func:`style` into one
call. As such the following two calls are the same::
click.secho('Hello World!', fg='green')
click.echo(click.style('Hello World!', fg='green'))
All keyword arguments are forwarded to the underlying functions
depending on which one they go with.
.. versionadded:: 2.0
"""
if message is not None:
message = style(message, **styles)
return echo(message, file=file, nl=nl, err=err, color=color)
def edit(
text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None
):
r"""Edits the given text in the defined editor. If an editor is given
(should be the full path to the executable but the regular operating
system search path is used for finding the executable) it overrides
the detected editor. Optionally, some environment variables can be
used. If the editor is closed without changes, `None` is returned. In
case a file is edited directly the return value is always `None` and
`require_save` and `extension` are ignored.
If the editor cannot be opened a :exc:`UsageError` is raised.
Note for Windows: to simplify cross-platform usage, the newlines are
automatically converted from POSIX to Windows and vice versa. As such,
the message here will have ``\n`` as newline markers.
:param text: the text to edit.
:param editor: optionally the editor to use. Defaults to automatic
detection.
:param env: environment variables to forward to the editor.
:param require_save: if this is true, then not saving in the editor
will make the return value become `None`.
:param extension: the extension to tell the editor about. This defaults
to `.txt` but changing this might change syntax
highlighting.
:param filename: if provided it will edit this file instead of the
provided text contents. It will not use a temporary
file as an indirection in that case.
"""
from ._termui_impl import Editor
editor = Editor(
editor=editor, env=env, require_save=require_save, extension=extension
)
if filename is None:
return editor.edit(text)
editor.edit_file(filename)
def launch(url, wait=False, locate=False):
"""This function launches the given URL (or filename) in the default
viewer application for this file type. If this is an executable, it
might launch the executable in a new session. The return value is
the exit code of the launched application. Usually, ``0`` indicates
success.
Examples::
click.launch('https://click.palletsprojects.com/')
click.launch('/my/downloaded/file', locate=True)
.. versionadded:: 2.0
:param url: URL or filename of the thing to launch.
:param wait: waits for the program to stop.
:param locate: if this is set to `True` then instead of launching the
application associated with the URL it will attempt to
launch a file manager with the file located. This
might have weird effects if the URL does not point to
the filesystem.
"""
from ._termui_impl import open_url
return open_url(url, wait=wait, locate=locate)
# If this is provided, getchar() calls into this instead. This is used
# for unittesting purposes.
_getchar = None
def getchar(echo=False):
"""Fetches a single character from the terminal and returns it. This
will always return a unicode character and under certain rare
circumstances this might return more than one character. The
situations which more than one character is returned is when for
whatever reason multiple characters end up in the terminal buffer or
standard input was not actually a terminal.
Note that this will always read from the terminal, even if something
is piped into the standard input.
Note for Windows: in rare cases when typing non-ASCII characters, this
function might wait for a second character and then return both at once.
This is because certain Unicode characters look like special-key markers.
.. versionadded:: 2.0
:param echo: if set to `True`, the character read will also show up on
the terminal. The default is to not show it.
"""
f = _getchar
if f is None:
from ._termui_impl import getchar as f
return f(echo)
def raw_terminal():
from ._termui_impl import raw_terminal as f
return f()
def pause(info="Press any key to continue ...", err=False):
"""This command stops execution and waits for the user to press any
key to continue. This is similar to the Windows batch "pause"
command. If the program is not run through a terminal, this command
will instead do nothing.
.. versionadded:: 2.0
.. versionadded:: 4.0
Added the `err` parameter.
:param info: the info string to print before pausing.
:param err: if set to true the message goes to ``stderr`` instead of
``stdout``, the same as with echo.
"""
if not isatty(sys.stdin) or not isatty(sys.stdout):
return
try:
if info:
echo(info, nl=False, err=err)
try:
getchar()
except (KeyboardInterrupt, EOFError):
pass
finally:
if info:
echo(err=err)
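# Editor's note: a usage sketch for pause() (illustration only). It silently
# returns when stdin/stdout are not terminals, so it is safe in pipelines.
def _example_pause_usage():
    pause(info="Press any key to exit ...")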

362
libs/dynaconf/vendor/click/testing.py vendored Normal file
View File

@ -0,0 +1,362 @@
import contextlib
import io
import os
import shlex
import shutil
import sys
import tempfile
from . import formatting
from . import termui
from . import utils
from ._compat import _find_binary_reader
class EchoingStdin:
def __init__(self, input, output):
self._input = input
self._output = output
def __getattr__(self, x):
return getattr(self._input, x)
def _echo(self, rv):
self._output.write(rv)
return rv
def read(self, n=-1):
return self._echo(self._input.read(n))
def readline(self, n=-1):
return self._echo(self._input.readline(n))
def readlines(self):
return [self._echo(x) for x in self._input.readlines()]
def __iter__(self):
return iter(self._echo(x) for x in self._input)
def __repr__(self):
return repr(self._input)
def make_input_stream(input, charset):
# Is already an input stream.
if hasattr(input, "read"):
rv = _find_binary_reader(input)
if rv is not None:
return rv
raise TypeError("Could not find binary reader for input stream.")
if input is None:
input = b""
elif not isinstance(input, bytes):
input = input.encode(charset)
return io.BytesIO(input)
class Result:
"""Holds the captured result of an invoked CLI script."""
def __init__(
self, runner, stdout_bytes, stderr_bytes, exit_code, exception, exc_info=None
):
#: The runner that created the result
self.runner = runner
#: The standard output as bytes.
self.stdout_bytes = stdout_bytes
#: The standard error as bytes, or None if not available
self.stderr_bytes = stderr_bytes
#: The exit code as integer.
self.exit_code = exit_code
#: The exception that happened if one did.
self.exception = exception
#: The traceback
self.exc_info = exc_info
@property
def output(self):
"""The (standard) output as unicode string."""
return self.stdout
@property
def stdout(self):
"""The standard output as unicode string."""
return self.stdout_bytes.decode(self.runner.charset, "replace").replace(
"\r\n", "\n"
)
@property
def stderr(self):
"""The standard error as unicode string."""
if self.stderr_bytes is None:
raise ValueError("stderr not separately captured")
return self.stderr_bytes.decode(self.runner.charset, "replace").replace(
"\r\n", "\n"
)
def __repr__(self):
exc_str = repr(self.exception) if self.exception else "okay"
return f"<{type(self).__name__} {exc_str}>"
class CliRunner:
"""The CLI runner provides functionality to invoke a Click command line
script for unittesting purposes in an isolated environment. This only
works in single-threaded systems without any concurrency as it changes the
global interpreter state.
:param charset: the character set for the input and output data.
:param env: a dictionary with environment variables for overriding.
:param echo_stdin: if this is set to `True`, then reading from stdin writes
to stdout. This is useful for showing examples in
some circumstances. Note that regular prompts
will automatically echo the input.
:param mix_stderr: if this is set to `False`, then stdout and stderr are
preserved as independent streams. This is useful for
Unix-philosophy apps that have predictable stdout and
noisy stderr, such that each may be measured
independently
"""
def __init__(self, charset="utf-8", env=None, echo_stdin=False, mix_stderr=True):
self.charset = charset
self.env = env or {}
self.echo_stdin = echo_stdin
self.mix_stderr = mix_stderr
def get_default_prog_name(self, cli):
"""Given a command object it will return the default program name
for it. The default is the `name` attribute or ``"root"`` if not
set.
"""
return cli.name or "root"
def make_env(self, overrides=None):
"""Returns the environment overrides for invoking a script."""
rv = dict(self.env)
if overrides:
rv.update(overrides)
return rv
@contextlib.contextmanager
def isolation(self, input=None, env=None, color=False):
"""A context manager that sets up the isolation for invoking of a
command line tool. This sets up stdin with the given input data
and `os.environ` with the overrides from the given dictionary.
This also rebinds some internals in Click to be mocked (like the
prompt functionality).
This is automatically done in the :meth:`invoke` method.
.. versionadded:: 4.0
The ``color`` parameter was added.
:param input: the input stream to put into sys.stdin.
:param env: the environment overrides as dictionary.
:param color: whether the output should contain color codes. The
application can still override this explicitly.
"""
input = make_input_stream(input, self.charset)
old_stdin = sys.stdin
old_stdout = sys.stdout
old_stderr = sys.stderr
old_forced_width = formatting.FORCED_WIDTH
formatting.FORCED_WIDTH = 80
env = self.make_env(env)
bytes_output = io.BytesIO()
if self.echo_stdin:
input = EchoingStdin(input, bytes_output)
input = io.TextIOWrapper(input, encoding=self.charset)
sys.stdout = io.TextIOWrapper(bytes_output, encoding=self.charset)
if not self.mix_stderr:
bytes_error = io.BytesIO()
sys.stderr = io.TextIOWrapper(bytes_error, encoding=self.charset)
if self.mix_stderr:
sys.stderr = sys.stdout
sys.stdin = input
def visible_input(prompt=None):
sys.stdout.write(prompt or "")
val = input.readline().rstrip("\r\n")
sys.stdout.write(f"{val}\n")
sys.stdout.flush()
return val
def hidden_input(prompt=None):
sys.stdout.write(f"{prompt or ''}\n")
sys.stdout.flush()
return input.readline().rstrip("\r\n")
def _getchar(echo):
char = sys.stdin.read(1)
if echo:
sys.stdout.write(char)
sys.stdout.flush()
return char
default_color = color
def should_strip_ansi(stream=None, color=None):
if color is None:
return not default_color
return not color
old_visible_prompt_func = termui.visible_prompt_func
old_hidden_prompt_func = termui.hidden_prompt_func
old__getchar_func = termui._getchar
old_should_strip_ansi = utils.should_strip_ansi
termui.visible_prompt_func = visible_input
termui.hidden_prompt_func = hidden_input
termui._getchar = _getchar
utils.should_strip_ansi = should_strip_ansi
old_env = {}
try:
for key, value in env.items():
old_env[key] = os.environ.get(key)
if value is None:
try:
del os.environ[key]
except Exception:
pass
else:
os.environ[key] = value
yield (bytes_output, not self.mix_stderr and bytes_error)
finally:
for key, value in old_env.items():
if value is None:
try:
del os.environ[key]
except Exception:
pass
else:
os.environ[key] = value
sys.stdout = old_stdout
sys.stderr = old_stderr
sys.stdin = old_stdin
termui.visible_prompt_func = old_visible_prompt_func
termui.hidden_prompt_func = old_hidden_prompt_func
termui._getchar = old__getchar_func
utils.should_strip_ansi = old_should_strip_ansi
formatting.FORCED_WIDTH = old_forced_width
def invoke(
self,
cli,
args=None,
input=None,
env=None,
catch_exceptions=True,
color=False,
**extra,
):
"""Invokes a command in an isolated environment. The arguments are
forwarded directly to the command line script, the `extra` keyword
arguments are passed to the :meth:`~clickpkg.Command.main` function of
the command.
This returns a :class:`Result` object.
.. versionadded:: 3.0
The ``catch_exceptions`` parameter was added.
.. versionchanged:: 3.0
The result object now has an `exc_info` attribute with the
traceback if available.
.. versionadded:: 4.0
The ``color`` parameter was added.
:param cli: the command to invoke
:param args: the arguments to invoke. It may be given as an iterable
or a string. When given as string it will be interpreted
as a Unix shell command. More details at
:func:`shlex.split`.
:param input: the input data for `sys.stdin`.
:param env: the environment overrides.
:param catch_exceptions: Whether to catch any other exceptions than
``SystemExit``.
:param extra: the keyword arguments to pass to :meth:`main`.
:param color: whether the output should contain color codes. The
application can still override this explicitly.
"""
exc_info = None
with self.isolation(input=input, env=env, color=color) as outstreams:
exception = None
exit_code = 0
if isinstance(args, str):
args = shlex.split(args)
try:
prog_name = extra.pop("prog_name")
except KeyError:
prog_name = self.get_default_prog_name(cli)
try:
cli.main(args=args or (), prog_name=prog_name, **extra)
except SystemExit as e:
exc_info = sys.exc_info()
exit_code = e.code
if exit_code is None:
exit_code = 0
if exit_code != 0:
exception = e
if not isinstance(exit_code, int):
sys.stdout.write(str(exit_code))
sys.stdout.write("\n")
exit_code = 1
except Exception as e:
if not catch_exceptions:
raise
exception = e
exit_code = 1
exc_info = sys.exc_info()
finally:
sys.stdout.flush()
stdout = outstreams[0].getvalue()
if self.mix_stderr:
stderr = None
else:
stderr = outstreams[1].getvalue()
return Result(
runner=self,
stdout_bytes=stdout,
stderr_bytes=stderr,
exit_code=exit_code,
exception=exception,
exc_info=exc_info,
)
@contextlib.contextmanager
def isolated_filesystem(self):
"""A context manager that creates a temporary folder and changes
the current working directory to it for isolated filesystem tests.
"""
cwd = os.getcwd()
t = tempfile.mkdtemp()
os.chdir(t)
try:
yield t
finally:
os.chdir(cwd)
try:
shutil.rmtree(t)
except OSError: # noqa: B014
pass
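# Editor's note: a usage sketch for CliRunner (illustration only). It assumes
# the vendored package is importable as ``dynaconf.vendor.click``, which
# provides the command/argument decorators used below.
def _example_clirunner_usage():
    from dynaconf.vendor import click

    @click.command()
    @click.argument("name")
    def hello(name):
        click.echo(f"Hello {name}!")

    runner = CliRunner()
    result = runner.invoke(hello, ["World"])
    assert result.exit_code == 0
    assert result.output == "Hello World!\n"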

726
libs/dynaconf/vendor/click/types.py vendored Normal file
View File

@ -0,0 +1,726 @@
import os
import stat
from datetime import datetime
from ._compat import _get_argv_encoding
from ._compat import filename_to_ui
from ._compat import get_filesystem_encoding
from ._compat import get_strerror
from ._compat import open_stream
from .exceptions import BadParameter
from .utils import LazyFile
from .utils import safecall
class ParamType:
"""Helper for converting values through types. The following is
necessary for a valid type:
* it needs a name
* it needs to pass through None unchanged
* it needs to convert from a string
* it needs to convert its result type through unchanged
(eg: needs to be idempotent)
* it needs to be able to deal with param and context being `None`.
This can be the case when the object is used with prompt
inputs.
"""
is_composite = False
#: the descriptive name of this type
name = None
#: if a list of this type is expected and the value is pulled from a
#: string environment variable, this is what splits it up. `None`
#: means any whitespace. For all parameters the general rule is that
#: whitespace splits them up. The exception are paths and files which
#: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on
#: Windows).
envvar_list_splitter = None
def __call__(self, value, param=None, ctx=None):
if value is not None:
return self.convert(value, param, ctx)
def get_metavar(self, param):
"""Returns the metavar default for this param if it provides one."""
def get_missing_message(self, param):
"""Optionally might return extra information about a missing
parameter.
.. versionadded:: 2.0
"""
def convert(self, value, param, ctx):
"""Converts the value. This is not invoked for values that are
`None` (the missing value).
"""
return value
def split_envvar_value(self, rv):
"""Given a value from an environment variable this splits it up
into small chunks depending on the defined envvar list splitter.
If the splitter is set to `None`, which means that whitespace splits,
then leading and trailing whitespace is ignored. Otherwise, leading
and trailing splitters usually lead to empty items being included.
"""
return (rv or "").split(self.envvar_list_splitter)
def fail(self, message, param=None, ctx=None):
"""Helper method to fail with an invalid value message."""
raise BadParameter(message, ctx=ctx, param=param)
class CompositeParamType(ParamType):
is_composite = True
@property
def arity(self):
raise NotImplementedError()
class FuncParamType(ParamType):
def __init__(self, func):
self.name = func.__name__
self.func = func
def convert(self, value, param, ctx):
try:
return self.func(value)
except ValueError:
try:
value = str(value)
except UnicodeError:
value = value.decode("utf-8", "replace")
self.fail(value, param, ctx)
class UnprocessedParamType(ParamType):
name = "text"
def convert(self, value, param, ctx):
return value
def __repr__(self):
return "UNPROCESSED"
class StringParamType(ParamType):
name = "text"
def convert(self, value, param, ctx):
if isinstance(value, bytes):
enc = _get_argv_encoding()
try:
value = value.decode(enc)
except UnicodeError:
fs_enc = get_filesystem_encoding()
if fs_enc != enc:
try:
value = value.decode(fs_enc)
except UnicodeError:
value = value.decode("utf-8", "replace")
else:
value = value.decode("utf-8", "replace")
return value
return value
def __repr__(self):
return "STRING"
class Choice(ParamType):
"""The choice type allows a value to be checked against a fixed set
of supported values. All of these values have to be strings.
You should only pass a list or tuple of choices. Other iterables
(like generators) may lead to surprising results.
The resulting value will always be one of the originally passed choices
regardless of ``case_sensitive`` or any ``ctx.token_normalize_func``
being specified.
See :ref:`choice-opts` for an example.
:param case_sensitive: Set to false to make choices case
insensitive. Defaults to true.
"""
name = "choice"
def __init__(self, choices, case_sensitive=True):
self.choices = choices
self.case_sensitive = case_sensitive
def get_metavar(self, param):
return f"[{'|'.join(self.choices)}]"
def get_missing_message(self, param):
choice_str = ",\n\t".join(self.choices)
return f"Choose from:\n\t{choice_str}"
def convert(self, value, param, ctx):
# Match through normalization and case sensitivity
# first do token_normalize_func, then lowercase
# preserve original `value` to produce an accurate message in
# `self.fail`
normed_value = value
normed_choices = {choice: choice for choice in self.choices}
if ctx is not None and ctx.token_normalize_func is not None:
normed_value = ctx.token_normalize_func(value)
normed_choices = {
ctx.token_normalize_func(normed_choice): original
for normed_choice, original in normed_choices.items()
}
if not self.case_sensitive:
normed_value = normed_value.casefold()
normed_choices = {
normed_choice.casefold(): original
for normed_choice, original in normed_choices.items()
}
if normed_value in normed_choices:
return normed_choices[normed_value]
self.fail(
f"invalid choice: {value}. (choose from {', '.join(self.choices)})",
param,
ctx,
)
def __repr__(self):
return f"Choice({list(self.choices)})"
class DateTime(ParamType):
"""The DateTime type converts date strings into `datetime` objects.
The format strings which are checked are configurable, but default to some
common (non-timezone aware) ISO 8601 formats.
When specifying *DateTime* formats, you should only pass a list or a tuple.
Other iterables, like generators, may lead to surprising results.
The format strings are processed using ``datetime.strptime``, and this
consequently defines the format strings which are allowed.
Parsing is tried using each format, in order, and the first format which
parses successfully is used.
:param formats: A list or tuple of date format strings, in the order in
which they should be tried. Defaults to
``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``,
``'%Y-%m-%d %H:%M:%S'``.
"""
name = "datetime"
def __init__(self, formats=None):
self.formats = formats or ["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]
def get_metavar(self, param):
return f"[{'|'.join(self.formats)}]"
def _try_to_convert_date(self, value, format):
try:
return datetime.strptime(value, format)
except ValueError:
return None
def convert(self, value, param, ctx):
# Exact match
for format in self.formats:
dtime = self._try_to_convert_date(value, format)
if dtime:
return dtime
self.fail(
f"invalid datetime format: {value}. (choose from {', '.join(self.formats)})"
)
def __repr__(self):
return "DateTime"
class IntParamType(ParamType):
name = "integer"
def convert(self, value, param, ctx):
try:
return int(value)
except ValueError:
self.fail(f"{value} is not a valid integer", param, ctx)
def __repr__(self):
return "INT"
class IntRange(IntParamType):
"""A parameter that works similar to :data:`click.INT` but restricts
the value to fit into a range. The default behavior is to fail if the
value falls outside the range, but it can also be silently clamped
between the two edges.
See :ref:`ranges` for an example.
"""
name = "integer range"
def __init__(self, min=None, max=None, clamp=False):
self.min = min
self.max = max
self.clamp = clamp
def convert(self, value, param, ctx):
rv = IntParamType.convert(self, value, param, ctx)
if self.clamp:
if self.min is not None and rv < self.min:
return self.min
if self.max is not None and rv > self.max:
return self.max
if (
self.min is not None
and rv < self.min
or self.max is not None
and rv > self.max
):
if self.min is None:
self.fail(
f"{rv} is bigger than the maximum valid value {self.max}.",
param,
ctx,
)
elif self.max is None:
self.fail(
f"{rv} is smaller than the minimum valid value {self.min}.",
param,
ctx,
)
else:
self.fail(
f"{rv} is not in the valid range of {self.min} to {self.max}.",
param,
ctx,
)
return rv
def __repr__(self):
return f"IntRange({self.min}, {self.max})"
class FloatParamType(ParamType):
name = "float"
def convert(self, value, param, ctx):
try:
return float(value)
except ValueError:
self.fail(f"{value} is not a valid floating point value", param, ctx)
def __repr__(self):
return "FLOAT"
class FloatRange(FloatParamType):
"""A parameter that works similar to :data:`click.FLOAT` but restricts
the value to fit into a range. The default behavior is to fail if the
value falls outside the range, but it can also be silently clamped
between the two edges.
See :ref:`ranges` for an example.
"""
name = "float range"
def __init__(self, min=None, max=None, clamp=False):
self.min = min
self.max = max
self.clamp = clamp
def convert(self, value, param, ctx):
rv = FloatParamType.convert(self, value, param, ctx)
if self.clamp:
if self.min is not None and rv < self.min:
return self.min
if self.max is not None and rv > self.max:
return self.max
if (
self.min is not None
and rv < self.min
or self.max is not None
and rv > self.max
):
if self.min is None:
self.fail(
f"{rv} is bigger than the maximum valid value {self.max}.",
param,
ctx,
)
elif self.max is None:
self.fail(
f"{rv} is smaller than the minimum valid value {self.min}.",
param,
ctx,
)
else:
self.fail(
f"{rv} is not in the valid range of {self.min} to {self.max}.",
param,
ctx,
)
return rv
def __repr__(self):
return f"FloatRange({self.min}, {self.max})"
class BoolParamType(ParamType):
name = "boolean"
def convert(self, value, param, ctx):
if isinstance(value, bool):
return bool(value)
value = value.lower()
if value in ("true", "t", "1", "yes", "y"):
return True
elif value in ("false", "f", "0", "no", "n"):
return False
self.fail(f"{value} is not a valid boolean", param, ctx)
def __repr__(self):
return "BOOL"
class UUIDParameterType(ParamType):
name = "uuid"
def convert(self, value, param, ctx):
import uuid
try:
return uuid.UUID(value)
except ValueError:
self.fail(f"{value} is not a valid UUID value", param, ctx)
def __repr__(self):
return "UUID"
class File(ParamType):
"""Declares a parameter to be a file for reading or writing. The file
is automatically closed once the context tears down (after the command
finished working).
Files can be opened for reading or writing. The special value ``-``
indicates stdin or stdout depending on the mode.
By default, the file is opened for reading text data, but it can also be
opened in binary mode or for writing. The encoding parameter can be used
to force a specific encoding.
The `lazy` flag controls if the file should be opened immediately or upon
first IO. The default is to be non-lazy for standard input and output
streams as well as files opened for reading, `lazy` otherwise. When opening a
file lazily for reading, it is still opened temporarily for validation, but
will not be held open until first IO. `lazy` is mainly useful when opening
for writing to avoid creating the file until it is needed.
Starting with Click 2.0, files can also be opened atomically in which
case all writes go into a separate file in the same folder and upon
completion the file will be moved over to the original location. This
is useful if a file regularly read by other users is modified.
See :ref:`file-args` for more information.
"""
name = "filename"
envvar_list_splitter = os.path.pathsep
def __init__(
self, mode="r", encoding=None, errors="strict", lazy=None, atomic=False
):
self.mode = mode
self.encoding = encoding
self.errors = errors
self.lazy = lazy
self.atomic = atomic
def resolve_lazy_flag(self, value):
if self.lazy is not None:
return self.lazy
if value == "-":
return False
elif "w" in self.mode:
return True
return False
def convert(self, value, param, ctx):
try:
if hasattr(value, "read") or hasattr(value, "write"):
return value
lazy = self.resolve_lazy_flag(value)
if lazy:
f = LazyFile(
value, self.mode, self.encoding, self.errors, atomic=self.atomic
)
if ctx is not None:
ctx.call_on_close(f.close_intelligently)
return f
f, should_close = open_stream(
value, self.mode, self.encoding, self.errors, atomic=self.atomic
)
# If a context is provided, we automatically close the file
# at the end of the context execution (or flush out). If a
# context does not exist, it's the caller's responsibility to
# properly close the file. This for instance happens when the
# type is used with prompts.
if ctx is not None:
if should_close:
ctx.call_on_close(safecall(f.close))
else:
ctx.call_on_close(safecall(f.flush))
return f
except OSError as e: # noqa: B014
self.fail(
f"Could not open file: {filename_to_ui(value)}: {get_strerror(e)}",
param,
ctx,
)
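# Editor's note: a usage sketch for File (illustration only; the output path
# is hypothetical). "-" maps to a standard stream, and lazy write mode defers
# creating the file until first IO.
def _example_file_usage():
    stdin_stream = File("r").convert("-", None, None)
    lazy_out = File("w", lazy=True).convert("/tmp/example-output.txt", None, None)
    return stdin_stream, lazy_out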
class Path(ParamType):
"""The path type is similar to the :class:`File` type but it performs
different checks. First of all, instead of returning an open file
handle it returns just the filename. Secondly, it can perform various
basic checks about what the file or directory should be.
.. versionchanged:: 6.0
`allow_dash` was added.
:param exists: if set to true, the file or directory needs to exist for
this value to be valid. If this is not required and a
file does indeed not exist, then all further checks are
silently skipped.
:param file_okay: controls if a file is a possible value.
:param dir_okay: controls if a directory is a possible value.
:param writable: if true, a writable check is performed.
:param readable: if true, a readable check is performed.
:param resolve_path: if this is true, then the path is fully resolved
before the value is passed onwards. This means
that it's absolute and symlinks are resolved. It
will not expand a tilde-prefix, as this is
supposed to be done by the shell only.
:param allow_dash: If this is set to `True`, a single dash to indicate
standard streams is permitted.
:param path_type: optionally a string type that should be used to
represent the path. The default is `None` which
means the return value will be either bytes or
unicode depending on what makes most sense given the
input data Click deals with.
"""
envvar_list_splitter = os.path.pathsep
def __init__(
self,
exists=False,
file_okay=True,
dir_okay=True,
writable=False,
readable=True,
resolve_path=False,
allow_dash=False,
path_type=None,
):
self.exists = exists
self.file_okay = file_okay
self.dir_okay = dir_okay
self.writable = writable
self.readable = readable
self.resolve_path = resolve_path
self.allow_dash = allow_dash
self.type = path_type
if self.file_okay and not self.dir_okay:
self.name = "file"
self.path_type = "File"
elif self.dir_okay and not self.file_okay:
self.name = "directory"
self.path_type = "Directory"
else:
self.name = "path"
self.path_type = "Path"
def coerce_path_result(self, rv):
if self.type is not None and not isinstance(rv, self.type):
if self.type is str:
rv = rv.decode(get_filesystem_encoding())
else:
rv = rv.encode(get_filesystem_encoding())
return rv
def convert(self, value, param, ctx):
rv = value
is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-")
if not is_dash:
if self.resolve_path:
rv = os.path.realpath(rv)
try:
st = os.stat(rv)
except OSError:
if not self.exists:
return self.coerce_path_result(rv)
self.fail(
f"{self.path_type} {filename_to_ui(value)!r} does not exist.",
param,
ctx,
)
if not self.file_okay and stat.S_ISREG(st.st_mode):
self.fail(
f"{self.path_type} {filename_to_ui(value)!r} is a file.",
param,
ctx,
)
if not self.dir_okay and stat.S_ISDIR(st.st_mode):
self.fail(
f"{self.path_type} {filename_to_ui(value)!r} is a directory.",
param,
ctx,
)
if self.writable and not os.access(value, os.W_OK):
self.fail(
f"{self.path_type} {filename_to_ui(value)!r} is not writable.",
param,
ctx,
)
if self.readable and not os.access(value, os.R_OK):
self.fail(
f"{self.path_type} {filename_to_ui(value)!r} is not readable.",
param,
ctx,
)
return self.coerce_path_result(rv)
class Tuple(CompositeParamType):
"""The default behavior of Click is to apply a type on a value directly.
This works well in most cases, except for when `nargs` is set to a fixed
count and different types should be used for different items. In this
case the :class:`Tuple` type can be used. This type can only be used
if `nargs` is set to a fixed number.
For more information see :ref:`tuple-type`.
This can be selected by using a Python tuple literal as a type.
:param types: a list of types that should be used for the tuple items.
"""
def __init__(self, types):
self.types = [convert_type(ty) for ty in types]
@property
def name(self):
return f"<{' '.join(ty.name for ty in self.types)}>"
@property
def arity(self):
return len(self.types)
def convert(self, value, param, ctx):
if len(value) != len(self.types):
raise TypeError(
"It would appear that nargs is set to conflict with the"
" composite type arity."
)
return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value))
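# Editor's note: a usage sketch for Tuple (illustration only): each position
# is converted with its own type, so nargs must match the arity.
def _example_tuple_usage():
    point = Tuple([int, float])
    assert point.convert(("3", "4.5"), None, None) == (3, 4.5)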
def convert_type(ty, default=None):
"""Converts a callable or python type into the most appropriate
param type.
"""
guessed_type = False
if ty is None and default is not None:
if isinstance(default, tuple):
ty = tuple(map(type, default))
else:
ty = type(default)
guessed_type = True
if isinstance(ty, tuple):
return Tuple(ty)
if isinstance(ty, ParamType):
return ty
if ty is str or ty is None:
return STRING
if ty is int:
return INT
# Booleans are only okay if not guessed. This is done because for
# flags the default value is actually a bit of a lie in that it
# indicates which of the flags is the one we want. See get_default()
# for more information.
if ty is bool and not guessed_type:
return BOOL
if ty is float:
return FLOAT
if guessed_type:
return STRING
# Catch a common mistake
if __debug__:
try:
if issubclass(ty, ParamType):
raise AssertionError(
f"Attempted to use an uninstantiated parameter type ({ty})."
)
except TypeError:
pass
return FuncParamType(ty)
#: A dummy parameter type that just does nothing. From a user's
#: perspective this appears to just be the same as `STRING` but
#: internally no string conversion takes place if the input was bytes.
#: This is usually useful when working with file paths as they can
#: appear in bytes and unicode.
#:
#: For path related uses the :class:`Path` type is a better choice but
#: there are situations where an unprocessed type is useful which is why
#: it is provided.
#:
#: .. versionadded:: 4.0
UNPROCESSED = UnprocessedParamType()
#: A unicode string parameter type which is the implicit default. This
#: can also be selected by using ``str`` as type.
STRING = StringParamType()
#: An integer parameter. This can also be selected by using ``int`` as
#: type.
INT = IntParamType()
#: A floating point value parameter. This can also be selected by using
#: ``float`` as type.
FLOAT = FloatParamType()
#: A boolean parameter. This is the default for boolean flags. This can
#: also be selected by using ``bool`` as a type.
BOOL = BoolParamType()
#: A UUID parameter.
UUID = UUIDParameterType()

440
libs/dynaconf/vendor/click/utils.py vendored Normal file
View File

@ -0,0 +1,440 @@
import os
import sys
from ._compat import _default_text_stderr
from ._compat import _default_text_stdout
from ._compat import _find_binary_writer
from ._compat import auto_wrap_for_ansi
from ._compat import binary_streams
from ._compat import filename_to_ui
from ._compat import get_filesystem_encoding
from ._compat import get_strerror
from ._compat import is_bytes
from ._compat import open_stream
from ._compat import should_strip_ansi
from ._compat import strip_ansi
from ._compat import text_streams
from ._compat import WIN
from .globals import resolve_color_default
echo_native_types = (str, bytes, bytearray)
def _posixify(name):
return "-".join(name.split()).lower()
def safecall(func):
"""Wraps a function so that it swallows exceptions."""
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
pass
return wrapper
def make_str(value):
"""Converts a value into a valid string."""
if isinstance(value, bytes):
try:
return value.decode(get_filesystem_encoding())
except UnicodeError:
return value.decode("utf-8", "replace")
return str(value)
def make_default_short_help(help, max_length=45):
"""Return a condensed version of help string."""
words = help.split()
total_length = 0
result = []
done = False
for word in words:
if word[-1:] == ".":
done = True
new_length = 1 + len(word) if result else len(word)
if total_length + new_length > max_length:
result.append("...")
done = True
else:
if result:
result.append(" ")
result.append(word)
if done:
break
total_length += new_length
return "".join(result)
class LazyFile:
"""A lazy file works like a regular file but it does not fully open
the file but it does perform some basic checks early to see if the
filename parameter does make sense. This is useful for safely opening
files for writing.
"""
def __init__(
self, filename, mode="r", encoding=None, errors="strict", atomic=False
):
self.name = filename
self.mode = mode
self.encoding = encoding
self.errors = errors
self.atomic = atomic
if filename == "-":
self._f, self.should_close = open_stream(filename, mode, encoding, errors)
else:
if "r" in mode:
# Open and close the file in case we're opening it for
# reading so that we can catch at least some errors in
# some cases early.
open(filename, mode).close()
self._f = None
self.should_close = True
def __getattr__(self, name):
return getattr(self.open(), name)
def __repr__(self):
if self._f is not None:
return repr(self._f)
return f"<unopened file '{self.name}' {self.mode}>"
def open(self):
"""Opens the file if it's not yet open. This call might fail with
a :exc:`FileError`. Not handling this error will produce an error
that Click shows.
"""
if self._f is not None:
return self._f
try:
rv, self.should_close = open_stream(
self.name, self.mode, self.encoding, self.errors, atomic=self.atomic
)
except OSError as e: # noqa: E402
from .exceptions import FileError
raise FileError(self.name, hint=get_strerror(e))
self._f = rv
return rv
def close(self):
"""Closes the underlying file, no matter what."""
if self._f is not None:
self._f.close()
def close_intelligently(self):
"""This function only closes the file if it was opened by the lazy
file wrapper. For instance this will never close stdin.
"""
if self.should_close:
self.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
self.close_intelligently()
def __iter__(self):
self.open()
return iter(self._f)
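# Editor's note: a usage sketch for LazyFile (illustration only; the path is
# hypothetical). The real file is only created on first IO.
def _example_lazyfile_usage():
    lf = LazyFile("/tmp/example-report.txt", mode="w")
    lf.write("done\n")  # __getattr__ opens the underlying file here
    lf.close_intelligently()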
class KeepOpenFile:
def __init__(self, file):
self._file = file
def __getattr__(self, name):
return getattr(self._file, name)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
pass
def __repr__(self):
return repr(self._file)
def __iter__(self):
return iter(self._file)
def echo(message=None, file=None, nl=True, err=False, color=None):
"""Prints a message plus a newline to the given file or stdout. On
first sight, this looks like the print function, but it has improved
support for handling Unicode and binary data that does not fail no
matter how badly configured the system is.
Primarily it means that you can print binary data as well as Unicode
data on both 2.x and 3.x to the given file in the most appropriate way
possible. This is a very carefree function in that it will try its
best to not fail. As of Click 6.0 this includes support for unicode
output on the Windows console.
In addition to that, if `colorama`_ is installed, the echo function will
also support clever handling of ANSI codes. Essentially it will then
do the following:
- add transparent handling of ANSI color codes on Windows.
- hide ANSI codes automatically if the destination file is not a
terminal.
.. _colorama: https://pypi.org/project/colorama/
.. versionchanged:: 6.0
As of Click 6.0 the echo function will properly support unicode
output on the Windows console. Note that click does not modify
the interpreter in any way which means that `sys.stdout` or the
print statement or function will still not provide unicode support.
.. versionchanged:: 2.0
Starting with version 2.0 of Click, the echo function will work
with colorama if it's installed.
.. versionadded:: 3.0
The `err` parameter was added.
.. versionchanged:: 4.0
Added the `color` flag.
:param message: the message to print
:param file: the file to write to (defaults to ``stdout``)
:param err: if set to true the file defaults to ``stderr`` instead of
``stdout``. This is faster and easier than calling
:func:`get_text_stderr` yourself.
:param nl: if set to `True` (the default) a newline is printed afterwards.
:param color: controls if the terminal supports ANSI colors or not. The
default is autodetection.
"""
if file is None:
if err:
file = _default_text_stderr()
else:
file = _default_text_stdout()
# Convert non bytes/text into the native string type.
if message is not None and not isinstance(message, echo_native_types):
message = str(message)
if nl:
message = message or ""
if isinstance(message, str):
message += "\n"
else:
message += b"\n"
# If there is a message and the value looks like bytes, we manually
# need to find the binary stream and write the message in there.
# This is done separately so that most stream types will work as you
# would expect. Eg: you can write to StringIO for other cases.
if message and is_bytes(message):
binary_file = _find_binary_writer(file)
if binary_file is not None:
file.flush()
binary_file.write(message)
binary_file.flush()
return
# ANSI-style support. If there is no message or we are dealing with
# bytes nothing is happening. If we are connected to a file we want
# to strip colors. If we are on windows we either wrap the stream
# to strip the color or we use the colorama support to translate the
# ansi codes to API calls.
if message and not is_bytes(message):
color = resolve_color_default(color)
if should_strip_ansi(file, color):
message = strip_ansi(message)
elif WIN:
if auto_wrap_for_ansi is not None:
file = auto_wrap_for_ansi(file)
elif not color:
message = strip_ansi(message)
if message:
file.write(message)
file.flush()
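# Editor's note: a usage sketch for echo() (illustration only). str and bytes
# are both supported, and ANSI codes are stripped for non-terminal targets.
def _example_echo_usage():
    echo("plain text")
    echo(b"raw bytes")  # routed to the underlying binary stream
    echo("diagnostics", err=True)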
def get_binary_stream(name):
"""Returns a system stream for byte processing.
:param name: the name of the stream to open. Valid names are ``'stdin'``,
``'stdout'`` and ``'stderr'``
"""
opener = binary_streams.get(name)
if opener is None:
raise TypeError(f"Unknown standard stream '{name}'")
return opener()
def get_text_stream(name, encoding=None, errors="strict"):
"""Returns a system stream for text processing. This usually returns
a wrapped stream around a binary stream returned from
:func:`get_binary_stream` but it also can take shortcuts for already
correctly configured streams.
:param name: the name of the stream to open. Valid names are ``'stdin'``,
``'stdout'`` and ``'stderr'``
:param encoding: overrides the detected default encoding.
:param errors: overrides the default error mode.
"""
opener = text_streams.get(name)
if opener is None:
raise TypeError(f"Unknown standard stream '{name}'")
return opener(encoding, errors)
def open_file(
filename, mode="r", encoding=None, errors="strict", lazy=False, atomic=False
):
"""This is similar to how the :class:`File` works but for manual
usage. Files are opened non lazy by default. This can open regular
files as well as stdin/stdout if ``'-'`` is passed.
If stdin/stdout is returned the stream is wrapped so that the context
manager will not close the stream accidentally. This makes it possible
to always use the function like this without having to worry about
accidentally closing a standard stream::
with open_file(filename) as f:
...
.. versionadded:: 3.0
:param filename: the name of the file to open (or ``'-'`` for stdin/stdout).
:param mode: the mode in which to open the file.
:param encoding: the encoding to use.
:param errors: the error handling for this file.
:param lazy: can be flipped to true to open the file lazily.
:param atomic: in atomic mode writes go into a temporary file and it's
moved on close.
"""
if lazy:
return LazyFile(filename, mode, encoding, errors, atomic=atomic)
f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic)
if not should_close:
f = KeepOpenFile(f)
return f
def get_os_args():
"""Returns the argument part of ``sys.argv``, removing the first
value which is the name of the script.
.. deprecated:: 8.0
Will be removed in 8.1. Access ``sys.argv[1:]`` directly
instead.
"""
import warnings
warnings.warn(
"'get_os_args' is deprecated and will be removed in 8.1. Access"
" 'sys.argv[1:]' directly instead.",
DeprecationWarning,
stacklevel=2,
)
return sys.argv[1:]
def format_filename(filename, shorten=False):
"""Formats a filename for user display. The main purpose of this
function is to ensure that the filename can be displayed at all. This
will decode the filename to unicode if necessary in a way that it will
not fail. Optionally, it can shorten the filename to not include the
full path to the filename.
:param filename: formats a filename for UI display. This will also convert
the filename into unicode without failing.
:param shorten: this optionally shortens the filename to strip off the
path that leads up to it.
"""
if shorten:
filename = os.path.basename(filename)
return filename_to_ui(filename)
def get_app_dir(app_name, roaming=True, force_posix=False):
r"""Returns the config folder for the application. The default behavior
is to return whatever is most appropriate for the operating system.
To give you an idea, for an app called ``"Foo Bar"``, something like
the following folders could be returned:
Mac OS X:
``~/Library/Application Support/Foo Bar``
Mac OS X (POSIX):
``~/.foo-bar``
Unix:
``~/.config/foo-bar``
Unix (POSIX):
``~/.foo-bar``
Win XP (roaming):
``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
Win XP (not roaming):
``C:\Documents and Settings\<user>\Application Data\Foo Bar``
Win 7 (roaming):
``C:\Users\<user>\AppData\Roaming\Foo Bar``
Win 7 (not roaming):
``C:\Users\<user>\AppData\Local\Foo Bar``
.. versionadded:: 2.0
:param app_name: the application name. This should be properly capitalized
and can contain whitespace.
:param roaming: controls if the folder should be roaming or not on Windows.
Has no effect otherwise.
:param force_posix: if this is set to `True` then on any POSIX system the
folder will be stored in the home folder with a leading
dot instead of the XDG config home or Darwin's
application support folder.
"""
if WIN:
key = "APPDATA" if roaming else "LOCALAPPDATA"
folder = os.environ.get(key)
if folder is None:
folder = os.path.expanduser("~")
return os.path.join(folder, app_name)
if force_posix:
return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
if sys.platform == "darwin":
return os.path.join(
os.path.expanduser("~/Library/Application Support"), app_name
)
return os.path.join(
os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
_posixify(app_name),
)
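# A hedged sketch: on Linux this typically resolves to ``~/.config/foo-bar``
# (or under ``$XDG_CONFIG_HOME``), on Windows to a folder under ``%APPDATA%``
# or ``%LOCALAPPDATA%`` depending on ``roaming``. The app name, file name,
# and function name are hypothetical:
def _example_get_app_dir():
    import os
    config_dir = get_app_dir("Foo Bar")
    return os.path.join(config_dir, "config.yaml")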
class PacifyFlushWrapper:
"""This wrapper is used to catch and suppress BrokenPipeErrors resulting
from ``.flush()`` being called on broken pipe during the shutdown/final-GC
of the Python interpreter. Notably ``.flush()`` is always called on
``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
other cleanup code, and the case where the underlying file is not a broken
pipe, all calls and attributes are proxied.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
def flush(self):
try:
self.wrapped.flush()
except OSError as e:
import errno
if e.errno != errno.EPIPE:
raise
def __getattr__(self, attr):
return getattr(self.wrapped, attr)
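# A hedged usage sketch: wrapping ``sys.stdout`` so that the interpreter's
# final implicit ``flush()`` on an already-broken pipe is swallowed instead
# of raising. click's own entry point applies a similar wrap after an EPIPE;
# the function name here is hypothetical:
def _example_pacify_stdout():
    import sys
    sys.stdout = PacifyFlushWrapper(sys.stdout)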

46
libs/dynaconf/vendor/dotenv/__init__.py vendored Normal file
View File

@@ -0,0 +1,46 @@
from .compat import IS_TYPE_CHECKING
from .main import load_dotenv, get_key, set_key, unset_key, find_dotenv, dotenv_values
if IS_TYPE_CHECKING:
from typing import Any, Optional
def load_ipython_extension(ipython):
# type: (Any) -> None
from .ipython import load_ipython_extension
load_ipython_extension(ipython)
def get_cli_string(path=None, action=None, key=None, value=None, quote=None):
# type: (Optional[str], Optional[str], Optional[str], Optional[str], Optional[str]) -> str
"""Returns a string suitable for running as a shell script.
Useful for converting a arguments passed to a fabric task
to be passed to a `local` or `run` command.
"""
command = ['dotenv']
if quote:
command.append('-q %s' % quote)
if path:
command.append('-f %s' % path)
if action:
command.append(action)
if key:
command.append(key)
if value:
if ' ' in value:
command.append('"%s"' % value)
else:
command.append(value)
return ' '.join(command).strip()
__all__ = ['get_cli_string',
'load_dotenv',
'dotenv_values',
'get_key',
'set_key',
'unset_key',
'find_dotenv',
'load_ipython_extension']
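# A hedged sketch of ``get_cli_string``: it assembles a ``dotenv`` shell
# command from the given pieces, quoting values that contain spaces. The
# key/value pair and function name are hypothetical:
def _example_get_cli_string():
    cmd = get_cli_string(path=".env", action="set", key="GREETING", value="hello world")
    # cmd == 'dotenv -f .env set GREETING "hello world"'
    return cmd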

145
libs/dynaconf/vendor/dotenv/cli.py vendored Normal file
View File

@@ -0,0 +1,145 @@
import os
import sys
from subprocess import Popen
try:
from dynaconf.vendor import click
except ImportError:
sys.stderr.write('It seems python-dotenv is not installed with the cli option.\n'
'Run pip install "python-dotenv[cli]" to fix this.')
sys.exit(1)
from .compat import IS_TYPE_CHECKING, to_env
from .main import dotenv_values, get_key, set_key, unset_key
from .version import __version__
if IS_TYPE_CHECKING:
from typing import Any, List, Dict
@click.group()
@click.option('-f', '--file', default=os.path.join(os.getcwd(), '.env'),
type=click.Path(exists=True),
help="Location of the .env file, defaults to .env file in current working directory.")
@click.option('-q', '--quote', default='always',
type=click.Choice(['always', 'never', 'auto']),
help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.")
@click.version_option(version=__version__)
@click.pass_context
def cli(ctx, file, quote):
# type: (click.Context, Any, Any) -> None
'''This script is used to set, get or unset values from a .env file.'''
ctx.obj = {}
ctx.obj['FILE'] = file
ctx.obj['QUOTE'] = quote
@cli.command()
@click.pass_context
def list(ctx):
# type: (click.Context) -> None
'''Display all the stored key/value pairs.'''
file = ctx.obj['FILE']
dotenv_as_dict = dotenv_values(file)
for k, v in dotenv_as_dict.items():
click.echo('%s=%s' % (k, v))
@cli.command()
@click.pass_context
@click.argument('key', required=True)
@click.argument('value', required=True)
def set(ctx, key, value):
# type: (click.Context, Any, Any) -> None
'''Store the given key/value.'''
file = ctx.obj['FILE']
quote = ctx.obj['QUOTE']
success, key, value = set_key(file, key, value, quote)
if success:
click.echo('%s=%s' % (key, value))
else:
exit(1)
@cli.command()
@click.pass_context
@click.argument('key', required=True)
def get(ctx, key):
# type: (click.Context, Any) -> None
'''Retrieve the value for the given key.'''
file = ctx.obj['FILE']
stored_value = get_key(file, key)
if stored_value:
click.echo('%s=%s' % (key, stored_value))
else:
exit(1)
@cli.command()
@click.pass_context
@click.argument('key', required=True)
def unset(ctx, key):
# type: (click.Context, Any) -> None
'''Remove the given key.'''
file = ctx.obj['FILE']
quote = ctx.obj['QUOTE']
success, key = unset_key(file, key, quote)
if success:
click.echo("Successfully removed %s" % key)
else:
exit(1)
@cli.command(context_settings={'ignore_unknown_options': True})
@click.pass_context
@click.argument('commandline', nargs=-1, type=click.UNPROCESSED)
def run(ctx, commandline):
# type: (click.Context, List[str]) -> None
"""Run command with environment variables present."""
file = ctx.obj['FILE']
dotenv_as_dict = {to_env(k): to_env(v) for (k, v) in dotenv_values(file).items() if v is not None}
if not commandline:
click.echo('No command given.')
exit(1)
ret = run_command(commandline, dotenv_as_dict)
exit(ret)
def run_command(command, env):
# type: (List[str], Dict[str, str]) -> int
"""Run command in sub process.
Runs the command in a sub process with the variables from `env`
added in the current environment variables.
Parameters
----------
command: List[str]
The command and its parameters
env: Dict
The additional environment variables
Returns
-------
int
The return code of the command
"""
# copy the current environment variables and add the values from
# `env`
cmd_env = os.environ.copy()
cmd_env.update(env)
p = Popen(command,
universal_newlines=True,
bufsize=0,
shell=False,
env=cmd_env)
_, _ = p.communicate()
return p.returncode
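# A hedged sketch of ``run_command``: the argv runs in a child process whose
# environment is the current one plus ``env``. The variable is hypothetical
# and ``printenv`` assumes a POSIX system:
def _example_run_command():
    exit_code = run_command(["printenv", "GREETING"], {"GREETING": "hello"})
    return exit_code  # the child's return code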
if __name__ == "__main__":
cli()

49
libs/dynaconf/vendor/dotenv/compat.py vendored Normal file
View File

@@ -0,0 +1,49 @@
import sys
PY2 = sys.version_info[0] == 2 # type: bool
if PY2:
from StringIO import StringIO # noqa
else:
from io import StringIO # noqa
def is_type_checking():
# type: () -> bool
try:
from typing import TYPE_CHECKING
except ImportError:
return False
return TYPE_CHECKING
IS_TYPE_CHECKING = is_type_checking()
if IS_TYPE_CHECKING:
from typing import Text
def to_env(text):
# type: (Text) -> str
"""
Encode a string the same way whether it comes from the environment or a `.env` file.
"""
if PY2:
return text.encode(sys.getfilesystemencoding() or "utf-8")
else:
return text
def to_text(string):
# type: (str) -> Text
"""
Make a string Unicode if it isn't already.
This is useful for defining raw unicode strings because `ur"foo"` isn't valid in
Python 3.
"""
if PY2:
return string.decode("utf-8")
else:
return string
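# A hedged round-trip sketch: ``to_text`` yields unicode on both Python 2
# and 3, and ``to_env`` produces whatever ``os.environ`` expects (encoded
# bytes on Python 2, ``str`` on Python 3). The value and function name are
# hypothetical:
def _example_compat_round_trip():
    raw = to_text("hello")
    env_ready = to_env(raw)
    return env_ready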

Some files were not shown because too many files have changed in this diff