Merge development into master

This commit is contained in:
github-actions[bot] 2021-12-30 11:52:19 +00:00 committed by GitHub
commit e305aad597
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
490 changed files with 28834 additions and 111318 deletions

View File

@ -46,6 +46,7 @@ If you need something that is not already part of Bazarr, feel free to create a
* Assrt
* BetaSeries
* BSplayer
* Embedded Subtitles
* GreekSubtitles
* Hosszupuska
* LegendasDivx

View File

@ -20,8 +20,8 @@ def check_python_version():
print("Python " + minimum_py3_str + " or greater required. "
"Current version is " + platform.python_version() + ". Please upgrade Python.")
sys.exit(1)
elif int(python_version[0]) == 3 and int(python_version[1]) == 9:
print("Python 3.9.x is unsupported. Current version is " + platform.python_version() +
elif int(python_version[0]) == 3 and int(python_version[1]) > 10:
print("Python version greater than 3.10.x is unsupported. Current version is " + platform.python_version() +
". Keep in mind that even if it works, you're on your own.")
elif (int(python_version[0]) == minimum_py3_tuple[0] and int(python_version[1]) < minimum_py3_tuple[1]) or \
(int(python_version[0]) != minimum_py3_tuple[0]):

File diff suppressed because it is too large Load Diff

25
bazarr/api/__init__.py Normal file
View File

@ -0,0 +1,25 @@
# coding=utf-8
from .badges import api_bp_badges
from .system import api_bp_system
from .series import api_bp_series
from .episodes import api_bp_episodes
from .providers import api_bp_providers
from .subtitles import api_bp_subtitles
from .webhooks import api_bp_webhooks
from .history import api_bp_history
from .files import api_bp_files
from .movies import api_bp_movies
# Aggregate list of every API blueprint; the main Flask app iterates this
# list to register all endpoints in one pass.
api_bp_list = [
api_bp_badges,
api_bp_system,
api_bp_series,
api_bp_episodes,
api_bp_providers,
api_bp_subtitles,
api_bp_webhooks,
api_bp_history,
api_bp_files,
api_bp_movies
]

View File

@ -0,0 +1,12 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .badges import Badges
# Blueprint + Flask-RESTful Api wiring for the badges endpoint.
api_bp_badges = Blueprint('api_badges', __name__)
api = Api(api_bp_badges)
# GET /badges -> Badges.get
api.add_resource(Badges, '/badges')

View File

@ -0,0 +1,47 @@
# coding=utf-8
from flask import jsonify
from flask_restful import Resource
import operator
from functools import reduce
from database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies
from get_providers import get_throttled_providers
from utils import get_health_issues
from ..utils import authenticate
class Badges(Resource):
    """Counts displayed as UI badges: episodes/movies with missing subtitles,
    throttled providers and health issues."""

    @authenticate
    def get(self):
        """Return a JSON object with the four badge counters."""
        # BUG FIX: the original used `TableEpisodes.missing_subtitles is not None`,
        # a Python identity test on the field object itself (always True), so the
        # NULL filter never reached SQL. `.is_null(False)` builds the intended
        # `IS NOT NULL` clause.
        episodes_conditions = [(TableEpisodes.missing_subtitles.is_null(False)),
                               (TableEpisodes.missing_subtitles != '[]')]
        episodes_conditions += get_exclusion_clause('series')
        missing_episodes = TableEpisodes.select(TableShows.tags,
                                                TableShows.seriesType,
                                                TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, episodes_conditions))\
            .count()

        # Same fix for movies (was `TableMovies.missing_subtitles is not None`).
        movies_conditions = [(TableMovies.missing_subtitles.is_null(False)),
                             (TableMovies.missing_subtitles != '[]')]
        movies_conditions += get_exclusion_clause('movie')
        missing_movies = TableMovies.select(TableMovies.tags,
                                            TableMovies.monitored)\
            .where(reduce(operator.and_, movies_conditions))\
            .count()

        throttled_providers = len(get_throttled_providers())
        health_issues = len(get_health_issues())

        result = {
            "episodes": missing_episodes,
            "movies": missing_movies,
            "providers": throttled_providers,
            "status": health_issues
        }
        return jsonify(result)

View File

@ -0,0 +1,20 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .episodes import Episodes
from .episodes_subtitles import EpisodesSubtitles
from .history import EpisodesHistory
from .wanted import EpisodesWanted
from .blacklist import EpisodesBlacklist
# Blueprint + Flask-RESTful Api wiring for all episode-related endpoints.
api_bp_episodes = Blueprint('api_episodes', __name__)
api = Api(api_bp_episodes)
# Route -> resource mapping for the /episodes* endpoints.
api.add_resource(Episodes, '/episodes')
api.add_resource(EpisodesWanted, '/episodes/wanted')
api.add_resource(EpisodesSubtitles, '/episodes/subtitles')
api.add_resource(EpisodesHistory, '/episodes/history')
api.add_resource(EpisodesBlacklist, '/episodes/blacklist')

View File

@ -0,0 +1,92 @@
# coding=utf-8
import datetime
import pretty
from flask import request, jsonify
from flask_restful import Resource
from database import TableEpisodes, TableShows, TableBlacklist
from ..utils import authenticate, postprocessEpisode
from utils import blacklist_log, delete_subtitles, blacklist_delete_all, blacklist_delete
from helper import path_mappings
from get_subtitle import episode_download_subtitles
from event_handler import event_stream
# GET: get blacklist
# POST: add blacklist
# DELETE: remove blacklist
# Manage the episode-subtitle blacklist.
# GET: paged list of blacklisted subtitles; POST: blacklist one subtitle,
# delete it from disk and trigger a new search; DELETE: remove one or all entries.
class EpisodesBlacklist(Resource):
@authenticate
def get(self):
# Paging window: start offset and row count (-1 = no limit).
start = request.args.get('start') or 0
length = request.args.get('length') or -1
data = TableBlacklist.select(TableShows.title.alias('seriesTitle'),
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
TableEpisodes.title.alias('episodeTitle'),
TableEpisodes.sonarrSeriesId,
TableBlacklist.provider,
TableBlacklist.subs_id,
TableBlacklist.language,
TableBlacklist.timestamp)\
.join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\
.join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\
.order_by(TableBlacklist.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
data = list(data)
for item in data:
# Make timestamp pretty
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
postprocessEpisode(item)
return jsonify(data=data)
@authenticate
def post(self):
# NOTE(review): int() raises TypeError/ValueError if seriesid/episodeid is
# missing or non-numeric -> Flask turns that into a 500; no validation here.
sonarr_series_id = int(request.args.get('seriesid'))
sonarr_episode_id = int(request.args.get('episodeid'))
provider = request.form.get('provider')
subs_id = request.form.get('subs_id')
language = request.form.get('language')
# Look up the episode's media path; .get() raises DoesNotExist if unknown.
episodeInfo = TableEpisodes.select(TableEpisodes.path)\
.where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)\
.dicts()\
.get()
media_path = episodeInfo['path']
subtitles_path = request.form.get('subtitles_path')
# Record the blacklist entry before deleting the subtitle file.
blacklist_log(sonarr_series_id=sonarr_series_id,
sonarr_episode_id=sonarr_episode_id,
provider=provider,
subs_id=subs_id,
language=language)
delete_subtitles(media_type='series',
language=language,
forced=False,
hi=False,
media_path=path_mappings.path_replace(media_path),
subtitles_path=subtitles_path,
sonarr_series_id=sonarr_series_id,
sonarr_episode_id=sonarr_episode_id)
# Immediately try to find a replacement subtitle, then notify UI listeners.
episode_download_subtitles(sonarr_episode_id)
event_stream(type='episode-history')
return '', 200
@authenticate
def delete(self):
# ?all=true wipes the whole blacklist; otherwise remove a single entry
# identified by provider + subs_id from the form body.
if request.args.get("all") == "true":
blacklist_delete_all()
else:
provider = request.form.get('provider')
subs_id = request.form.get('subs_id')
blacklist_delete(provider=provider, subs_id=subs_id)
return '', 204

View File

@ -0,0 +1,30 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from database import TableEpisodes
from ..utils import authenticate, postprocessEpisode
class Episodes(Resource):
    """Read-only access to episode rows, filtered by episode or series ids."""

    @authenticate
    def get(self):
        """Return episodes for the given episodeid[] or seriesid[] query args.

        Episode ids take precedence over series ids; with neither, a 400 is
        returned.
        """
        series_ids = request.args.getlist('seriesid[]')
        episode_ids = request.args.getlist('episodeid[]')

        if episode_ids:
            rows = TableEpisodes.select()\
                .where(TableEpisodes.sonarrEpisodeId.in_(episode_ids))\
                .dicts()
        elif series_ids:
            rows = TableEpisodes.select()\
                .where(TableEpisodes.sonarrSeriesId.in_(series_ids))\
                .order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc())\
                .dicts()
        else:
            return "Series or Episode ID not provided", 400

        episodes = list(rows)
        for episode in episodes:
            # In-place enrichment/normalization of each row dict.
            postprocessEpisode(episode)
        return jsonify(data=episodes)

View File

@ -0,0 +1,178 @@
# coding=utf-8
import os
from flask import request
from flask_restful import Resource
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from database import TableEpisodes, get_audio_profile_languages, get_profile_id
from ..utils import authenticate
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
from get_subtitle import download_subtitle, manual_upload_subtitle
from utils import history_log, delete_subtitles
from notifier import send_notifications
from list_subtitles import store_subtitles
from event_handler import event_stream
from config import settings
# PATCH: Download Subtitles
# POST: Upload Subtitles
# DELETE: Delete Subtitles
# Manage subtitle files for a single episode.
# PATCH: search providers and download a subtitle; POST: manual upload;
# DELETE: delete a subtitle file from disk.
class EpisodesSubtitles(Resource):
@authenticate
def patch(self):
sonarrSeriesId = request.args.get('seriesid')
sonarrEpisodeId = request.args.get('episodeid')
# .get() raises DoesNotExist if the episode id is unknown (-> 500).
episodeInfo = TableEpisodes.select(TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language)\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
.get()
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name']
audio_language = episodeInfo['audio_language']
if sceneName is None: sceneName = "None"
language = request.form.get('language')
# .capitalize() turns 'true'/'false' into 'True'/'False' as expected by
# the downloader; raises AttributeError if the form field is missing.
hi = request.form.get('hi').capitalize()
forced = request.form.get('forced').capitalize()
providers_list = get_providers()
providers_auth = get_providers_auth()
# Prefer the profile's audio language over the row value when available.
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = None
try:
result = download_subtitle(episodePath, language, audio_language, hi, forced, providers_list,
providers_auth, sceneName, title, 'series',
profile_id=get_profile_id(episode_id=sonarrEpisodeId))
if result is not None:
# result tuple layout: 0=message, 1=path, 2=language code, 3=provider,
# 4=score, 5=forced, 6=subs_id, 7=subs_path, 8=hearing-impaired flag.
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
# Action 1 = downloaded.
history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
subs_path)
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
store_subtitles(path, episodePath)
else:
# Nothing found: just refresh the episode in the UI.
event_stream(type='episode', payload=sonarrEpisodeId)
except OSError:
# Best effort: filesystem errors during download are silently ignored.
pass
return '', 204
@authenticate
def post(self):
sonarrSeriesId = request.args.get('seriesid')
sonarrEpisodeId = request.args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language)\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
.get()
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name']
audio_language = episodeInfo['audio_language']
if sceneName is None: sceneName = "None"
language = request.form.get('language')
forced = True if request.form.get('forced') == 'true' else False
hi = True if request.form.get('hi') == 'true' else False
subFile = request.files.get('file')
# Reject uploads whose extension is not a known subtitle format.
_, ext = os.path.splitext(subFile.filename)
if ext not in SUBTITLE_EXTENSIONS:
raise ValueError('A subtitle of an invalid format was uploaded.')
try:
result = manual_upload_subtitle(path=episodePath,
language=language,
forced=forced,
hi=hi,
title=title,
scene_name=sceneName,
media_type='series',
subtitle=subFile,
audio_language=audio_language)
if result is not None:
# result tuple layout: 0=message, 1=path, 2=subtitles path.
message = result[0]
path = result[1]
subs_path = result[2]
if hi:
language_code = language + ":hi"
elif forced:
language_code = language + ":forced"
else:
language_code = language
provider = "manual"
# Manual uploads get the maximum series score.
score = 360
# Action 4 = manually uploaded.
history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score,
subtitles_path=subs_path)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
store_subtitles(path, episodePath)
except OSError:
pass
return '', 204
@authenticate
def delete(self):
sonarrSeriesId = request.args.get('seriesid')
sonarrEpisodeId = request.args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language)\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
.get()
episodePath = path_mappings.path_replace(episodeInfo['path'])
language = request.form.get('language')
forced = request.form.get('forced')
hi = request.form.get('hi')
subtitlesPath = request.form.get('path')
# Convert the client-visible path back to the server-side path.
subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath)
delete_subtitles(media_type='series',
language=language,
forced=forced,
hi=hi,
media_path=episodePath,
subtitles_path=subtitlesPath,
sonarr_series_id=sonarrSeriesId,
sonarr_episode_id=sonarrEpisodeId)
return '', 204

View File

@ -0,0 +1,133 @@
# coding=utf-8
import datetime
import os
import operator
import pretty
from flask import request, jsonify
from flask_restful import Resource
from functools import reduce
from peewee import fn
from datetime import timedelta
from database import get_exclusion_clause, TableEpisodes, TableShows, TableHistory, TableBlacklist
from ..utils import authenticate, postprocessEpisode
from config import settings
from helper import path_mappings
class EpisodesHistory(Resource):
    """Paged subtitle download/upgrade history for episodes."""

    @authenticate
    def get(self):
        """Return episode history rows, flagging upgradable and blacklisted items.

        Query args:
            start/length: paging window (defaults 0 / -1 = no limit).
            episodeid: optional filter on a single episode.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        episodeid = request.args.get('episodeid')

        upgradable_episodes_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            # Oldest timestamp (epoch seconds) still eligible for upgrade.
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3, 6]
            else:
                query_actions = [1, 3]

            # BUG FIX: `TableHistory.score is not None` was a Python identity test
            # on the field object (always True) and never filtered NULL scores in
            # SQL; `.is_null(False)` builds the intended IS NOT NULL clause.
            upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)),
                                              (TableHistory.timestamp > minimum_timestamp),
                                              (TableHistory.score.is_null(False))]
            upgradable_episodes_conditions += get_exclusion_clause('series')
            upgradable_episodes = TableHistory.select(TableHistory.video_path,
                                                      fn.MAX(TableHistory.timestamp).alias('timestamp'),
                                                      TableHistory.score,
                                                      TableShows.tags,
                                                      TableEpisodes.monitored,
                                                      TableShows.seriesType)\
                .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
                .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
                .where(reduce(operator.and_, upgradable_episodes_conditions))\
                .group_by(TableHistory.video_path)\
                .dicts()
            upgradable_episodes = list(upgradable_episodes)
            for upgradable_episode in upgradable_episodes:
                if upgradable_episode['timestamp'] > minimum_timestamp:
                    try:
                        int(upgradable_episode['score'])
                    except ValueError:
                        pass
                    else:
                        # 360 is a perfect series score; below that is upgradable.
                        if int(upgradable_episode['score']) < 360:
                            upgradable_episodes_not_perfect.append(upgradable_episode)

        # BUG FIX: was `TableEpisodes.title is not None` (always True in Python,
        # never an SQL filter).
        query_conditions = [(TableEpisodes.title.is_null(False))]
        if episodeid:
            query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid))
        query_condition = reduce(operator.and_, query_conditions)
        episode_history = TableHistory.select(TableHistory.id,
                                              TableShows.title.alias('seriesTitle'),
                                              TableEpisodes.monitored,
                                              TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                              TableEpisodes.title.alias('episodeTitle'),
                                              TableHistory.timestamp,
                                              TableHistory.subs_id,
                                              TableHistory.description,
                                              TableHistory.sonarrSeriesId,
                                              TableEpisodes.path,
                                              TableHistory.language,
                                              TableHistory.score,
                                              TableShows.tags,
                                              TableHistory.action,
                                              TableHistory.subtitles_path,
                                              TableHistory.sonarrEpisodeId,
                                              TableHistory.provider,
                                              TableShows.seriesType)\
            .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
            .where(query_condition)\
            .order_by(TableHistory.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        episode_history = list(episode_history)

        blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts()
        blacklist_db = list(blacklist_db)

        for item in episode_history:
            # Mark episode as upgradable or not
            item.update({"upgradable": False})
            if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
                    "tags": str(item['tags']), "monitored": str(item['monitored']),
                    "seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect:
                if os.path.isfile(path_mappings.path_replace(item['subtitles_path'])):
                    item.update({"upgradable": True})

            del item['path']
            postprocessEpisode(item)

            if item['score']:
                item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"

            # Make timestamp pretty
            if item['timestamp']:
                item["raw_timestamp"] = int(item['timestamp'])
                item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
                item['timestamp'] = pretty.date(item["raw_timestamp"])

            # Check if subtitles is blacklisted
            item.update({"blacklisted": False})
            if item['action'] not in [0, 4, 5]:
                for blacklisted_item in blacklist_db:
                    if blacklisted_item['provider'] == item['provider'] and \
                            blacklisted_item['subs_id'] == item['subs_id']:
                        item.update({"blacklisted": True})
                        break

        # BUG FIX: count previously used `TableEpisodes.title is not None` too.
        count = TableHistory.select()\
            .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
            .where(TableEpisodes.title.is_null(False)).count()

        return jsonify(data=episode_history, total=count)

View File

@ -0,0 +1,74 @@
# coding=utf-8
import operator
from flask import request, jsonify
from flask_restful import Resource
from functools import reduce
from database import get_exclusion_clause, TableEpisodes, TableShows
from ..utils import authenticate, postprocessEpisode
# GET: Get Wanted Episodes
class EpisodesWanted(Resource):
    """Episodes that still have missing subtitles ("wanted" list)."""

    @authenticate
    def get(self):
        """Return wanted episodes, either for specific episodeid[] args or paged.

        Also returns the total wanted count (exclusions applied).
        """
        episodeid = request.args.getlist('episodeid[]')

        wanted_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        if len(episodeid) > 0:
            # BUG FIX: was `TableEpisodes.sonarrEpisodeId in episodeid`, which uses
            # Python's `in` operator (truthy per-element comparison objects) and
            # therefore never became a SQL filter; `.in_()` builds the IN clause.
            wanted_conditions.append((TableEpisodes.sonarrEpisodeId.in_(episodeid)))
        wanted_conditions += get_exclusion_clause('series')
        wanted_condition = reduce(operator.and_, wanted_conditions)

        # Shared projection/join for both branches (was duplicated verbatim).
        query = TableEpisodes.select(TableShows.title.alias('seriesTitle'),
                                     TableEpisodes.monitored,
                                     TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                     TableEpisodes.title.alias('episodeTitle'),
                                     TableEpisodes.missing_subtitles,
                                     TableEpisodes.sonarrSeriesId,
                                     TableEpisodes.sonarrEpisodeId,
                                     TableEpisodes.scene_name.alias('sceneName'),
                                     TableShows.tags,
                                     TableEpisodes.failedAttempts,
                                     TableShows.seriesType)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(wanted_condition)

        if len(episodeid) > 0:
            data = query.dicts()
        else:
            # Paged variant, newest rows first.
            start = request.args.get('start') or 0
            length = request.args.get('length') or -1
            data = query.order_by(TableEpisodes.rowid.desc())\
                .limit(length)\
                .offset(start)\
                .dicts()
        data = list(data)

        for item in data:
            postprocessEpisode(item)

        # Total wanted count, ignoring the episodeid filter but honoring exclusions.
        count_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        count_conditions += get_exclusion_clause('series')
        count = TableEpisodes.select(TableShows.tags,
                                     TableShows.seriesType,
                                     TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, count_conditions))\
            .count()

        return jsonify(data=data, total=count)

View File

@ -0,0 +1,16 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .files import BrowseBazarrFS
from .files_sonarr import BrowseSonarrFS
from .files_radarr import BrowseRadarrFS
# Blueprint + Flask-RESTful Api wiring for filesystem-browsing endpoints.
api_bp_files = Blueprint('api_files', __name__)
api = Api(api_bp_files)
# One browser per backend: Bazarr's own FS, Sonarr's, and Radarr's.
api.add_resource(BrowseBazarrFS, '/files')
api.add_resource(BrowseSonarrFS, '/files/sonarr')
api.add_resource(BrowseRadarrFS, '/files/radarr')

24
bazarr/api/files/files.py Normal file
View File

@ -0,0 +1,24 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from filesystem import browse_bazarr_filesystem
from ..utils import authenticate
class BrowseBazarrFS(Resource):
    """Directory listing of the filesystem visible to Bazarr (path-picker UI)."""

    @authenticate
    def get(self):
        """Return the subdirectories of ?path= as picker nodes; [] on any failure."""
        requested_path = request.args.get('path') or ''
        try:
            listing = browse_bazarr_filesystem(requested_path)
            if listing is None:
                raise ValueError
        except Exception:
            # Any browsing failure is reported as an empty listing.
            return jsonify([])
        entries = [{'name': folder['name'], 'children': True, 'path': folder['path']}
                   for folder in listing['directories']]
        return jsonify(entries)

View File

@ -0,0 +1,24 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from filesystem import browse_radarr_filesystem
from ..utils import authenticate
class BrowseRadarrFS(Resource):
    """Directory listing of the filesystem visible to Radarr (path-picker UI)."""

    @authenticate
    def get(self):
        """Return the subdirectories of ?path= as picker nodes; [] on any failure."""
        requested_path = request.args.get('path') or ''
        try:
            listing = browse_radarr_filesystem(requested_path)
            if listing is None:
                raise ValueError
        except Exception:
            # Any browsing failure is reported as an empty listing.
            return jsonify([])
        entries = [{'name': folder['name'], 'children': True, 'path': folder['path']}
                   for folder in listing['directories']]
        return jsonify(entries)

View File

@ -0,0 +1,24 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from filesystem import browse_sonarr_filesystem
from ..utils import authenticate
class BrowseSonarrFS(Resource):
    """Directory listing of the filesystem visible to Sonarr (path-picker UI)."""

    @authenticate
    def get(self):
        """Return the subdirectories of ?path= as picker nodes; [] on any failure."""
        requested_path = request.args.get('path') or ''
        try:
            listing = browse_sonarr_filesystem(requested_path)
            if listing is None:
                raise ValueError
        except Exception:
            # Any browsing failure is reported as an empty listing.
            return jsonify([])
        entries = [{'name': folder['name'], 'children': True, 'path': folder['path']}
                   for folder in listing['directories']]
        return jsonify(entries)

View File

@ -0,0 +1,12 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .stats import HistoryStats
# Blueprint + Flask-RESTful Api wiring for history statistics.
api_bp_history = Blueprint('api_history', __name__)
api = Api(api_bp_history)
# GET /history/stats -> HistoryStats.get
api.add_resource(HistoryStats, '/history/stats')

View File

@ -0,0 +1,85 @@
# coding=utf-8
import time
import datetime
import operator
from dateutil import rrule
from flask import request, jsonify
from flask_restful import Resource
from functools import reduce
from peewee import fn
from database import TableHistory, TableHistoryMovie
from ..utils import authenticate
class HistoryStats(Resource):
    """Daily download counts for series and movies over a chosen timeframe."""

    @authenticate
    def get(self):
        """Return per-day history counts, zero-filled for days with no activity.

        Query args (all optional): timeframe (week/month/trimester/year,
        default month), action, provider, language ('All' = no filter).
        """
        timeframe = request.args.get('timeframe') or 'month'
        action = request.args.get('action') or 'All'
        provider = request.args.get('provider') or 'All'
        language = request.args.get('language') or 'All'

        # timeframe must be in ['week', 'month', 'trimester', 'year']
        if timeframe == 'year':
            delay = 364 * 24 * 60 * 60
        elif timeframe == 'trimester':
            delay = 90 * 24 * 60 * 60
        elif timeframe == 'month':
            delay = 30 * 24 * 60 * 60
        elif timeframe == 'week':
            delay = 6 * 24 * 60 * 60
        else:
            # BUG FIX: an unexpected timeframe value left `delay` unbound and
            # raised UnboundLocalError below; fall back to the 'month' default.
            delay = 30 * 24 * 60 * 60

        now = time.time()
        past = now - delay

        history_where_clauses = [(TableHistory.timestamp.between(past, now))]
        history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]

        if action != 'All':
            history_where_clauses.append((TableHistory.action == action))
            history_where_clauses_movie.append((TableHistoryMovie.action == action))
        else:
            # Default: only downloaded/upgraded-type actions.
            history_where_clauses.append((TableHistory.action.in_([1, 2, 3])))
            history_where_clauses_movie.append((TableHistoryMovie.action.in_([1, 2, 3])))

        if provider != 'All':
            history_where_clauses.append((TableHistory.provider == provider))
            history_where_clauses_movie.append((TableHistoryMovie.provider == provider))

        if language != 'All':
            history_where_clauses.append((TableHistory.language == language))
            history_where_clauses_movie.append((TableHistoryMovie.language == language))

        history_where_clause = reduce(operator.and_, history_where_clauses)
        history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie)

        data_series = TableHistory.select(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch').alias('date'),
                                          fn.COUNT(TableHistory.id).alias('count'))\
            .where(history_where_clause) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch'))\
            .dicts()
        data_series = list(data_series)

        data_movies = TableHistoryMovie.select(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch').alias('date'),
                                               fn.COUNT(TableHistoryMovie.id).alias('count')) \
            .where(history_where_clause_movie) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch')) \
            .dicts()
        data_movies = list(data_movies)

        # Zero-fill every day of the window so charts have a continuous x-axis.
        for dt in rrule.rrule(rrule.DAILY,
                              dtstart=datetime.datetime.now() - datetime.timedelta(seconds=delay),
                              until=datetime.datetime.now()):
            if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_series):
                data_series.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})
            if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_movies):
                data_movies.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})

        sorted_data_series = sorted(data_series, key=lambda i: i['date'])
        sorted_data_movies = sorted(data_movies, key=lambda i: i['date'])

        return jsonify(series=sorted_data_series, movies=sorted_data_movies)

View File

@ -0,0 +1,20 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .movies import Movies
from .movies_subtitles import MoviesSubtitles
from .history import MoviesHistory
from .wanted import MoviesWanted
from .blacklist import MoviesBlacklist
# Blueprint + Flask-RESTful Api wiring for all movie-related endpoints.
api_bp_movies = Blueprint('api_movies', __name__)
api = Api(api_bp_movies)
# Route -> resource mapping for the /movies* endpoints.
api.add_resource(Movies, '/movies')
api.add_resource(MoviesWanted, '/movies/wanted')
api.add_resource(MoviesSubtitles, '/movies/subtitles')
api.add_resource(MoviesHistory, '/movies/history')
api.add_resource(MoviesBlacklist, '/movies/blacklist')

View File

@ -0,0 +1,86 @@
# coding=utf-8
import datetime
import pretty
from flask import request, jsonify
from flask_restful import Resource
from database import TableMovies, TableBlacklistMovie
from ..utils import authenticate, postprocessMovie
from utils import blacklist_log_movie, delete_subtitles, blacklist_delete_all_movie, blacklist_delete_movie
from helper import path_mappings
from get_subtitle import movies_download_subtitles
from event_handler import event_stream
# GET: get blacklist
# POST: add blacklist
# DELETE: remove blacklist
# Manage the movie-subtitle blacklist.
# GET: paged list of blacklisted subtitles; POST: blacklist one subtitle,
# delete it from disk and trigger a new search; DELETE: remove one or all entries.
class MoviesBlacklist(Resource):
@authenticate
def get(self):
# Paging window: start offset and row count (-1 = no limit).
start = request.args.get('start') or 0
length = request.args.get('length') or -1
data = TableBlacklistMovie.select(TableMovies.title,
TableMovies.radarrId,
TableBlacklistMovie.provider,
TableBlacklistMovie.subs_id,
TableBlacklistMovie.language,
TableBlacklistMovie.timestamp)\
.join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
.order_by(TableBlacklistMovie.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
data = list(data)
for item in data:
postprocessMovie(item)
# Make timestamp pretty
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
return jsonify(data=data)
@authenticate
def post(self):
# NOTE(review): int() raises if radarrid is missing/non-numeric (-> 500).
radarr_id = int(request.args.get('radarrid'))
provider = request.form.get('provider')
subs_id = request.form.get('subs_id')
language = request.form.get('language')
# TODO
forced = False
hi = False
# .get() raises DoesNotExist if the movie id is unknown.
data = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get()
media_path = data['path']
subtitles_path = request.form.get('subtitles_path')
# Record the blacklist entry before deleting the subtitle file.
blacklist_log_movie(radarr_id=radarr_id,
provider=provider,
subs_id=subs_id,
language=language)
delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=path_mappings.path_replace_movie(media_path),
subtitles_path=subtitles_path,
radarr_id=radarr_id)
# Immediately try to find a replacement subtitle, then notify UI listeners.
movies_download_subtitles(radarr_id)
event_stream(type='movie-history')
return '', 200
@authenticate
def delete(self):
# ?all=true wipes the whole movie blacklist; otherwise remove one entry
# identified by provider + subs_id from the form body.
if request.args.get("all") == "true":
blacklist_delete_all_movie()
else:
provider = request.form.get('provider')
subs_id = request.form.get('subs_id')
blacklist_delete_movie(provider=provider, subs_id=subs_id)
return '', 200

View File

@ -0,0 +1,129 @@
# coding=utf-8
import datetime
import os
import operator
import pretty
from flask import request, jsonify
from flask_restful import Resource
from functools import reduce
from peewee import fn
from datetime import timedelta
from database import get_exclusion_clause, TableMovies, TableHistoryMovie, TableBlacklistMovie
from ..utils import authenticate, postprocessMovie
from config import settings
from helper import path_mappings
class MoviesHistory(Resource):
    """Paged subtitle download/upgrade history for movies."""

    @authenticate
    def get(self):
        """Return movie history rows, flagging upgradable and blacklisted items.

        Query args:
            start/length: paging window (defaults 0 / -1 = no limit).
            radarrid: optional filter on a single movie.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        radarrid = request.args.get('radarrid')

        upgradable_movies_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            # Oldest timestamp (epoch seconds) still eligible for upgrade.
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3, 6]
            else:
                query_actions = [1, 3]

            # BUG FIX: `TableHistoryMovie.score is not None` was a Python identity
            # test on the field object (always True) and never filtered NULL scores
            # in SQL; `.is_null(False)` builds the intended IS NOT NULL clause.
            upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)),
                                            (TableHistoryMovie.timestamp > minimum_timestamp),
                                            (TableHistoryMovie.score.is_null(False))]
            upgradable_movies_conditions += get_exclusion_clause('movie')
            upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
                                                         fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
                                                         TableHistoryMovie.score,
                                                         TableMovies.tags,
                                                         TableMovies.monitored)\
                .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
                .where(reduce(operator.and_, upgradable_movies_conditions))\
                .group_by(TableHistoryMovie.video_path)\
                .dicts()
            upgradable_movies = list(upgradable_movies)

            for upgradable_movie in upgradable_movies:
                if upgradable_movie['timestamp'] > minimum_timestamp:
                    try:
                        int(upgradable_movie['score'])
                    except ValueError:
                        pass
                    else:
                        # 120 is a perfect movie score; below that is upgradable.
                        if int(upgradable_movie['score']) < 120:
                            upgradable_movies_not_perfect.append(upgradable_movie)

        # BUG FIX: was `TableMovies.title is not None` (always True in Python,
        # never an SQL filter).
        query_conditions = [(TableMovies.title.is_null(False))]
        if radarrid:
            query_conditions.append((TableMovies.radarrId == radarrid))
        query_condition = reduce(operator.and_, query_conditions)

        movie_history = TableHistoryMovie.select(TableHistoryMovie.id,
                                                 TableHistoryMovie.action,
                                                 TableMovies.title,
                                                 TableHistoryMovie.timestamp,
                                                 TableHistoryMovie.description,
                                                 TableHistoryMovie.radarrId,
                                                 TableMovies.monitored,
                                                 TableHistoryMovie.video_path.alias('path'),
                                                 TableHistoryMovie.language,
                                                 TableMovies.tags,
                                                 TableHistoryMovie.score,
                                                 TableHistoryMovie.subs_id,
                                                 TableHistoryMovie.provider,
                                                 TableHistoryMovie.subtitles_path)\
            .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
            .where(query_condition)\
            .order_by(TableHistoryMovie.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        movie_history = list(movie_history)

        blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
        blacklist_db = list(blacklist_db)

        for item in movie_history:
            # Mark movies as upgradable or not
            item.update({"upgradable": False})
            if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
                    "tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect:
                if os.path.isfile(path_mappings.path_replace_movie(item['subtitles_path'])):
                    item.update({"upgradable": True})

            del item['path']
            postprocessMovie(item)

            if item['score']:
                item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"

            # Make timestamp pretty
            if item['timestamp']:
                item["raw_timestamp"] = int(item['timestamp'])
                item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
                item['timestamp'] = pretty.date(item["raw_timestamp"])

            # Check if subtitles is blacklisted
            item.update({"blacklisted": False})
            if item['action'] not in [0, 4, 5]:
                for blacklisted_item in blacklist_db:
                    if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[
                            'subs_id']:
                        item.update({"blacklisted": True})
                        break

        # BUG FIX: count previously used `TableMovies.title is not None` too.
        count = TableHistoryMovie.select()\
            .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
            .where(TableMovies.title.is_null(False))\
            .count()

        return jsonify(data=movie_history, total=count)

View File

@ -0,0 +1,80 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from database import TableMovies
from ..utils import authenticate, postprocessMovie, None_Keys
from list_subtitles import list_missing_subtitles_movies, movies_scan_subtitles
from event_handler import event_stream
from get_subtitle import movies_download_subtitles, wanted_search_missing_subtitles_movies
class Movies(Resource):
    # Radarr movies endpoint:
    #   GET   -> list movies (paginated, or filtered by radarrid[])
    #   POST  -> assign a languages profile to one or more movies
    #   PATCH -> trigger subtitle-related actions (scan/search)
    @authenticate
    def get(self):
        """Return movies as JSON with post-processed subtitles metadata.

        Query args: start/length for pagination (defaults 0 / -1 = all),
        or radarrid[] to select specific movies (pagination is then ignored).
        'total' is always the unfiltered movie count.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        radarrId = request.args.getlist('radarrid[]')
        count = TableMovies.select().count()
        if len(radarrId) != 0:
            # Explicit id filter: start/length are not applied in this branch.
            result = TableMovies.select()\
                .where(TableMovies.radarrId.in_(radarrId))\
                .order_by(TableMovies.sortTitle)\
                .dicts()
        else:
            result = TableMovies.select().order_by(TableMovies.sortTitle).limit(length).offset(start).dicts()
        result = list(result)
        for item in result:
            postprocessMovie(item)
        return jsonify(data=result, total=count)
    @authenticate
    def post(self):
        """Assign languages profiles to movies.

        Form args: parallel lists 'radarrid' and 'profileid'. A profile id in
        None_Keys clears the profile; a non-integer id returns 400.
        Missing subtitles are recomputed and SSE events are emitted per movie.
        """
        radarrIdList = request.form.getlist('radarrid')
        profileIdList = request.form.getlist('profileid')
        for idx in range(len(radarrIdList)):
            radarrId = radarrIdList[idx]
            profileId = profileIdList[idx]
            if profileId in None_Keys:
                profileId = None
            else:
                try:
                    profileId = int(profileId)
                except Exception:
                    return '', 400
            TableMovies.update({
                TableMovies.profileId: profileId
            })\
                .where(TableMovies.radarrId == radarrId)\
                .execute()
            # send_event=False: a single 'movie' event is emitted just below
            list_missing_subtitles_movies(no=radarrId, send_event=False)
            event_stream(type='movie', payload=radarrId)
            event_stream(type='movie-wanted', payload=radarrId)
        event_stream(type='badges')
        return '', 204
    @authenticate
    def patch(self):
        """Run an action for a movie: 'scan-disk', 'search-missing', or the
        global 'search-wanted'. Unknown actions return 400."""
        radarrid = request.form.get('radarrid')
        action = request.form.get('action')
        if action == "scan-disk":
            movies_scan_subtitles(radarrid)
            return '', 204
        elif action == "search-missing":
            movies_download_subtitles(radarrid)
            return '', 204
        elif action == "search-wanted":
            wanted_search_missing_subtitles_movies()
            return '', 204
        return '', 400

View File

@ -0,0 +1,176 @@
# coding=utf-8
import os
from flask import request
from flask_restful import Resource
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from database import TableMovies, get_audio_profile_languages, get_profile_id
from ..utils import authenticate
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
from get_subtitle import download_subtitle, manual_upload_subtitle
from utils import history_log_movie, delete_subtitles
from notifier import send_notifications_movie
from list_subtitles import store_subtitles_movie
from event_handler import event_stream
from config import settings
# PATCH: Download Subtitles
# POST: Upload Subtitles
# DELETE: Delete Subtitles
class MoviesSubtitles(Resource):
    # PATCH: Download Subtitles
    # POST: Upload Subtitles
    # DELETE: Delete Subtitles
    @authenticate
    def patch(self):
        # Download a subtitle for the movie identified by the 'radarrid'
        # query arg, using the configured providers and the movie's profile.
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language)\
            .where(TableMovies.radarrId == radarrId)\
            .dicts()\
            .get()
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        if sceneName is None: sceneName = 'None'
        title = movieInfo['title']
        audio_language = movieInfo['audio_language']
        language = request.form.get('language')
        # .capitalize() normalizes 'true'/'false' to 'True'/'False'
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()
        providers_list = get_providers()
        providers_auth = get_providers_auth()
        # Prefer the audio language from the movie's language profile over
        # the raw value stored on the movie row.
        audio_language_list = get_audio_profile_languages(movie_id=radarrId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = None
        try:
            result = download_subtitle(moviePath, language, audio_language, hi, forced, providers_list,
                                       providers_auth, sceneName, title, 'movie',
                                       profile_id=get_profile_id(movie_id=radarrId))
            if result is not None:
                # result tuple layout: 0=message, 1=path, 2=language code,
                # 3=provider, 4=score, 5=forced, 6=subs_id, 7=subs_path, 8=hi
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # action 1 = downloaded
                history_log_movie(1, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
                send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
            else:
                # Nothing downloaded: still refresh the movie in the UI.
                event_stream(type='movie', payload=radarrId)
        except OSError:
            pass
        return '', 204
    @authenticate
    def post(self):
        # Upload a subtitle file for the movie identified by 'radarrid'.
        # TODO: Support Multiply Upload
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        if sceneName is None: sceneName = 'None'
        title = movieInfo['title']
        audioLanguage = movieInfo['audio_language']
        language = request.form.get('language')
        forced = True if request.form.get('forced') == 'true' else False
        hi = True if request.form.get('hi') == 'true' else False
        subFile = request.files.get('file')
        # Reject files whose extension is not a known subtitle format.
        _, ext = os.path.splitext(subFile.filename)
        if ext not in SUBTITLE_EXTENSIONS:
            raise ValueError('A subtitle of an invalid format was uploaded.')
        try:
            result = manual_upload_subtitle(path=moviePath,
                                            language=language,
                                            forced=forced,
                                            hi=hi,
                                            title=title,
                                            scene_name=sceneName,
                                            media_type='movie',
                                            subtitle=subFile,
                                            audio_language=audioLanguage)
            if result is not None:
                # result tuple layout: 0=message, 1=path, 2=subs_path
                message = result[0]
                path = result[1]
                subs_path = result[2]
                if hi:
                    language_code = language + ":hi"
                elif forced:
                    language_code = language + ":forced"
                else:
                    language_code = language
                provider = "manual"
                # Manual uploads get the maximum score.
                score = 120
                # action 4 = manually uploaded
                history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
        except OSError:
            pass
        return '', 204
    @authenticate
    def delete(self):
        # Delete the subtitles file given in the 'path' form arg for the
        # movie identified by 'radarrid'.
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.path) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        language = request.form.get('language')
        forced = request.form.get('forced')
        hi = request.form.get('hi')
        subtitlesPath = request.form.get('path')
        subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath)
        result = delete_subtitles(media_type='movie',
                                  language=language,
                                  forced=forced,
                                  hi=hi,
                                  media_path=moviePath,
                                  subtitles_path=subtitlesPath,
                                  radarr_id=radarrId)
        # 202 when the deletion was accepted, 204 otherwise.
        if result:
            return '', 202
        else:
            return '', 204

View File

@ -0,0 +1,62 @@
# coding=utf-8
import operator
from flask import request, jsonify
from flask_restful import Resource
from functools import reduce
from database import get_exclusion_clause, TableMovies
from ..utils import authenticate, postprocessMovie
# GET: Get Wanted Movies
# GET: Get Wanted Movies
class MoviesWanted(Resource):
    @authenticate
    def get(self):
        """Return movies with missing subtitles (the 'wanted' list).

        Query args: radarrid[] to select specific movies (no pagination),
        otherwise start/length paginate the full wanted list, newest first.
        """
        radarrid = request.args.getlist("radarrid[]")
        wanted_conditions = [(TableMovies.missing_subtitles != '[]')]
        if len(radarrid) > 0:
            wanted_conditions.append((TableMovies.radarrId.in_(radarrid)))
        # Exclusion clause filters out unmonitored/excluded items per settings.
        wanted_conditions += get_exclusion_clause('movie')
        wanted_condition = reduce(operator.and_, wanted_conditions)
        if len(radarrid) > 0:
            result = TableMovies.select(TableMovies.title,
                                        TableMovies.missing_subtitles,
                                        TableMovies.radarrId,
                                        TableMovies.sceneName,
                                        TableMovies.failedAttempts,
                                        TableMovies.tags,
                                        TableMovies.monitored)\
                .where(wanted_condition)\
                .dicts()
        else:
            start = request.args.get('start') or 0
            length = request.args.get('length') or -1
            result = TableMovies.select(TableMovies.title,
                                        TableMovies.missing_subtitles,
                                        TableMovies.radarrId,
                                        TableMovies.sceneName,
                                        TableMovies.failedAttempts,
                                        TableMovies.tags,
                                        TableMovies.monitored)\
                .where(wanted_condition)\
                .order_by(TableMovies.rowid.desc())\
                .limit(length)\
                .offset(start)\
                .dicts()
        result = list(result)
        for item in result:
            postprocessMovie(item)
        # NOTE: 'total' is the global wanted count; it deliberately ignores
        # any radarrid[] filter applied above.
        count_conditions = [(TableMovies.missing_subtitles != '[]')]
        count_conditions += get_exclusion_clause('movie')
        count = TableMovies.select(TableMovies.monitored,
                                   TableMovies.tags)\
            .where(reduce(operator.and_, count_conditions))\
            .count()
        return jsonify(data=result, total=count)

View File

@ -0,0 +1,16 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .providers import Providers
from .providers_episodes import ProviderEpisodes
from .providers_movies import ProviderMovies
# Blueprint wiring: expose provider resources under /providers*.
api_bp_providers = Blueprint('api_providers', __name__)
api = Api(api_bp_providers)
api.add_resource(Providers, '/providers')
api.add_resource(ProviderMovies, '/providers/movies')
api.add_resource(ProviderEpisodes, '/providers/episodes')

View File

@ -0,0 +1,52 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from operator import itemgetter
from database import TableHistory, TableHistoryMovie
from get_providers import list_throttled_providers, reset_throttled_providers
from ..utils import authenticate, False_Keys
class Providers(Resource):
    """List subtitles providers, from throttle status or from history."""
    @authenticate
    def get(self):
        """Return providers status as JSON.

        When the 'history' query arg is truthy, return the distinct set of
        providers found in series and movies history; otherwise return the
        live throttled-providers list with status and retry time.
        """
        history = request.args.get('history')
        if history and history not in False_Keys:
            # Fix: peewee expressions must be combined with '&', not the
            # Python 'and' operator — 'and' evaluates to the right-hand
            # expression only, silently dropping the IS NOT NULL filter.
            providers = list(TableHistory.select(TableHistory.provider)
                             .where((TableHistory.provider != None) &
                                    (TableHistory.provider != "manual"))
                             .dicts())
            providers += list(TableHistoryMovie.select(TableHistoryMovie.provider)
                              .where((TableHistoryMovie.provider != None) &
                                     (TableHistoryMovie.provider != "manual"))
                              .dicts())
            providers_list = list(set([x['provider'] for x in providers]))
            providers_dicts = []
            for provider in providers_list:
                providers_dicts.append({
                    'name': provider,
                    'status': 'History',
                    'retry': '-'
                })
            return jsonify(data=sorted(providers_dicts, key=itemgetter('name')))
        throttled_providers = list_throttled_providers()
        providers = list()
        for provider in throttled_providers:
            # provider tuple: (name, throttle reason or None, retry time or "now")
            providers.append({
                "name": provider[0],
                "status": provider[1] if provider[1] is not None else "Good",
                "retry": provider[2] if provider[2] != "now" else "-"
            })
        return jsonify(data=providers)
    @authenticate
    def post(self):
        """Reset the throttled providers list when 'action' is 'reset'."""
        action = request.form.get('action')
        if action == 'reset':
            reset_throttled_providers()
            return '', 204
        return '', 400

View File

@ -0,0 +1,104 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
from get_subtitle import manual_search, manual_download_subtitle
from utils import history_log
from config import settings
from notifier import send_notifications
from list_subtitles import store_subtitles
from ..utils import authenticate
class ProviderEpisodes(Resource):
    # GET: manual search for available subtitles for one episode
    # POST: manual download of a selected subtitle for one episode
    @authenticate
    def get(self):
        # Manual Search
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name,
                                           TableShows.profileId) \
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
            .dicts() \
            .get()
        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        profileId = episodeInfo['profileId']
        if sceneName is None: sceneName = "None"
        providers_list = get_providers()
        providers_auth = get_providers_auth()
        data = manual_search(episodePath, profileId, providers_list, providers_auth, sceneName, title,
                             'series')
        if not data:
            data = []
        return jsonify(data=data)
    @authenticate
    def post(self):
        # Manual Download of the subtitle selected from a prior manual search.
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name) \
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
            .dicts() \
            .get()
        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        if sceneName is None: sceneName = "None"
        language = request.form.get('language')
        # .capitalize() normalizes 'true'/'false' to 'True'/'False'
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()
        selected_provider = request.form.get('provider')
        subtitle = request.form.get('subtitle')
        providers_auth = get_providers_auth()
        audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = 'None'
        try:
            result = manual_download_subtitle(episodePath, language, audio_language, hi, forced, subtitle,
                                              selected_provider, providers_auth, sceneName, title, 'series',
                                              profile_id=get_profile_id(episode_id=sonarrEpisodeId))
            if result is not None:
                # result tuple layout: 0=message, 1=path, 2=language code,
                # 3=provider, 4=score, 5=forced, 6=subs_id, 7=subs_path, 8=hi
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # action 2 = manually downloaded
                history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
                            subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
            return result, 201
        except OSError:
            pass
        return '', 204

View File

@ -0,0 +1,103 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from database import TableMovies, get_audio_profile_languages, get_profile_id
from helper import path_mappings
from get_providers import get_providers, get_providers_auth
from get_subtitle import manual_search, manual_download_subtitle
from utils import history_log_movie
from config import settings
from notifier import send_notifications_movie
from list_subtitles import store_subtitles_movie
from ..utils import authenticate
class ProviderMovies(Resource):
    # GET: manual search for available subtitles for one movie
    # POST: manual download of a selected subtitle for one movie
    @authenticate
    def get(self):
        # Manual Search
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.profileId) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()
        title = movieInfo['title']
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        profileId = movieInfo['profileId']
        if sceneName is None: sceneName = "None"
        providers_list = get_providers()
        providers_auth = get_providers_auth()
        data = manual_search(moviePath, profileId, providers_list, providers_auth, sceneName, title,
                             'movie')
        if not data:
            data = []
        return jsonify(data=data)
    @authenticate
    def post(self):
        # Manual Download of the subtitle selected from a prior manual search.
        radarrId = request.args.get('radarrid')
        movieInfo = TableMovies.select(TableMovies.title,
                                       TableMovies.path,
                                       TableMovies.sceneName,
                                       TableMovies.audio_language) \
            .where(TableMovies.radarrId == radarrId) \
            .dicts() \
            .get()
        title = movieInfo['title']
        moviePath = path_mappings.path_replace_movie(movieInfo['path'])
        sceneName = movieInfo['sceneName']
        if sceneName is None: sceneName = "None"
        audio_language = movieInfo['audio_language']
        language = request.form.get('language')
        # .capitalize() normalizes 'true'/'false' to 'True'/'False'
        hi = request.form.get('hi').capitalize()
        forced = request.form.get('forced').capitalize()
        selected_provider = request.form.get('provider')
        subtitle = request.form.get('subtitle')
        providers_auth = get_providers_auth()
        # Profile audio language (when available) overrides the value read
        # from the movie row above.
        audio_language_list = get_audio_profile_languages(movie_id=radarrId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = 'None'
        try:
            result = manual_download_subtitle(moviePath, language, audio_language, hi, forced, subtitle,
                                              selected_provider, providers_auth, sceneName, title, 'movie',
                                              profile_id=get_profile_id(movie_id=radarrId))
            if result is not None:
                # result tuple layout: 0=message, 1=path, 2=language code,
                # 3=provider, 4=score, 5=forced, 6=subs_id, 7=subs_path, 8=hi
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                # action 2 = manually downloaded
                history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications_movie(radarrId, message)
                store_subtitles_movie(path, moviePath)
        except OSError:
            pass
        return '', 204

View File

@ -0,0 +1,12 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .series import Series
# Blueprint wiring: expose the Series resource under /series.
api_bp_series = Blueprint('api_series', __name__)
api = Api(api_bp_series)
api.add_resource(Series, '/series')

114
bazarr/api/series/series.py Normal file
View File

@ -0,0 +1,114 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
import operator
from functools import reduce
from database import get_exclusion_clause, TableEpisodes, TableShows
from list_subtitles import list_missing_subtitles, series_scan_subtitles
from get_subtitle import series_download_subtitles, wanted_search_missing_subtitles_series
from ..utils import authenticate, postprocessSeries, None_Keys
from event_handler import event_stream
class Series(Resource):
    # Sonarr series endpoint:
    #   GET   -> list series (paginated, or filtered by seriesid[])
    #   POST  -> assign a languages profile to one or more series
    #   PATCH -> trigger subtitle-related actions (scan/search)
    @authenticate
    def get(self):
        """Return series as JSON, augmented with per-series episode counts.

        Query args: start/length for pagination (defaults 0 / -1 = all),
        or seriesid[] to select specific series (pagination is then ignored).
        'total' is always the unfiltered series count.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        seriesId = request.args.getlist('seriesid[]')
        count = TableShows.select().count()
        if len(seriesId) != 0:
            result = TableShows.select() \
                .where(TableShows.sonarrSeriesId.in_(seriesId)) \
                .order_by(TableShows.sortTitle).dicts()
        else:
            result = TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts()
        result = list(result)
        for item in result:
            postprocessSeries(item)
            # Add missing subtitles episode count
            episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']),
                                           (TableEpisodes.missing_subtitles != '[]')]
            episodes_missing_conditions += get_exclusion_clause('series')
            episodeMissingCount = TableEpisodes.select(TableShows.tags,
                                                       TableEpisodes.monitored,
                                                       TableShows.seriesType) \
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                .where(reduce(operator.and_, episodes_missing_conditions)) \
                .count()
            item.update({"episodeMissingCount": episodeMissingCount})
            # Add episode count
            episodeFileCount = TableEpisodes.select(TableShows.tags,
                                                    TableEpisodes.monitored,
                                                    TableShows.seriesType) \
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                .where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']) \
                .count()
            item.update({"episodeFileCount": episodeFileCount})
        return jsonify(data=result, total=count)
    @authenticate
    def post(self):
        """Assign languages profiles to series.

        Form args: parallel lists 'seriesid' and 'profileid'. A profile id in
        None_Keys clears the profile; a non-integer id returns 400. Missing
        subtitles are recomputed and SSE events emitted per series/episode.
        """
        seriesIdList = request.form.getlist('seriesid')
        profileIdList = request.form.getlist('profileid')
        for idx in range(len(seriesIdList)):
            seriesId = seriesIdList[idx]
            profileId = profileIdList[idx]
            if profileId in None_Keys:
                profileId = None
            else:
                try:
                    profileId = int(profileId)
                except Exception:
                    return '', 400
            TableShows.update({
                TableShows.profileId: profileId
            }) \
                .where(TableShows.sonarrSeriesId == seriesId) \
                .execute()
            # send_event=False: a single 'series' event is emitted just below
            list_missing_subtitles(no=seriesId, send_event=False)
            event_stream(type='series', payload=seriesId)
            episode_id_list = TableEpisodes \
                .select(TableEpisodes.sonarrEpisodeId) \
                .where(TableEpisodes.sonarrSeriesId == seriesId) \
                .dicts()
            for item in episode_id_list:
                event_stream(type='episode-wanted', payload=item['sonarrEpisodeId'])
        event_stream(type='badges')
        return '', 204
    @authenticate
    def patch(self):
        """Run an action for a series: 'scan-disk', 'search-missing', or the
        global 'search-wanted'. Unknown actions return 400."""
        seriesid = request.form.get('seriesid')
        action = request.form.get('action')
        if action == "scan-disk":
            series_scan_subtitles(seriesid)
            return '', 204
        elif action == "search-missing":
            series_download_subtitles(seriesid)
            return '', 204
        elif action == "search-wanted":
            wanted_search_missing_subtitles_series()
            return '', 204
        return '', 400

View File

@ -0,0 +1,14 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .subtitles import Subtitles
from .subtitles_info import SubtitleNameInfo
# Blueprint wiring: expose subtitles tools under /subtitles*.
api_bp_subtitles = Blueprint('api_subtitles', __name__)
api = Api(api_bp_subtitles)
api.add_resource(Subtitles, '/subtitles')
api.add_resource(SubtitleNameInfo, '/subtitles/info')

View File

@ -0,0 +1,72 @@
# coding=utf-8
import os
import sys
from flask import request
from flask_restful import Resource
from database import TableEpisodes, TableMovies
from helper import path_mappings
from ..utils import authenticate
from subsyncer import subsync
from utils import translate_subtitles_file, subtitles_apply_mods
from get_subtitle import store_subtitles, store_subtitles_movie
from config import settings
class Subtitles(Resource):
    # PATCH: apply an action ('sync', 'translate' or a subtitles mod name)
    # to an existing subtitles file.
    @authenticate
    def patch(self):
        """Apply the 'action' query arg to the subtitles file in the form data.

        Form args: 'language', 'path' (subtitles path), 'type' ('episode' or
        movie) and 'id' (Sonarr episode id or Radarr movie id). Returns
        200/404 for 'translate', 204 otherwise.
        """
        action = request.args.get('action')
        language = request.form.get('language')
        subtitles_path = request.form.get('path')
        media_type = request.form.get('type')
        id = request.form.get('id')
        if media_type == 'episode':
            subtitles_path = path_mappings.path_replace(subtitles_path)
            metadata = TableEpisodes.select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)\
                .where(TableEpisodes.sonarrEpisodeId == id)\
                .dicts()\
                .get()
            video_path = path_mappings.path_replace(metadata['path'])
        else:
            subtitles_path = path_mappings.path_replace_movie(subtitles_path)
            metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get()
            video_path = path_mappings.path_replace_movie(metadata['path'])
        if action == 'sync':
            # Synchronize subtitles timing against the video file.
            if media_type == 'episode':
                subsync.sync(video_path=video_path, srt_path=subtitles_path,
                             srt_lang=language, media_type='series', sonarr_series_id=metadata['sonarrSeriesId'],
                             sonarr_episode_id=int(id))
            else:
                subsync.sync(video_path=video_path, srt_path=subtitles_path,
                             srt_lang=language, media_type='movies', radarr_id=id)
        elif action == 'translate':
            dest_language = language
            forced = True if request.form.get('forced') == 'true' else False
            hi = True if request.form.get('hi') == 'true' else False
            result = translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
                                              to_lang=dest_language,
                                              forced=forced, hi=hi)
            if result:
                # Re-index subtitles on disk so the new translation shows up.
                if media_type == 'episode':
                    store_subtitles(path_mappings.path_replace_reverse(video_path), video_path)
                else:
                    store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path)
                return '', 200
            else:
                return '', 404
        else:
            # Any other action name is treated as a subtitles mod to apply.
            subtitles_apply_mods(language, subtitles_path, [action])
        # apply chmod if required
        chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
            'win') and settings.general.getboolean('chmod_enabled') else None
        if chmod:
            os.chmod(subtitles_path, chmod)
        return '', 204

View File

@ -0,0 +1,41 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from subliminal_patch.core import guessit
from ..utils import authenticate
class SubtitleNameInfo(Resource):
    """Guess episode/season/language metadata from subtitle filenames."""
    @authenticate
    def get(self):
        """Parse each 'filenames[]' query arg with guessit and return the
        guessed subtitle language, episode number and season number."""
        filenames = request.args.getlist('filenames[]')
        parsed = []
        for filename in filenames:
            guess = guessit(filename, options={'type': 'episode'})
            info = {'filename': filename}
            if 'subtitle_language' in guess:
                info['subtitle_language'] = str(guess['subtitle_language'])
            # Default to 0 when no episode number could be guessed.
            info['episode'] = 0
            episode_guess = guess.get('episode')
            if isinstance(episode_guess, list):
                # Multi-episode file: keep the first episode number only.
                if len(episode_guess):
                    info['episode'] = int(episode_guess[0])
            elif isinstance(episode_guess, (str, int)):
                # Single episode; cast defensively in case guessit gave a str.
                info['episode'] = int(episode_guess)
            info['season'] = int(guess['season']) if 'season' in guess else 0
            parsed.append(info)
        return jsonify(data=parsed)

View File

@ -0,0 +1,33 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .system import System
from .searches import Searches
from .account import SystemAccount
from .tasks import SystemTasks
from .logs import SystemLogs
from .status import SystemStatus
from .health import SystemHealth
from .releases import SystemReleases
from .settings import SystemSettings
from .languages import Languages
from .languages_profiles import LanguagesProfiles
from .notifications import Notifications
# Blueprint wiring: expose all system resources under /system*.
api_bp_system = Blueprint('api_system', __name__)
api = Api(api_bp_system)
api.add_resource(System, '/system')
api.add_resource(Searches, '/system/searches')
api.add_resource(SystemAccount, '/system/account')
api.add_resource(SystemTasks, '/system/tasks')
api.add_resource(SystemLogs, '/system/logs')
api.add_resource(SystemStatus, '/system/status')
api.add_resource(SystemHealth, '/system/health')
api.add_resource(SystemReleases, '/system/releases')
api.add_resource(SystemSettings, '/system/settings')
api.add_resource(Languages, '/system/languages')
api.add_resource(LanguagesProfiles, '/system/languages/profiles')
api.add_resource(Notifications, '/system/notifications')

View File

@ -0,0 +1,29 @@
# coding=utf-8
import gc
from flask import request, session
from flask_restful import Resource
from config import settings
from utils import check_credentials
class SystemAccount(Resource):
    """Login/logout endpoint, active only when form authentication is set."""
    # Intentionally unauthenticated: this is the endpoint that creates the session.
    def post(self):
        # Guard clause: reject unless form auth is configured.
        if settings.auth.type != 'form':
            return '', 405
        requested_action = request.args.get('action')
        if requested_action == 'logout':
            session.clear()
            gc.collect()
            return '', 204
        if requested_action == 'login':
            if check_credentials(request.form.get('username'), request.form.get('password')):
                session['logged_in'] = True
                return '', 204
        # Unknown action or bad credentials.
        return '', 401

View File

@ -0,0 +1,13 @@
# coding=utf-8
from flask import jsonify
from flask_restful import Resource
from ..utils import authenticate
from utils import get_health_issues
class SystemHealth(Resource):
    """Expose the current list of health issues."""
    @authenticate
    def get(self):
        """Return health issues as {'data': [...]}."""
        issues = get_health_issues()
        return jsonify(data=issues)

View File

@ -0,0 +1,54 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from operator import itemgetter
from ..utils import authenticate, False_Keys
from database import TableHistory, TableHistoryMovie, TableSettingsLanguages
from get_languages import alpha2_from_alpha3, language_from_alpha2
class Languages(Resource):
    """List languages: all configured ones, or those seen in history."""
    @authenticate
    def get(self):
        """Return languages as JSON.

        With a truthy 'history' query arg, return the distinct languages
        found in series/movies download history; otherwise return the
        languages settings table (name, code2, enabled as real booleans).
        """
        history = request.args.get('history')
        if history and history not in False_Keys:
            languages = list(TableHistory.select(TableHistory.language)
                             .where(TableHistory.language != None)
                             .dicts())
            languages += list(TableHistoryMovie.select(TableHistoryMovie.language)
                              .where(TableHistoryMovie.language != None)
                              .dicts())
            # History stores languages as "code[:modifier]"; keep the bare code.
            # (Renamed loop variable 'l' -> 'lang': 'l' is visually ambiguous.)
            languages_list = list(set([lang['language'].split(':')[0] for lang in languages]))
            languages_dicts = []
            for language in languages_list:
                code2 = None
                if len(language) == 2:
                    code2 = language
                elif len(language) == 3:
                    code2 = alpha2_from_alpha3(language)
                else:
                    continue
                if not any(x['code2'] == code2 for x in languages_dicts):
                    try:
                        languages_dicts.append({
                            'code2': code2,
                            'name': language_from_alpha2(code2),
                            # Compatibility: Use false temporarily
                            'enabled': False
                        })
                    except Exception:
                        # Fix: no bare 'except' — don't swallow SystemExit /
                        # KeyboardInterrupt; just skip unresolvable languages.
                        continue
            return jsonify(sorted(languages_dicts, key=itemgetter('name')))
        result = TableSettingsLanguages.select(TableSettingsLanguages.name,
                                               TableSettingsLanguages.code2,
                                               TableSettingsLanguages.enabled)\
            .order_by(TableSettingsLanguages.name).dicts()
        result = list(result)
        for item in result:
            # Convert the 0/1 integer flag stored in the DB to a boolean.
            item['enabled'] = item['enabled'] == 1
        return jsonify(result)

View File

@ -0,0 +1,13 @@
# coding=utf-8
from flask import jsonify
from flask_restful import Resource
from ..utils import authenticate
from database import get_profiles_list
class LanguagesProfiles(Resource):
    """Expose the configured languages profiles."""
    @authenticate
    def get(self):
        """Return the languages profiles list as JSON."""
        profiles = get_profiles_list()
        return jsonify(profiles)

41
bazarr/api/system/logs.py Normal file
View File

@ -0,0 +1,41 @@
# coding=utf-8
import io
import os
from flask import jsonify
from flask_restful import Resource
from ..utils import authenticate
from logger import empty_log
from get_args import args
class SystemLogs(Resource):
    # GET: parsed content of bazarr.log; DELETE: empty the log file
    @authenticate
    def get(self):
        """Parse bazarr.log ('|'-separated records) into JSON, newest first.

        Record layout: timestamp|type|module|message[|exception].
        """
        logs = []
        with io.open(os.path.join(args.config_dir, 'log', 'bazarr.log'), encoding='UTF-8') as file:
            raw_lines = file.read()
            lines = raw_lines.split('|\n')
            for line in lines:
                if line == '':
                    continue
                raw_message = line.split('|')
                raw_message_len = len(raw_message)
                if raw_message_len > 3:
                    log = dict()
                    log["timestamp"] = raw_message[0]
                    log["type"] = raw_message[1].rstrip()
                    log["message"] = raw_message[3]
                    if raw_message_len > 4 and raw_message[4] != '\n':
                        # Traceback field: strip quoting and widen indentation
                        # with em-spaces for display in the UI.
                        log['exception'] = raw_message[4].strip('\'').replace(' ', '\u2003\u2003')
                    logs.append(log)
            # Newest entries first.
            logs.reverse()
        return jsonify(data=logs)
    @authenticate
    def delete(self):
        """Truncate the log file."""
        empty_log()
        return '', 204

View File

@ -0,0 +1,27 @@
# coding=utf-8
import apprise
from flask import request
from flask_restful import Resource
from ..utils import authenticate
class Notifications(Resource):
    """Send a test notification through an Apprise URL."""
    @authenticate
    def patch(self):
        """Deliver a fixed test message to the 'url' given in the form data."""
        notification_url = request.form.get("url")
        # async_mode=False: deliver synchronously within this request.
        asset = apprise.AppriseAsset(async_mode=False)
        notifier = apprise.Apprise(asset=asset)
        notifier.add(notification_url)
        notifier.notify(
            title='Bazarr test notification',
            body='Test notification'
        )
        return '', 204

View File

@ -0,0 +1,47 @@
# coding=utf-8
import io
import json
import os
import logging
from flask import jsonify
from flask_restful import Resource
from ..utils import authenticate
from config import settings
from get_args import args
class SystemReleases(Resource):
    # GET: release notes parsed from the cached releases.txt file
    @authenticate
    def get(self):
        """Return releases relevant to the configured branch.

        On master, only stable releases (max 5) are kept. On other branches,
        entries are collected until a stable release has been included.
        Each entry is reduced to body lines, name, date, prerelease flag and
        whether it matches the running version.
        """
        filtered_releases = []
        try:
            with io.open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r', encoding='UTF-8') as f:
                releases = json.loads(f.read())
            for release in releases:
                if settings.general.branch == 'master' and not release['prerelease']:
                    filtered_releases.append(release)
                elif settings.general.branch != 'master' and any(not x['prerelease'] for x in filtered_releases):
                    # A stable release was already collected: skip older entries.
                    continue
                elif settings.general.branch != 'master':
                    filtered_releases.append(release)
            if settings.general.branch == 'master':
                filtered_releases = filtered_releases[:5]
            current_version = os.environ["BAZARR_VERSION"]
            for i, release in enumerate(filtered_releases):
                # Drop bullet markers and the leading title line from the body.
                body = release['body'].replace('- ', '').split('\n')[1:]
                filtered_releases[i] = {"body": body,
                                        "name": release['name'],
                                        "date": release['date'][:10],
                                        "prerelease": release['prerelease'],
                                        "current": release['name'].lstrip('v') == current_version}
        except Exception:
            logging.exception(
                'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
        return jsonify(data=filtered_releases)

View File

@ -0,0 +1,41 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from ..utils import authenticate
from config import settings
from database import TableShows, TableMovies
class Searches(Resource):
    """Search series and movies by title substring."""
    @authenticate
    def get(self):
        """Return series (if Sonarr is enabled) and movies (if Radarr is
        enabled) whose title contains the 'query' arg, ordered by title."""
        query = request.args.get('query')
        results = []
        if query:
            if settings.general.getboolean('use_sonarr'):
                # Matching series first, in title order.
                matching_series = TableShows.select(TableShows.title,
                                                    TableShows.sonarrSeriesId,
                                                    TableShows.year) \
                    .where(TableShows.title.contains(query)) \
                    .order_by(TableShows.title) \
                    .dicts()
                results.extend(list(matching_series))
            if settings.general.getboolean('use_radarr'):
                # Then matching movies, in title order.
                matching_movies = TableMovies.select(TableMovies.title,
                                                     TableMovies.radarrId,
                                                     TableMovies.year) \
                    .where(TableMovies.title.contains(query)) \
                    .order_by(TableMovies.title) \
                    .dicts()
                results.extend(list(matching_movies))
        return jsonify(results)

View File

@ -0,0 +1,106 @@
# coding=utf-8
import json
from flask import request, jsonify
from flask_restful import Resource
from ..utils import authenticate
from database import TableLanguagesProfiles, TableSettingsLanguages, TableShows, TableMovies, TableSettingsNotifier, \
update_profile_id_list
from event_handler import event_stream
from config import settings, save_settings, get_settings
from scheduler import scheduler
from list_subtitles import list_missing_subtitles, list_missing_subtitles_movies
class SystemSettings(Resource):
    @authenticate
    def get(self):
        """Return the full settings payload, with notification providers
        appended under data['notifications']['providers'].

        The 0/1 'enabled' flag from the database is converted to a boolean.
        """
        data = get_settings()
        notifications = TableSettingsNotifier.select().order_by(TableSettingsNotifier.name).dicts()
        notifications = list(notifications)
        for i, item in enumerate(notifications):
            item["enabled"] = item["enabled"] == 1
            notifications[i] = item
        data['notifications'] = dict()
        data['notifications']['providers'] = notifications
        return jsonify(data)
@authenticate
def post(self):
enabled_languages = request.form.getlist('languages-enabled')
if len(enabled_languages) != 0:
TableSettingsLanguages.update({
TableSettingsLanguages.enabled: 0
}).execute()
for code in enabled_languages:
TableSettingsLanguages.update({
TableSettingsLanguages.enabled: 1
})\
.where(TableSettingsLanguages.code2 == code)\
.execute()
event_stream("languages")
languages_profiles = request.form.get('languages-profiles')
if languages_profiles:
existing_ids = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId).dicts()
existing_ids = list(existing_ids)
existing = [x['profileId'] for x in existing_ids]
for item in json.loads(languages_profiles):
if item['profileId'] in existing:
# Update existing profiles
TableLanguagesProfiles.update({
TableLanguagesProfiles.name: item['name'],
TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
TableLanguagesProfiles.items: json.dumps(item['items']),
TableLanguagesProfiles.mustContain: item['mustContain'],
TableLanguagesProfiles.mustNotContain: item['mustNotContain'],
})\
.where(TableLanguagesProfiles.profileId == item['profileId'])\
.execute()
existing.remove(item['profileId'])
else:
# Add new profiles
TableLanguagesProfiles.insert({
TableLanguagesProfiles.profileId: item['profileId'],
TableLanguagesProfiles.name: item['name'],
TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
TableLanguagesProfiles.items: json.dumps(item['items']),
TableLanguagesProfiles.mustContain: item['mustContain'],
TableLanguagesProfiles.mustNotContain: item['mustNotContain'],
}).execute()
for profileId in existing:
# Unassign this profileId from series and movies
TableShows.update({
TableShows.profileId: None
}).where(TableShows.profileId == profileId).execute()
TableMovies.update({
TableMovies.profileId: None
}).where(TableMovies.profileId == profileId).execute()
# Remove deleted profiles
TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute()
update_profile_id_list()
event_stream("languages")
if settings.general.getboolean('use_sonarr'):
scheduler.add_job(list_missing_subtitles, kwargs={'send_event': False})
if settings.general.getboolean('use_radarr'):
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': False})
# Update Notification
notifications = request.form.getlist('notifications-providers')
for item in notifications:
item = json.loads(item)
TableSettingsNotifier.update({
TableSettingsNotifier.enabled: item['enabled'],
TableSettingsNotifier.url: item['url']
}).where(TableSettingsNotifier.name == item['name']).execute()
save_settings(zip(request.form.keys(), request.form.listvalues()))
event_stream("settings")
return '', 204

View File

@ -0,0 +1,27 @@
# coding=utf-8
import os
import platform
from flask import jsonify
from flask_restful import Resource
from ..utils import authenticate
from utils import get_sonarr_info, get_radarr_info
from get_args import args
from init import startTime
class SystemStatus(Resource):
    """Report runtime information about this Bazarr instance."""

    @authenticate
    def get(self):
        """Return versions, platform details, directories and start time."""
        status = {
            'bazarr_version': os.environ["BAZARR_VERSION"],
            'sonarr_version': get_sonarr_info.version(),
            'radarr_version': get_radarr_info.version(),
            'operating_system': platform.platform(),
            'python_version': platform.python_version(),
            # Two levels up from this module is the Bazarr install root.
            'bazarr_directory': os.path.dirname(os.path.dirname(__file__)),
            'bazarr_config_directory': args.config_dir,
            'start_time': startTime,
        }
        return jsonify(data=status)

View File

@ -0,0 +1,18 @@
# coding=utf-8
from flask import request
from flask_restful import Resource
from ..utils import authenticate
class System(Resource):
    """Process-control endpoint: shut down or restart Bazarr."""

    @authenticate
    def post(self):
        """Dispatch the ``action`` query argument; unknown actions are ignored."""
        # Deferred import; NOTE(review): presumably avoids a circular import
        # between the API and the web server module — confirm.
        from server import webserver

        handlers = {
            "shutdown": webserver.shutdown,
            "restart": webserver.restart,
        }
        handler = handlers.get(request.args.get('action'))
        if handler is not None:
            handler()
        return '', 204

View File

@ -0,0 +1,31 @@
# coding=utf-8
from flask import request, jsonify
from flask_restful import Resource
from ..utils import authenticate
from scheduler import scheduler
class SystemTasks(Resource):
    """List scheduled tasks and trigger immediate execution of one."""

    @authenticate
    def get(self):
        """Return the scheduler task list.

        When a ``taskid`` query argument matches a job, only that task is
        returned; an unknown ``taskid`` falls back to the full list
        (preserving the historical behavior).
        """
        taskid = request.args.get('taskid')
        task_list = scheduler.get_task_list()

        if taskid:
            # Stop at the first match instead of continuing to scan while
            # rebinding the very list being iterated (previous behavior).
            for item in task_list:
                if item['job_id'] == taskid:
                    task_list = [item]
                    break

        return jsonify(data=task_list)

    @authenticate
    def post(self):
        """Run the task identified by the ``taskid`` form field immediately."""
        taskid = request.form.get('taskid')
        scheduler.execute_job_now(taskid)
        return '', 204

239
bazarr/api/utils.py Normal file
View File

@ -0,0 +1,239 @@
# coding=utf-8
import ast
from functools import wraps
from flask import request, abort
from operator import itemgetter
from config import settings, base_url
from get_languages import language_from_alpha2, alpha3_from_alpha2
from database import get_audio_profile_languages, get_desired_languages
from helper import path_mappings
# Form/query values treated as "no value provided".
None_Keys = ['null', 'undefined', '', None]

# String values treated as boolean False when parsing form input.
False_Keys = ['False', 'false', '0']
def authenticate(actual_method):
    """Decorator enforcing API-key authentication on REST handlers.

    The key may arrive as an ``apikey`` query argument, an ``apikey`` form
    field, or an ``X-API-KEY`` header; any one matching the configured key
    grants access, otherwise the request is aborted with HTTP 401.
    """
    @wraps(actual_method)
    def wrapper(*args, **kwargs):
        expected_key = settings.auth.apikey
        supplied_keys = [
            request.args.get('apikey'),
            request.form.get('apikey'),
            request.headers.get('X-API-KEY'),
        ]
        if expected_key in supplied_keys:
            return actual_method(*args, **kwargs)
        return abort(401)
    return wrapper
def postprocess(item):
    """Normalize a raw database row (dict) for API output; mutates *item*.

    - drops the internal ``ffprobe_cache`` blob
    - decodes the stored ``tags`` repr string into a list
    - converts ``'True'``/``'False'``/``None`` column values to real booleans
    - expands the ``language`` string (``"code2[:forced|:hi]"``) into a dict
    """
    # Remove ffprobe_cache
    if 'ffprobe_cache' in item:
        del (item['ffprobe_cache'])

    # Parse tags (stored as the repr of a Python list).
    if 'tags' in item:
        if item['tags'] is None:
            item['tags'] = []
        else:
            item['tags'] = ast.literal_eval(item['tags'])

    if 'monitored' in item:
        if item['monitored'] is None:
            item['monitored'] = False
        else:
            item['monitored'] = item['monitored'] == 'True'

    # Fixed: the previous guard (`and item['hearing_impaired'] is not None`)
    # made the None -> False normalization below unreachable, so NULL columns
    # leaked through as None, unlike the 'monitored' handling just above.
    if 'hearing_impaired' in item:
        if item['hearing_impaired'] is None:
            item['hearing_impaired'] = False
        else:
            item['hearing_impaired'] = item['hearing_impaired'] == 'True'

    if 'language' in item:
        if item['language'] == 'None':
            item['language'] = None
        elif item['language'] is not None:
            # "code2" optionally suffixed with ":forced" or ":hi".
            splitted_language = item['language'].split(':')
            item['language'] = {"name": language_from_alpha2(splitted_language[0]),
                                "code2": splitted_language[0],
                                "code3": alpha3_from_alpha2(splitted_language[0]),
                                "forced": True if item['language'].endswith(':forced') else False,
                                "hi": True if item['language'].endswith(':hi') else False}
def postprocessSeries(item):
    """Normalize a raw series DB row for API output; mutates *item* in place."""
    postprocess(item)

    # Parse audio language
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(series_id=item['sonarrSeriesId'])

    # Rename alternateTitles -> alternativeTitles, decoding the stored repr.
    if 'alternateTitles' in item:
        stored_titles = item['alternateTitles']
        item['alternativeTitles'] = [] if stored_titles is None else ast.literal_eval(stored_titles)
        del item["alternateTitles"]

    # Parse seriesType
    if 'seriesType' in item and item['seriesType'] is not None:
        item['seriesType'] = item['seriesType'].capitalize()

    if 'path' in item:
        item['path'] = path_mappings.path_replace(item['path'])

    # map poster and fanart to server proxy
    if 'poster' in item:
        item['poster'] = f"{base_url}/images/series{item['poster']}" if item['poster'] else None

    if 'fanart' in item:
        item['fanart'] = f"{base_url}/images/series{item['fanart']}" if item['fanart'] else None
def postprocessEpisode(item):
    """Normalize a raw episode DB row for API output; mutates *item* in place."""
    postprocess(item)
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId'])

    # Decode the stored subtitles list; each entry is
    # ["code2[:forced|:hi]", "<path>"].
    if 'subtitles' in item:
        if item['subtitles'] is None:
            raw_subtitles = []
        else:
            raw_subtitles = ast.literal_eval(item['subtitles'])
        subtitles = []

        for subs in raw_subtitles:
            subtitle = subs[0].split(':')
            sub = {"name": language_from_alpha2(subtitle[0]),
                   "code2": subtitle[0],
                   "code3": alpha3_from_alpha2(subtitle[0]),
                   "path": path_mappings.path_replace(subs[1]),
                   "forced": False,
                   "hi": False}
            if len(subtitle) > 1:
                # A second token is either "forced" or "hi".
                sub["forced"] = True if subtitle[1] == 'forced' else False
                sub["hi"] = True if subtitle[1] == 'hi' else False
            subtitles.append(sub)

        item.update({"subtitles": subtitles})

    # Parse missing subtitles (list of "code2[:forced|:hi]" strings).
    if 'missing_subtitles' in item:
        if item['missing_subtitles'] is None:
            item['missing_subtitles'] = []
        else:
            item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
        for i, subs in enumerate(item['missing_subtitles']):
            subtitle = subs.split(':')
            item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
                                            "code2": subtitle[0],
                                            "code3": alpha3_from_alpha2(subtitle[0]),
                                            "forced": False,
                                            "hi": False}
            if len(subtitle) > 1:
                item['missing_subtitles'][i].update({
                    "forced": True if subtitle[1] == 'forced' else False,
                    "hi": True if subtitle[1] == 'hi' else False
                })

    # Expose scene_name under the camelCase key the API uses.
    if 'scene_name' in item:
        item["sceneName"] = item["scene_name"]
        del item["scene_name"]

    if 'path' in item and item['path']:
        # Provide mapped path
        item['path'] = path_mappings.path_replace(item['path'])
# TODO: Move
def postprocessMovie(item):
    """Normalize a raw movie DB row for API output; mutates *item* in place."""
    postprocess(item)
    # Parse audio language
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(movie_id=item['radarrId'])

    # Parse alternate titles (stored as the repr of a Python list).
    if 'alternativeTitles' in item:
        if item['alternativeTitles'] is None:
            item['alternativeTitles'] = []
        else:
            item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])

    # Parse failed attempts
    if 'failedAttempts' in item:
        if item['failedAttempts']:
            item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])

    # Parse subtitles; each stored entry is ["code2[:forced|:hi]", "<path>"].
    if 'subtitles' in item:
        if item['subtitles'] is None:
            item['subtitles'] = []
        else:
            item['subtitles'] = ast.literal_eval(item['subtitles'])
            for i, subs in enumerate(item['subtitles']):
                language = subs[0].split(':')
                item['subtitles'][i] = {"path": path_mappings.path_replace_movie(subs[1]),
                                        "name": language_from_alpha2(language[0]),
                                        "code2": language[0],
                                        "code3": alpha3_from_alpha2(language[0]),
                                        "forced": False,
                                        "hi": False}
                if len(language) > 1:
                    # A second token is either "forced" or "hi".
                    item['subtitles'][i].update({
                        "forced": True if language[1] == 'forced' else False,
                        "hi": True if language[1] == 'hi' else False
                    })

        # Optionally hide embedded subtitles outside the desired languages
        # (entries with a real path are always kept).
        if settings.general.getboolean('embedded_subs_show_desired'):
            desired_lang_list = get_desired_languages(item['profileId'])
            item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]

        item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))

    # Parse missing subtitles (list of "code2[:forced|:hi]" strings).
    if 'missing_subtitles' in item:
        if item['missing_subtitles'] is None:
            item['missing_subtitles'] = []
        else:
            item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
        for i, subs in enumerate(item['missing_subtitles']):
            language = subs.split(':')
            item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]),
                                            "code2": language[0],
                                            "code3": alpha3_from_alpha2(language[0]),
                                            "forced": False,
                                            "hi": False}
            if len(language) > 1:
                item['missing_subtitles'][i].update({
                    "forced": True if language[1] == 'forced' else False,
                    "hi": True if language[1] == 'hi' else False
                })

    # Provide mapped path
    if 'path' in item:
        if item['path']:
            item['path'] = path_mappings.path_replace_movie(item['path'])

    if 'subtitles_path' in item:
        # Provide mapped subtitles path
        item['subtitles_path'] = path_mappings.path_replace_movie(item['subtitles_path'])

    # map poster and fanart to server proxy
    if 'poster' in item:
        poster = item['poster']
        item['poster'] = f"{base_url}/images/movies{poster}" if poster else None

    if 'fanart' in item:
        fanart = item['fanart']
        item['fanart'] = f"{base_url}/images/movies{fanart}" if fanart else None

View File

@ -0,0 +1,12 @@
# coding=utf-8
from flask import Blueprint
from flask_restful import Api
from .plex import WebHooksPlex
# Blueprint grouping the webhook endpoints of the REST API.
api_bp_webhooks = Blueprint('api_webhooks', __name__)
api = Api(api_bp_webhooks)

# Receiver for Plex webhooks (playback events).
api.add_resource(WebHooksPlex, '/webhooks/plex')

View File

@ -0,0 +1,76 @@
# coding=utf-8
import json
import requests
import os
import re
from flask import request
from flask_restful import Resource
from bs4 import BeautifulSoup as bso
from database import TableEpisodes, TableShows, TableMovies
from get_subtitle import episode_download_subtitles, movies_download_subtitles
from ..utils import authenticate
class WebHooksPlex(Resource):
    """Plex webhook receiver that triggers subtitle searches on playback."""

    @authenticate
    def post(self):
        """Handle a Plex ``media.play`` event.

        Resolves the played item to a Sonarr episode or Radarr movie via its
        IMDB id and dispatches a subtitle search for it.

        Returns 204 for ignored events, 404 when the item cannot be resolved,
        and 200 once a search has been dispatched.
        """
        json_webhook = request.form.get('payload')
        parsed_json_webhook = json.loads(json_webhook)

        event = parsed_json_webhook['event']
        if event not in ['media.play']:
            return '', 204

        media_type = parsed_json_webhook['Metadata']['type']

        if media_type == 'episode':
            season = parsed_json_webhook['Metadata']['parentIndex']
            episode = parsed_json_webhook['Metadata']['index']
        else:
            season = episode = None

        # Collect provider ids (e.g. "imdb://tt123" -> {'imdb': 'tt123'}).
        ids = []
        for item in parsed_json_webhook['Metadata']['Guid']:
            splitted_id = item['id'].split('://')
            if len(splitted_id) == 2:
                ids.append({splitted_id[0]: splitted_id[1]})
        if not ids:
            return '', 404

        if media_type == 'episode':
            try:
                episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
                # Plex only supplies the episode id; scrape the parent series
                # id from the episode's IMDB page. A timeout keeps a slow
                # upstream from hanging the webhook handler indefinitely.
                r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
                                 timeout=15,
                                 headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
                soup = bso(r.content, "html.parser")
                series_imdb_id = soup.find(
                    'a', {'class': re.compile(r'SeriesParentLink__ParentTextLink')})['href'].split('/')[2]
            except Exception:
                # Narrowed from a bare `except:` (which also caught
                # SystemExit/KeyboardInterrupt): a missing imdb id, a network
                # failure or an IMDB layout change all resolve to "not found".
                return '', 404
            else:
                sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
                    .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                    .where(TableShows.imdbId == series_imdb_id,
                           TableEpisodes.season == season,
                           TableEpisodes.episode == episode) \
                    .dicts() \
                    .get()

                if sonarrEpisodeId:
                    episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
        else:
            try:
                movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
            except Exception:
                # No imdb id among the Guid entries (narrowed from bare except).
                return '', 404
            else:
                radarrId = TableMovies.select(TableMovies.radarrId) \
                    .where(TableMovies.imdbId == movie_imdb_id) \
                    .dicts() \
                    .get()
                if radarrId:
                    movies_download_subtitles(no=radarrId['radarrId'])

        return '', 200

View File

@ -28,7 +28,8 @@ def create_app():
else:
app.config["DEBUG"] = False
socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*', async_mode='threading')
socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*',
async_mode='threading', allow_upgrades=False, transports='polling')
return app

View File

@ -193,7 +193,7 @@ def update_cleaner(zipfile, bazarr_dir, config_dir):
'^venv' + separator,
'^WinPython' + separator,
separator + '__pycache__' + separator + '$']
if os.path.abspath(bazarr_dir) in os.path.abspath(config_dir):
if os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower():
dir_to_ignore.append('^' + os.path.relpath(config_dir, bazarr_dir) + os.path.sep)
dir_to_ignore_regex = re.compile('(?:% s)' % '|'.join(dir_to_ignore))
logging.debug('BAZARR upgrade leftover cleaner will ignore directories matching this regex: '

View File

@ -57,6 +57,8 @@ defaults = {
'ignore_vobsub_subs': 'False',
'ignore_ass_subs': 'False',
'adaptive_searching': 'False',
'adaptive_searching_delay': '3w',
'adaptive_searching_delta': '1w',
'enabled_providers': '[]',
'multithreading': 'True',
'chmod_enabled': 'False',
@ -92,7 +94,8 @@ defaults = {
'episodes_sync': '60',
'excluded_tags': '[]',
'excluded_series_types': '[]',
'use_ffprobe_cache': 'True'
'use_ffprobe_cache': 'True',
'exclude_season_zero': 'False'
},
'radarr': {
'ip': '127.0.0.1',
@ -132,7 +135,8 @@ defaults = {
},
'addic7ed': {
'username': '',
'password': ''
'password': '',
'vip': 'False'
},
'podnapisi': {
'verify_ssl': 'True'
@ -190,6 +194,10 @@ defaults = {
'approved_only': 'False',
'multithreading': 'True'
},
'embeddedsubtitles': {
'include_ass': 'True',
'include_srt': 'True',
},
'subsync': {
'use_subsync': 'False',
'use_subsync_threshold': 'False',
@ -388,12 +396,12 @@ def save_settings(settings_items):
configure_proxy = True
if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored',
'settings-sonarr-excluded_series_types', 'settings.radarr.excluded_tags',
'settings-radarr-only_monitored']:
'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero',
'settings.radarr.excluded_tags', 'settings-radarr-only_monitored']:
exclusion_updated = True
if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored',
'settings-sonarr-excluded_series_types']:
'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero']:
sonarr_exclusion_updated = True
if key in ['settings.radarr.excluded_tags', 'settings-radarr-only_monitored']:
@ -463,8 +471,10 @@ def save_settings(settings_items):
configure_captcha_func()
if update_schedule:
from api import scheduler
from scheduler import scheduler
from event_handler import event_stream
scheduler.update_configurable_tasks()
event_stream(type='task')
if sonarr_changed:
from signalr_client import sonarr_signalr_client

View File

@ -136,6 +136,8 @@ class TableLanguagesProfiles(BaseModel):
items = TextField()
name = TextField()
profileId = AutoField()
mustContain = TextField(null=True)
mustNotContain = TextField(null=True)
class Meta:
table_name = 'table_languages_profiles'
@ -329,7 +331,9 @@ def migrate_db():
migrator.add_column('table_history_movie', 'provider', TextField(null=True)),
migrator.add_column('table_history_movie', 'score', TextField(null=True)),
migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)),
migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True))
migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)),
migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)),
migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)),
)
@ -386,6 +390,10 @@ def get_exclusion_clause(exclusion_type):
for item in typesList:
where_clause.append((TableShows.seriesType != item))
exclude_season_zero = settings.sonarr.getboolean('exclude_season_zero')
if exclude_season_zero:
where_clause.append((TableEpisodes.season != 0))
return where_clause
@ -394,10 +402,16 @@ def update_profile_id_list():
profile_id_list = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
TableLanguagesProfiles.cutoff,
TableLanguagesProfiles.items).dicts()
TableLanguagesProfiles.items,
TableLanguagesProfiles.mustContain,
TableLanguagesProfiles.mustNotContain).dicts()
profile_id_list = list(profile_id_list)
for profile in profile_id_list:
profile['items'] = json.loads(profile['items'])
profile['mustContain'] = ast.literal_eval(profile['mustContain']) if profile['mustContain'] else \
profile['mustContain']
profile['mustNotContain'] = ast.literal_eval(profile['mustNotContain']) if profile['mustNotContain'] else \
profile['mustNotContain']
def get_profiles_list(profile_id=None):
@ -422,7 +436,7 @@ def get_desired_languages(profile_id):
if profile_id and profile_id != 'null':
for profile in profile_id_list:
profileId, name, cutoff, items = profile.values()
profileId, name, cutoff, items, mustContain, mustNotContain = profile.values()
if profileId == int(profile_id):
languages = [x['language'] for x in items]
break
@ -438,7 +452,7 @@ def get_profile_id_name(profile_id):
if profile_id and profile_id != 'null':
for profile in profile_id_list:
profileId, name, cutoff, items = profile.values()
profileId, name, cutoff, items, mustContain, mustNotContain = profile.values()
if profileId == int(profile_id):
name_from_id = name
break
@ -455,7 +469,7 @@ def get_profile_cutoff(profile_id):
if profile_id and profile_id != 'null':
cutoff_language = []
for profile in profile_id_list:
profileId, name, cutoff, items = profile.values()
profileId, name, cutoff, items, mustContain, mustNotContain = profile.values()
if cutoff:
if profileId == int(profile_id):
for item in items:
@ -498,6 +512,22 @@ def get_audio_profile_languages(series_id=None, episode_id=None, movie_id=None):
return audio_languages
def get_profile_id(series_id=None, episode_id=None, movie_id=None):
if series_id:
profileId = TableShows.get(TableShows.sonarrSeriesId == series_id).profileId
elif episode_id:
profileId = TableShows.select(TableShows.profileId)\
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId))\
.where(TableEpisodes.sonarrEpisodeId == episode_id)\
.get().profileId
elif movie_id:
profileId = TableMovies.get(TableMovies.radarrId == movie_id).profileId
else:
return None
return profileId
def convert_list_to_clause(arr: list):
if isinstance(arr, list):
return f"({','.join(str(x) for x in arr)})"

View File

@ -8,11 +8,11 @@ import pretty
import time
import socket
import requests
import ast
from get_args import args
from config import settings, get_array_from
from event_handler import event_stream
from utils import get_binary
from subliminal_patch.exceptions import TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked
from subliminal.providers.opensubtitles import DownloadLimitReached
from subliminal.exceptions import DownloadLimitExceeded, ServiceUnavailable
@ -126,6 +126,7 @@ def get_providers_auth():
'addic7ed' : {
'username': settings.addic7ed.username,
'password': settings.addic7ed.password,
'is_vip': settings.addic7ed.getboolean('vip'),
},
'opensubtitles' : {
'username' : settings.opensubtitles.username,
@ -198,6 +199,13 @@ def get_providers_auth():
'email': settings.ktuvit.email,
'hashed_password': settings.ktuvit.hashed_password,
},
'embeddedsubtitles': {
'include_ass': settings.embeddedsubtitles.getboolean('include_ass'),
'include_srt': settings.embeddedsubtitles.getboolean('include_srt'),
'cache_dir': os.path.join(args.config_dir, "cache"),
'ffprobe_path': get_binary("ffprobe"),
'ffmpeg_path': get_binary("ffmpeg"),
}
}
@ -315,12 +323,17 @@ def reset_throttled_providers():
def get_throttled_providers():
providers = {}
if os.path.exists(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')):
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as handle:
providers = handle.read()
if not providers:
providers = {}
return providers
try:
if os.path.exists(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')):
with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as \
handle:
providers = eval(handle.read())
except:
# set empty content in throttled_providers.dat
logging.error("Invalid content in throttled_providers.dat. Resetting")
set_throttled_providers(providers)
finally:
return providers
def set_throttled_providers(data):
@ -328,12 +341,6 @@ def set_throttled_providers(data):
handle.write(data)
try:
tp = eval(str(get_throttled_providers()))
if not isinstance(tp, dict):
raise ValueError('tp should be a dict')
except Exception:
logging.error("Invalid content in throttled_providers.dat. Resetting")
# set empty content in throttled_providers.dat
set_throttled_providers('')
tp = eval(str(get_throttled_providers()))
tp = get_throttled_providers()
if not isinstance(tp, dict):
raise ValueError('tp should be a dict')

View File

@ -117,10 +117,10 @@ def update_series(send_event=True):
logging.debug('BAZARR unable to insert this series into the database:',
path_mappings.path_replace(added_series['path']))
if send_event:
event_stream(type='series', action='update', payload=added_series['sonarrSeriesId'])
if send_event:
event_stream(type='series', action='update', payload=added_series['sonarrSeriesId'])
logging.debug('BAZARR All series synced from Sonarr into database.')
logging.debug('BAZARR All series synced from Sonarr into database.')
def update_one_series(series_id, action):

View File

@ -84,7 +84,7 @@ def get_video(path, title, sceneName, providers=None, media_type="movie"):
def download_subtitle(path, language, audio_language, hi, forced, providers, providers_auth, sceneName, title,
media_type, forced_minimum_score=None, is_upgrade=False):
media_type, forced_minimum_score=None, is_upgrade=False, profile_id=None):
# fixme: supply all missing languages, not only one, to hit providers only once who support multiple languages in
# one query
@ -158,6 +158,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
compute_score=compute_score,
throttle_time=None, # fixme
blacklist=get_blacklist(media_type=media_type),
ban_list=get_ban_list(profile_id),
throttle_callback=provider_throttle,
score_obj=handler,
pre_download_hook=None, # fixme
@ -216,6 +217,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
action = "upgraded"
else:
action = "downloaded"
percent_score = round(subtitle.score * 100 / max_score, 2)
message = downloaded_language + modifier_string + " subtitles " + action + " from " + \
downloaded_provider + " with a score of " + str(percent_score) + "%."
@ -229,6 +231,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
sonarr_series_id=episode_metadata['sonarrSeriesId'],
@ -241,6 +244,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
radarr_id=movie_metadata['radarrId'])
@ -358,6 +362,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title,
providers=providers,
provider_configs=providers_auth,
blacklist=get_blacklist(media_type=media_type),
ban_list=get_ban_list(profileId),
throttle_callback=provider_throttle,
language_hook=None) # fixme
@ -372,6 +377,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title,
providers=['subscene'],
provider_configs=providers_auth,
blacklist=get_blacklist(media_type=media_type),
ban_list=get_ban_list(profileId),
throttle_callback=provider_throttle,
language_hook=None) # fixme
providers_auth['subscene']['only_foreign'] = False
@ -415,9 +421,10 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title,
score, score_without_hash = compute_score(matches, s, video, hearing_impaired=initial_hi, score_obj=handler)
if 'hash' not in matches:
not_matched = scores - matches
s.score = score_without_hash
else:
s.score = score
not_matched = set()
s.score = score_without_hash
if s.hearing_impaired == initial_hi:
matches.add('hearing_impaired')
@ -462,7 +469,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title,
def manual_download_subtitle(path, language, audio_language, hi, forced, subtitle, provider, providers_auth, sceneName,
title, media_type):
title, media_type, profile_id):
logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path)
if settings.general.getboolean('utf8_encode'):
@ -494,6 +501,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
provider_configs=providers_auth,
pool_class=provider_pool(),
blacklist=get_blacklist(media_type=media_type),
ban_list=get_ban_list(profile_id),
throttle_callback=provider_throttle)
logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
else:
@ -552,6 +560,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=score,
sonarr_series_id=episode_metadata['sonarrSeriesId'],
@ -564,6 +573,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=score, radarr_id=movie_metadata['radarrId'])
@ -696,7 +706,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'],
percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'], forced=forced,
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
else:
movie_metadata = TableMovies.select(TableMovies.radarrId)\
@ -706,7 +716,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
series_id = ""
episode_id = movie_metadata['radarrId']
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, radarr_id=movie_metadata['radarrId'])
percent_score=100, radarr_id=movie_metadata['radarrId'], forced=forced)
if use_postprocessing :
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language,
@ -755,12 +765,13 @@ def series_download_subtitles(no):
"ignored because of monitored status, series type or series tags: {}".format(no))
return
providers_list = get_providers()
providers_auth = get_providers_auth()
count_episodes_details = len(episodes_details)
for i, episode in enumerate(episodes_details):
providers_list = get_providers()
if providers_list:
show_progress(id='series_search_progress_{}'.format(no),
header='Searching missing subtitles...',
@ -845,10 +856,11 @@ def episode_download_subtitles(no, send_progress=False):
logging.debug("BAZARR no episode with that sonarrEpisodeId can be found in database:", str(no))
return
providers_list = get_providers()
providers_auth = get_providers_auth()
for episode in episodes_details:
providers_list = get_providers()
if providers_list:
if send_progress:
show_progress(id='episode_search_progress_{}'.format(no),
@ -929,7 +941,6 @@ def movies_download_subtitles(no):
else:
movie = movies[0]
providers_list = get_providers()
providers_auth = get_providers_auth()
if ast.literal_eval(movie['missing_subtitles']):
@ -938,15 +949,17 @@ def movies_download_subtitles(no):
count_movie = 0
for i, language in enumerate(ast.literal_eval(movie['missing_subtitles'])):
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles)\
.where(TableMovies.radarrId == movie['radarrId'])\
.dicts()\
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
providers_list = get_providers()
if providers_list:
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == movie['radarrId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
show_progress(id='movie_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name=movie['title'],
@ -1008,77 +1021,71 @@ def wanted_download_subtitles(sonarr_episode_id):
.dicts()
episodes_details = list(episodes_details)
providers_list = get_providers()
providers_auth = get_providers_auth()
for episode in episodes_details:
attempt = episode['failedAttempts']
if type(attempt) == str:
attempt = ast.literal_eval(attempt)
for language in ast.literal_eval(episode['missing_subtitles']):
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
providers_list = get_providers()
if attempt is None:
attempt = []
attempt.append([language, time.time()])
else:
att = list(zip(*attempt))[0]
if language not in att:
attempt.append([language, time.time()])
if providers_list:
for language in ast.literal_eval(episode['missing_subtitles']):
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
TableEpisodes.update({TableEpisodes.failedAttempts: str(attempt)})\
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId'])\
.execute()
if is_search_active(desired_language=language, attempt_string=episode['failedAttempts']):
TableEpisodes.update({TableEpisodes.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=episode['failedAttempts'])}) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.execute()
for i in range(len(attempt)):
if attempt[i][0] == language:
if search_active(attempt[i][1]):
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
result = download_subtitle(path_mappings.path_replace(episode['path']),
language.split(':')[0],
audio_language,
"True" if language.endswith(':hi') else "False",
"True" if language.endswith(':forced') else "False",
providers_list,
providers_auth,
str(episode['scene_name']),
episode['title'],
'series')
if result is not None:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
language_code, provider, score, subs_id, subs_path)
event_stream(type='series', action='update', payload=episode['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId'])
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
logging.debug(
'BAZARR Search is not active for episode ' + episode['path'] + ' Language: ' + attempt[i][
0])
audio_language = 'None'
result = download_subtitle(path_mappings.path_replace(episode['path']),
language.split(':')[0],
audio_language,
"True" if language.endswith(':hi') else "False",
"True" if language.endswith(':forced') else "False",
providers_list,
providers_auth,
str(episode['scene_name']),
episode['title'],
'series')
if result is not None:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
language_code, provider, score, subs_id, subs_path)
event_stream(type='series', action='update', payload=episode['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId'])
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
else:
logging.debug(
f"BAZARR Search is throttled by adaptive search for this episode {episode['path']} and "
f"language: {language}")
else:
logging.info("BAZARR All providers are throttled")
break
def wanted_download_subtitles_movie(radarr_id):
@ -1093,76 +1100,69 @@ def wanted_download_subtitles_movie(radarr_id):
.dicts()
movies_details = list(movies_details)
providers_list = get_providers()
providers_auth = get_providers_auth()
for movie in movies_details:
attempt = movie['failedAttempts']
if type(attempt) == str:
attempt = ast.literal_eval(attempt)
for language in ast.literal_eval(movie['missing_subtitles']):
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == movie['radarrId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
providers_list = get_providers()
if attempt is None:
attempt = []
attempt.append([language, time.time()])
else:
att = list(zip(*attempt))[0]
if language not in att:
attempt.append([language, time.time()])
if providers_list:
for language in ast.literal_eval(movie['missing_subtitles']):
# confirm if language is still missing or if cutoff have been reached
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.radarrId == movie['radarrId']) \
.dicts() \
.get()
if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
continue
TableMovies.update({TableMovies.failedAttempts: str(attempt)})\
.where(TableMovies.radarrId == movie['radarrId'])\
.execute()
if is_search_active(desired_language=language, attempt_string=movie['failedAttempts']):
TableMovies.update({TableMovies.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=movie['failedAttempts'])}) \
.where(TableMovies.radarrId == movie['radarrId']) \
.execute()
for i in range(len(attempt)):
if attempt[i][0] == language:
if search_active(attempt[i][1]) is True:
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
result = download_subtitle(path_mappings.path_replace_movie(movie['path']),
language.split(':')[0],
audio_language,
"True" if language.endswith(':hi') else "False",
"True" if language.endswith(':forced') else "False",
providers_list,
providers_auth,
str(movie['sceneName']),
movie['title'],
'movie')
if result is not None:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score,
subs_id, subs_path)
event_stream(type='movie-wanted', action='delete', payload=movie['radarrId'])
send_notifications_movie(movie['radarrId'], message)
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
logging.info(
'BAZARR Search is not active for this Movie ' + movie['path'] + ' Language: ' + attempt[i][
0])
audio_language = 'None'
result = download_subtitle(path_mappings.path_replace_movie(movie['path']),
language.split(':')[0],
audio_language,
"True" if language.endswith(':hi') else "False",
"True" if language.endswith(':forced') else "False",
providers_list,
providers_auth,
str(movie['sceneName']),
movie['title'],
'movie')
if result is not None:
message = result[0]
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
elif forced:
language_code = result[2] + ":forced"
else:
language_code = result[2]
provider = result[3]
score = result[4]
subs_id = result[6]
subs_path = result[7]
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score,
subs_id, subs_path)
event_stream(type='movie-wanted', action='delete', payload=movie['radarrId'])
send_notifications_movie(movie['radarrId'], message)
else:
logging.info(f"BAZARR Search is throttled by adaptive search for this movie {movie['path']} and "
f"language: {language}")
else:
logging.info("BAZARR All providers are throttled")
break
def wanted_search_missing_subtitles_series():
@ -1236,25 +1236,6 @@ def wanted_search_missing_subtitles_movies():
logging.info('BAZARR Finished searching for missing Movies Subtitles. Check History for more information.')
def search_active(timestamp):
if settings.general.getboolean('adaptive_searching'):
search_deadline = timedelta(weeks=3)
search_delta = timedelta(weeks=1)
aa = datetime.fromtimestamp(float(timestamp))
attempt_datetime = datetime.strptime(str(aa).split(".")[0], '%Y-%m-%d %H:%M:%S')
attempt_search_deadline = attempt_datetime + search_deadline
today = datetime.today()
attempt_age_in_days = (today.date() - attempt_search_deadline.date()).days
if today.date() <= attempt_search_deadline.date():
return True
elif attempt_age_in_days % search_delta.days == 0:
return True
else:
return False
else:
return True
def convert_to_guessit(guessit_key, attr_from_db):
try:
return guessit(attr_from_db)[guessit_key]
@ -1490,11 +1471,12 @@ def upgrade_subtitles():
count_movie_to_upgrade = len(movies_to_upgrade)
providers_list = get_providers()
providers_auth = get_providers_auth()
if settings.general.getboolean('use_sonarr'):
for i, episode in enumerate(episodes_to_upgrade):
providers_list = get_providers()
show_progress(id='upgrade_episodes_progress',
header='Upgrading episodes subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['seriesTitle'],
@ -1504,8 +1486,7 @@ def upgrade_subtitles():
value=i,
count=count_episode_to_upgrade)
providers = get_providers()
if not providers:
if not providers_list:
logging.info("BAZARR All providers are throttled")
return
if episode['language'].endswith('forced'):
@ -1562,17 +1543,15 @@ def upgrade_subtitles():
if settings.general.getboolean('use_radarr'):
for i, movie in enumerate(movies_to_upgrade):
providers_list = get_providers()
show_progress(id='upgrade_movies_progress',
header='Upgrading movies subtitles...',
name=movie['title'],
value=i,
count=count_movie_to_upgrade)
providers = get_providers()
if not providers:
logging.info("BAZARR All providers are throttled")
return
if not providers:
if not providers_list:
logging.info("BAZARR All providers are throttled")
return
if movie['language'].endswith('forced'):
@ -1660,9 +1639,15 @@ def postprocessing(command, path):
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score, sonarr_series_id=None,
def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_score, sonarr_series_id=None,
sonarr_episode_id=None, radarr_id=None):
if settings.subsync.getboolean('use_subsync'):
if forced:
logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.')
elif not settings.subsync.getboolean('use_subsync'):
logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.')
else:
logging.debug(f'BAZARR automatic syncing is enabled in settings. We\'ll try to sync this '
f'subtitles: {srt_path}.')
if media_type == 'series':
use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold')
subsync_threshold = settings.subsync.subsync_threshold
@ -1694,6 +1679,7 @@ def _get_lang_obj(alpha3):
return sub.subzero_language()
def _get_scores(media_type, min_movie=None, min_ep=None):
series = "series" == media_type
handler = series_score if series else movie_score
@ -1701,3 +1687,154 @@ def _get_scores(media_type, min_movie=None, min_ep=None):
min_ep = min_ep or (240 * 100 / handler.max_score)
min_score_ = int(min_ep if series else min_movie)
return handler.get_scores(min_score_)
def get_ban_list(profile_id):
if profile_id:
profile = get_profiles_list(profile_id)
if profile:
return {'must_contain': profile['mustContain'] or [],
'must_not_contain': profile['mustNotContain'] or []}
return None
def is_search_active(desired_language, attempt_string):
"""
Function to test if it's time to search again after a previous attempt matching the desired language. For 3 weeks,
we search on a scheduled basis but after 3 weeks we start searching only once a week.
@param desired_language: 2 letters language to search for in attempts
@type desired_language: str
@param attempt_string: string representation of a list of lists from database column failedAttempts
@type attempt_string: str
@return: return True if it's time to search again and False if not
@rtype: bool
"""
if settings.general.getboolean('adaptive_searching'):
logging.debug("Adaptive searching is enable, we'll see if it's time to search again...")
try:
# let's try to get a list of lists from the string representation in database
attempts = ast.literal_eval(attempt_string)
if type(attempts) is not list:
# attempts should be a list if not, it's malformed or None
raise ValueError
except ValueError:
logging.debug("Adaptive searching: attempts is malformed. As a failsafe, search will run.")
return True
if not len(attempts):
logging.debug("Adaptive searching: attempts list is empty, search will run.")
return True
# get attempts matching the desired language and sort them by timestamp ascending
matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1])
if not len(matching_attempts):
logging.debug("Adaptive searching: there's no attempts matching desired language, search will run.")
return True
else:
logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}")
# try to get the initial and latest search timestamp from matching attempts
initial_search_attempt = matching_attempts[0]
latest_search_attempt = matching_attempts[-1]
# try to parse the timestamps for those attempts
try:
initial_search_timestamp = datetime.fromtimestamp(initial_search_attempt[1])
latest_search_timestamp = datetime.fromtimestamp(latest_search_attempt[1])
except (OverflowError, ValueError, OSError):
logging.debug("Adaptive searching: unable to parse initial and latest search timestamps, search will run.")
return True
else:
logging.debug(f"Adaptive searching: initial search date for {desired_language} is "
f"{initial_search_timestamp}")
logging.debug(f"Adaptive searching: latest search date for {desired_language} is {latest_search_timestamp}")
# defining basic calculation variables
now = datetime.now()
if settings.general.adaptive_searching_delay.endswith('d'):
extended_search_delay = timedelta(days=int(settings.general.adaptive_searching_delay[:1]))
elif settings.general.adaptive_searching_delay.endswith('w'):
extended_search_delay = timedelta(weeks=int(settings.general.adaptive_searching_delay[:1]))
else:
logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delay from config file: "
f"{settings.general.adaptive_searching_delay}")
return True
logging.debug(f"Adaptive searching: delay after initial search value: {extended_search_delay}")
if settings.general.adaptive_searching_delta.endswith('d'):
extended_search_delta = timedelta(days=int(settings.general.adaptive_searching_delta[:1]))
elif settings.general.adaptive_searching_delta.endswith('w'):
extended_search_delta = timedelta(weeks=int(settings.general.adaptive_searching_delta[:1]))
else:
logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delta from config file: "
f"{settings.general.adaptive_searching_delta}")
return True
logging.debug(f"Adaptive searching: delta between latest search and now value: {extended_search_delta}")
if initial_search_timestamp + extended_search_delay > now:
logging.debug(f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delay} since "
f"initial search, search will run.")
return True
else:
logging.debug(f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delay} since "
f"initial search, let's check if it's time to search again.")
if latest_search_timestamp + extended_search_delta <= now:
logging.debug(
f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delta} since "
f"latest search, search will run.")
return True
else:
logging.debug(
f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delta} since "
f"latest search, we're not ready to search yet.")
return False
logging.debug("adaptive searching is disabled, search will run.")
return True
def updateFailedAttempts(desired_language, attempt_string):
"""
Function to parse attempts and make sure we only keep initial and latest search timestamp for each language.
@param desired_language: 2 letters language to search for in attempts
@type desired_language: str
@param attempt_string: string representation of a list of lists from database column failedAttempts
@type attempt_string: str
@return: return a string representation of a list of lists like [str(language_code), str(attempts)]
@rtype: str
"""
try:
# let's try to get a list of lists from the string representation in database
attempts = ast.literal_eval(attempt_string)
logging.debug(f"Adaptive searching: current attempts value is {attempts}")
if type(attempts) is not list:
# attempts should be a list if not, it's malformed or None
raise ValueError
except ValueError:
logging.debug("Adaptive searching: failed to parse attempts value, we'll use an empty list.")
attempts = []
matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1])
logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}")
filtered_attempts = sorted([x for x in attempts if x[0] != desired_language], key=lambda x: x[1])
logging.debug(f"Adaptive searching: attempts not matching language {desired_language}: {filtered_attempts}")
# get the initial search from attempts if there's one
if len(matching_attempts):
filtered_attempts.append(matching_attempts[0])
# append current attempt with language and timestamp to attempts
filtered_attempts.append([desired_language, datetime.timestamp(datetime.now())])
updated_attempts = sorted(filtered_attempts, key=lambda x: x[0])
logging.debug(f"Adaptive searching: updated attempts that will be saved to database is {updated_attempts}")
return str(updated_attempts)

View File

@ -23,11 +23,11 @@ def update_notifier():
notifiers_current.append([notifier['name']])
for x in results['schemas']:
if [x['service_name']] not in notifiers_current:
notifiers_new.append({'name': x['service_name'], 'enabled': 0})
logging.debug('Adding new notifier agent: ' + x['service_name'])
if [str(x['service_name'])] not in notifiers_current:
notifiers_new.append({'name': str(x['service_name']), 'enabled': 0})
logging.debug('Adding new notifier agent: ' + str(x['service_name']))
else:
notifiers_old.append([x['service_name']])
notifiers_old.append([str(x['service_name'])])
notifiers_to_delete = [item for item in notifiers_current if item not in notifiers_old]

View File

@ -146,6 +146,7 @@ class Score:
def __init__(self, load_profiles=False, **kwargs):
self.data = self.defaults.copy()
self.data.update(**kwargs)
self.data["hash"] = self._hash_score()
self._profiles = []
self._profiles_loaded = False
@ -205,9 +206,16 @@ class Score:
@property
def max_score(self):
return (
sum(val for val in self.scores.values() if val > 0)
+ sum(item.score for item in self._profiles if item.score > 0)
- self.data["hash"]
self.data["hash"]
+ self.data["hearing_impaired"]
+ sum(item.score for item in self._profiles if item.score)
)
def _hash_score(self):
return sum(
val
for key, val in self.data.items()
if key not in ("hash", "hearing_impaired")
)
def __str__(self):

View File

@ -13,8 +13,9 @@ from database import database
from app import create_app
app = create_app()
from api import api_bp
app.register_blueprint(api_bp)
from api import api_bp_list
for item in api_bp_list:
app.register_blueprint(item, url_prefix=base_url.rstrip('/') + '/api')
class Server:

View File

@ -22,9 +22,9 @@ from get_args import args
headers = {"User-Agent": os.environ["SZ_USER_AGENT"]}
class SonarrSignalrClient:
class SonarrSignalrClientLegacy:
def __init__(self):
super(SonarrSignalrClient, self).__init__()
super(SonarrSignalrClientLegacy, self).__init__()
self.apikey_sonarr = None
self.session = Session()
self.session.timeout = 60
@ -92,6 +92,65 @@ class SonarrSignalrClient:
self.connection.exception += self.exception_handler
class SonarrSignalrClient:
def __init__(self):
super(SonarrSignalrClient, self).__init__()
self.apikey_sonarr = None
self.connection = None
def start(self):
self.configure()
logging.info('BAZARR trying to connect to Sonarr SignalR feed...')
while self.connection.transport.state.value not in [0, 1, 2]:
try:
self.connection.start()
except ConnectionError:
time.sleep(5)
def stop(self):
logging.info('BAZARR SignalR client for Sonarr is now disconnected.')
self.connection.stop()
def restart(self):
if self.connection:
if self.connection.transport.state.value in [0, 1, 2]:
self.stop()
if settings.general.getboolean('use_sonarr'):
self.start()
def exception_handler(self):
logging.error("BAZARR connection to Sonarr SignalR feed has failed. We'll try to reconnect.")
self.restart()
@staticmethod
def on_connect_handler():
logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1)
def configure(self):
self.apikey_sonarr = settings.sonarr.apikey
self.connection = HubConnectionBuilder() \
.with_url(url_sonarr() + "/signalr/messages?access_token={}".format(self.apikey_sonarr),
options={
"verify_ssl": False,
"headers": headers
}) \
.with_automatic_reconnect({
"type": "raw",
"keep_alive_interval": 5,
"reconnect_interval": 180,
"max_attempts": None
}).build()
self.connection.on_open(self.on_connect_handler)
self.connection.on_reconnect(lambda: logging.error('BAZARR SignalR client for Sonarr connection as been lost. '
'Trying to reconnect...'))
self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Sonarr is disconnected.'))
self.connection.on_error(self.exception_handler)
self.connection.on("receiveMessage", dispatcher)
class RadarrSignalrClient:
def __init__(self):
super(RadarrSignalrClient, self).__init__()
@ -186,5 +245,6 @@ def dispatcher(data):
return
sonarr_signalr_client = SonarrSignalrClient()
sonarr_signalr_client = SonarrSignalrClientLegacy() if get_sonarr_info.version().startswith(('0.', '2.', '3.')) else \
SonarrSignalrClient()
radarr_signalr_client = RadarrSignalrClient()

View File

@ -256,8 +256,10 @@ class GetSonarrInfo:
if 'version' in sonarr_json:
sonarr_version = sonarr_json['version']
else:
sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey
sonarr_version = requests.get(sv, timeout=60, verify=False, headers=headers).json()['version']
raise json.decoder.JSONDecodeError
except json.decoder.JSONDecodeError:
sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey
sonarr_version = requests.get(sv, timeout=60, verify=False, headers=headers).json()['version']
except Exception:
logging.debug('BAZARR cannot get Sonarr version')
sonarr_version = 'unknown'
@ -434,10 +436,14 @@ def subtitles_apply_mods(language, subtitle_path, mods):
def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
language_code_convert_dict = {
'he': 'iw',
'zt': 'zh-cn',
'zh': 'zh-tw',
}
to_lang = alpha3_from_alpha2(to_lang)
lang_obj = Language(to_lang)
lang_obj = CustomLanguage.from_value(to_lang, "alpha3")
if not lang_obj:
lang_obj = Language(to_lang)
if forced:
lang_obj = Language.rebuild(lang_obj, forced=True)
if hi:
@ -447,7 +453,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
max_characters = 5000
dest_srt_file = get_subtitle_path(video_path, language=lang_obj, extension='.srt', forced_tag=forced, hi_tag=hi)
dest_srt_file = get_subtitle_path(video_path, language=lang_obj if isinstance(lang_obj, Language) else lang_obj.subzero_language(),
extension='.srt', forced_tag=forced, hi_tag=hi)
subs = pysubs2.load(source_srt_file, encoding='utf-8')
lines_list = [x.plaintext for x in subs]
@ -471,8 +478,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi):
for block_str in lines_block_list:
try:
translated_partial_srt_text = GoogleTranslator(source='auto',
target=language_code_convert_dict.get(lang_obj.basename,
lang_obj.basename)
target=language_code_convert_dict.get(lang_obj.alpha2,
lang_obj.alpha2)
).translate(text=block_str)
except:
return False

View File

@ -4,3 +4,4 @@ pytest
pytest-pep8
pytest-flakes
pytest-cov
pytest-vcr

File diff suppressed because it is too large Load Diff

View File

@ -25,7 +25,6 @@
"bootstrap": "^4",
"lodash": "^4",
"moment": "^2.29.1",
"package.json": "^2.0.1",
"rc-slider": "^9.7",
"react": "^17",
"react-bootstrap": "^1",

View File

@ -33,6 +33,8 @@ declare namespace Language {
profileId: number;
cutoff: number | null;
items: ProfileItem[];
mustContain: string[];
mustNotContain: string[];
}
}

View File

@ -28,6 +28,8 @@ interface Settings {
declare namespace Settings {
interface General {
adaptive_searching: boolean;
adaptive_searching_delay: string;
adaptive_searching_delta: string;
anti_captcha_provider?: string;
auto_update: boolean;
base_url?: string;

View File

@ -5,6 +5,7 @@ import {
faUser,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { uniqueId } from "lodash";
import React, { FunctionComponent, useMemo } from "react";
import {
Button,
@ -35,16 +36,22 @@ async function SearchItem(text: string) {
return results.map<SearchResult>((v) => {
let link: string;
let id: string;
if (v.sonarrSeriesId) {
link = `/series/${v.sonarrSeriesId}`;
id = `series-${v.sonarrSeriesId}`;
} else if (v.radarrId) {
link = `/movies/${v.radarrId}`;
id = `movie-${v.radarrId}`;
} else {
link = "";
id = uniqueId("unknown");
}
return {
name: `${v.title} (${v.year})`,
link,
id,
};
});
}

View File

@ -13,6 +13,7 @@ import {
ActionButton,
BaseModal,
BaseModalProps,
Chips,
LanguageSelector,
Selector,
SimpleTable,
@ -31,6 +32,8 @@ function createDefaultProfile(): Language.Profile {
name: "",
items: [],
cutoff: null,
mustContain: [],
mustNotContain: [],
};
}
@ -260,6 +263,28 @@ const LanguagesProfileModal: FunctionComponent<Props & BaseModalProps> = (
></Selector>
<Message>Ignore others if existing</Message>
</Input>
<Input name="Release info must contain">
<Chips
value={current.mustContain}
onChange={(mc) => updateProfile("mustContain", mc)}
></Chips>
<Message>
Subtitles release info must include one of those words or they will be
excluded from search results (regex supported).
</Message>
</Input>
<Input name="Release info must not contain">
<Chips
value={current.mustNotContain}
onChange={(mnc: string[]) => {
updateProfile("mustNotContain", mnc);
}}
></Chips>
<Message>
Subtitles release info including one of those words (case insensitive)
will be excluded from search results (regex supported).
</Message>
</Input>
</BaseModal>
);
};

View File

@ -94,6 +94,40 @@ const Table: FunctionComponent = () => {
});
},
},
{
Header: "Must contain",
accessor: "mustContain",
Cell: (row) => {
const items = row.value;
if (!items) {
return false;
}
return items.map((v) => {
return (
<Badge className={"mx-1"} variant={"secondary"}>
{v}
</Badge>
);
});
},
},
{
Header: "Must not contain",
accessor: "mustNotContain",
Cell: (row) => {
const items = row.value;
if (!items) {
return false;
}
return items.map((v) => {
return (
<Badge className={"mx-1"} variant={"secondary"}>
{v}
</Badge>
);
});
},
},
{
accessor: "profileId",
Cell: ({ row, update }) => {
@ -138,6 +172,8 @@ const Table: FunctionComponent = () => {
name: "",
items: [],
cutoff: null,
mustContain: [],
mustNotContain: [],
};
showModal("profile", profile);
}}

View File

@ -21,6 +21,10 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
defaultKey: {
username: "",
password: "",
vip: false,
},
keyNameOverride: {
vip: "VIP",
},
},
{ key: "argenteam", description: "LATAM Spanish Subtitles Provider" },
@ -46,6 +50,21 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
key: "bsplayer",
name: "BSplayer",
},
{
key: "embeddedsubtitles",
name: "Embedded Subtitles",
description: "Embedded Subtitles from your Media Files",
defaultKey: {
include_srt: true,
include_ass: true,
},
message:
"Warning for cloud users: this provider needs to read the entire file in order to extract subtitles.",
keyNameOverride: {
include_srt: "Include SRT",
include_ass: "Include ASS (will be converted to SRT)",
},
},
{
key: "greeksubs",
name: "GreekSubs",

View File

@ -97,6 +97,16 @@ const SettingsSonarrView: FunctionComponent<Props> = () => {
episodes in Sonarr.
</Message>
</Input>
<Input>
<Check
label="Exclude season zero (extras)"
settingKey="settings-sonarr-exclude_season_zero"
></Check>
<Message>
Episodes from season zero (extras) from automatic download of
subtitles.
</Message>
</Input>
</Group>
<Group header="Path Mappings">
<PathMappingTable type="sonarr"></PathMappingTable>

View File

@ -10,7 +10,13 @@ import {
Slider,
Text,
} from "../components";
import { antiCaptchaOption, colorOptions, folderOptions } from "./options";
import {
adaptiveSearchingDelayOption,
adaptiveSearchingDeltaOption,
antiCaptchaOption,
colorOptions,
folderOptions,
} from "./options";
const subzeroOverride = (key: string) => {
return (settings: Settings) => {
@ -124,16 +130,44 @@ const SettingsSubtitlesView: FunctionComponent = () => {
</CollapseBox>
</Group>
<Group header="Performance / Optimization">
<Input>
<Check
label="Adaptive Searching"
settingKey="settings-general-adaptive_searching"
></Check>
<Message>
When searching for subtitles, Bazarr will search less frequently to
limit call to providers.
</Message>
</Input>
<CollapseBox>
<CollapseBox.Control>
<Input>
<Check
label="Adaptive Searching"
settingKey="settings-general-adaptive_searching"
></Check>
<Message>
When searching for subtitles, Bazarr will reduce search
frequency to limit call to providers.
</Message>
</Input>
</CollapseBox.Control>
<CollapseBox.Content>
<Input>
<Selector
settingKey="settings-general-adaptive_searching_delay"
beforeStaged={(v) => (v === undefined ? "3w" : v)}
options={adaptiveSearchingDelayOption}
></Selector>
<Message>
How much weeks must Bazarr wait after initial search to reduce
search frequency.
</Message>
</Input>
<Input>
<Selector
settingKey="settings-general-adaptive_searching_delta"
beforeStaged={(v) => (v === undefined ? "1w" : v)}
options={adaptiveSearchingDeltaOption}
></Selector>
<Message>
How often should Bazarr search for subtitles when in adaptive
search mode.
</Message>
</Input>
</CollapseBox.Content>
</CollapseBox>
<Input>
<Check
label="Search Enabled Providers Simultaneously"

View File

@ -24,6 +24,48 @@ export const antiCaptchaOption: SelectorOption<string>[] = [
},
];
export const adaptiveSearchingDelayOption: SelectorOption<string>[] = [
{
label: "1 week",
value: "1w",
},
{
label: "2 weeks",
value: "2w",
},
{
label: "3 weeks",
value: "3w",
},
{
label: "4 weeks",
value: "4w",
},
];
// How often Bazarr re-searches once adaptive search mode is active.
export const adaptiveSearchingDeltaOption: SelectorOption<string>[] = [
  { label: "3 days", value: "3d" },
  ...[1, 2, 3, 4].map((weeks) => ({
    label: `${weeks} week${weeks > 1 ? "s" : ""}`,
    value: `${weeks}w`,
  })),
];
function buildColor(name: string) {
return `color(name=${name})`;
}

View File

@ -176,11 +176,11 @@ export const Chips: FunctionComponent<ChipsProp> = (props) => {
const update = useSingleUpdate();
const defaultValue = useLatest<string[]>(settingKey, isArray, override);
const value = useLatest<string[]>(settingKey, isArray, override);
return (
<CChips
defaultValue={defaultValue ?? undefined}
value={value ?? undefined}
onChange={(v) => {
update(v, settingKey);
}}

View File

@ -10,6 +10,7 @@ import { useHistory } from "react-router";
import { useThrottle } from "rooks";
export interface SearchResult {
id: string;
name: string;
link?: string;
}
@ -58,7 +59,7 @@ export const SearchBar: FunctionComponent<Props> = ({
const items = useMemo(() => {
const its = results.map((v) => (
<Dropdown.Item
key={v.name}
key={v.id}
eventKey={v.link}
disabled={v.link === undefined}
>

View File

@ -3,6 +3,7 @@ import React, {
FunctionComponent,
KeyboardEvent,
useCallback,
useEffect,
useMemo,
useRef,
useState,
@ -14,15 +15,31 @@ const SplitKeys = ["Tab", "Enter", " ", ",", ";"];
export interface ChipsProps {
disabled?: boolean;
defaultValue?: readonly string[];
value?: readonly string[];
onChange?: (v: string[]) => void;
}
export const Chips: FunctionComponent<ChipsProps> = ({
defaultValue,
value,
disabled,
onChange,
}) => {
const [chips, setChips] = useState(defaultValue ?? []);
const [chips, setChips] = useState<Readonly<string[]>>(() => {
if (value) {
return value;
}
if (defaultValue) {
return defaultValue;
}
return [];
});
useEffect(() => {
if (value) {
setChips(value);
}
}, [value]);
const input = useRef<HTMLInputElement>(null);

View File

@ -13,8 +13,8 @@ export const availableTranslation = {
ca: "catalan",
ceb: "cebuano",
ny: "chichewa",
"zh-cn": "chinese (simplified)",
"zh-tw": "chinese (traditional)",
zh: "chinese (simplified)",
zt: "chinese (traditional)",
co: "corsican",
hr: "croatian",
cs: "czech",

View File

@ -34,6 +34,7 @@ from .common import MATCH_ALL_TAG
from .utils import is_exclusive_match
from .utils import parse_list
from .utils import parse_urls
from .utils import cwe312_url
from .logger import logger
from .AppriseAsset import AppriseAsset
@ -58,13 +59,15 @@ class Apprise(object):
"""
def __init__(self, servers=None, asset=None, debug=False):
def __init__(self, servers=None, asset=None, location=None, debug=False):
"""
Loads a set of server urls while applying the Asset() module to each
if specified.
If no asset is provided, then the default asset is used.
Optionally specify a global ContentLocation for a more strict means
of handling Attachments.
"""
# Initialize a server list of URLs
@ -87,6 +90,11 @@ class Apprise(object):
# Set our debug flag
self.debug = debug
# Store our hosting location for optional strict rule handling
# of Attachments. Setting this to None removes any attachment
# restrictions.
self.location = location
@staticmethod
def instantiate(url, asset=None, tag=None, suppress_exceptions=True):
"""
@ -116,9 +124,14 @@ class Apprise(object):
# Initialize our result set
results = None
# Prepare our Asset Object
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
if isinstance(url, six.string_types):
# Acquire our url tokens
results = plugins.url_to_dict(url)
results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if results is None:
# Failed to parse the server URL; detailed logging handled
# inside url_to_dict - nothing to report here.
@ -132,25 +145,40 @@ class Apprise(object):
# schema is a mandatory dictionary item as it is the only way
# we can index into our loaded plugins
logger.error('Dictionary does not include a "schema" entry.')
logger.trace('Invalid dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
logger.trace(
'Invalid dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v)
for k, v in results.items()])))
return None
logger.trace('Dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
logger.trace(
'Dictionary unpacked as:{}{}'.format(
os.linesep, os.linesep.join(
['{}="{}"'.format(k, v) for k, v in results.items()])))
# Otherwise we handle the invalid input specified
else:
logger.error('Invalid URL specified: {}'.format(url))
logger.error(
'An invalid URL type (%s) was specified for instantiation',
type(url))
return None
if not plugins.SCHEMA_MAP[results['schema']].enabled:
#
# First Plugin Enable Check (Pre Initialization)
#
# Plugin has been disabled at a global level
logger.error(
'%s:// is disabled on this system.', results['schema'])
return None
# Build a list of tags to associate with the newly added notifications
results['tag'] = set(parse_list(tag))
# Prepare our Asset Object
results['asset'] = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Set our Asset Object
results['asset'] = asset
if suppress_exceptions:
try:
@ -159,14 +187,21 @@ class Apprise(object):
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
# Create log entry of loaded URL
logger.debug('Loaded {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
plugin.url()))
logger.debug(
'Loaded {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
plugin.url(privacy=asset.secure_logging)))
except Exception:
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# the arguments are invalid or can not be used.
logger.error('Could not load {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name, url))
logger.error(
'Could not load {} URL: {}'.format(
plugins.SCHEMA_MAP[results['schema']].service_name,
loggable_url))
return None
else:
@ -174,6 +209,24 @@ class Apprise(object):
# URL information but don't wrap it in a try catch
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
if not plugin.enabled:
#
# Second Plugin Enable Check (Post Initialization)
#
# Service/Plugin is disabled (on a more local level). This is a
# case where the plugin was initially enabled but then after the
# __init__() was called under the hood something pre-determined
# that it could no longer be used.
# The only downside to doing it this way is services are
# initialized prior to returning the details() if 3rd party tools
# are polling what is available. These services that become
# disabled thereafter are shown initially that they can be used.
logger.error(
'%s:// has become disabled on this system.', results['schema'])
return None
return plugin
def add(self, servers, asset=None, tag=None):
@ -286,7 +339,8 @@ class Apprise(object):
return
def notify(self, body, title='', notify_type=NotifyType.INFO,
body_format=None, tag=MATCH_ALL_TAG, attach=None):
body_format=None, tag=MATCH_ALL_TAG, attach=None,
interpret_escapes=None):
"""
Send a notification to all of the plugins previously loaded.
@ -306,47 +360,158 @@ class Apprise(object):
Attach can contain a list of attachment URLs. attach can also be
represented by a an AttachBase() (or list of) object(s). This
identifies the products you wish to notify
Set interpret_escapes to True if you want to pre-escape a string
such as turning a \n into an actual new line, etc.
"""
if ASYNCIO_SUPPORT:
return py3compat.asyncio.tosync(
self.async_notify(
body, title,
notify_type=notify_type, body_format=body_format,
tag=tag, attach=attach,
interpret_escapes=interpret_escapes,
),
debug=self.debug
)
else:
try:
results = list(
self._notifyall(
Apprise._notifyhandler,
body, title,
notify_type=notify_type, body_format=body_format,
tag=tag, attach=attach,
interpret_escapes=interpret_escapes,
)
)
except TypeError:
# No notifications sent, and there was an internal error.
return False
else:
if len(results) > 0:
# All notifications sent, return False if any failed.
return all(results)
else:
# No notifications sent.
return None
def async_notify(self, *args, **kwargs):
"""
Send a notification to all of the plugins previously loaded, for
asynchronous callers. This method is an async method that should be
awaited on, even if it is missing the async keyword in its signature.
(This is omitted to preserve syntax compatibility with Python 2.)
The arguments are identical to those of Apprise.notify(). This method
is not available in Python 2.
"""
try:
coroutines = list(
self._notifyall(
Apprise._notifyhandlerasync, *args, **kwargs))
except TypeError:
# No notifications sent, and there was an internal error.
return py3compat.asyncio.toasyncwrap(False)
else:
if len(coroutines) > 0:
# All notifications sent, return False if any failed.
return py3compat.asyncio.notify(coroutines)
else:
# No notifications sent.
return py3compat.asyncio.toasyncwrap(None)
@staticmethod
def _notifyhandler(server, **kwargs):
"""
The synchronous notification sender. Returns True if the notification
sent successfully.
"""
try:
# Send notification
return server.notify(**kwargs)
except TypeError:
# These are our internally thrown notifications
return False
except Exception:
# A catch all so we don't have to abort early
# just because one of our plugins has a bug in it.
logger.exception("Unhandled Notification Exception")
return False
@staticmethod
def _notifyhandlerasync(server, **kwargs):
"""
The asynchronous notification sender. Returns a coroutine that yields
True if the notification sent successfully.
"""
if server.asset.async_mode:
return server.async_notify(**kwargs)
else:
# Send the notification immediately, and wrap the result in a
# coroutine.
status = Apprise._notifyhandler(server, **kwargs)
return py3compat.asyncio.toasyncwrap(status)
def _notifyall(self, handler, body, title='', notify_type=NotifyType.INFO,
body_format=None, tag=MATCH_ALL_TAG, attach=None,
interpret_escapes=None):
"""
Creates notifications for all of the plugins loaded.
Returns a generator that calls handler for each notification. The first
and only argument supplied to handler is the server, and the keyword
arguments are exactly as they would be passed to server.notify().
"""
if len(self) == 0:
# Nothing to notify
return False
# Initialize our return result which only turns to True if we send
# at least one valid notification
status = None
raise TypeError("No service(s) to notify")
if not (title or body):
return False
raise TypeError("No message content specified to deliver")
if six.PY2:
# Python 2.7.x Unicode Character Handling
# Ensure we're working with utf-8
if isinstance(title, unicode): # noqa: F821
title = title.encode('utf-8')
if isinstance(body, unicode): # noqa: F821
body = body.encode('utf-8')
# Tracks conversions
conversion_map = dict()
# Prepare attachments if required
if attach is not None and not isinstance(attach, AppriseAttachment):
try:
attach = AppriseAttachment(attach, asset=self.asset)
except TypeError:
# bad attachments
return False
attach = AppriseAttachment(
attach, asset=self.asset, location=self.location)
# Allow Asset default value
body_format = self.asset.body_format \
if body_format is None else body_format
# for asyncio support; we track a list of our servers to notify
# sequentially
coroutines = []
# Allow Asset default value
interpret_escapes = self.asset.interpret_escapes \
if interpret_escapes is None else interpret_escapes
# Iterate over our loaded plugins
for server in self.find(tag):
if status is None:
# We have at least one server to notify; change status
# to be a default value of True from now (purely an
# initialization at this point)
status = True
# If our code reaches here, we either did not define a tag (it
# was set to None), or we did define a tag and the logic above
# determined we need to notify the service it's associated with
@ -396,48 +561,59 @@ class Apprise(object):
# Store entry directly
conversion_map[server.notify_format] = body
if ASYNCIO_SUPPORT and server.asset.async_mode:
# Build a list of servers requiring notification
# that will be triggered asynchronously afterwards
coroutines.append(server.async_notify(
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach))
if interpret_escapes:
#
# Escape our content
#
# We gather at this point and notify at the end
continue
try:
# Added overhead required due to Python 3 Encoding Bug
# identified here: https://bugs.python.org/issue21331
conversion_map[server.notify_format] = \
conversion_map[server.notify_format]\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
try:
# Send notification
if not server.notify(
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach):
except UnicodeDecodeError: # pragma: no cover
# This occurs using a very old version of Python 2.7 such
# as the one that ships with CentOS/RedHat 7.x (v2.7.5).
conversion_map[server.notify_format] = \
conversion_map[server.notify_format] \
.decode('string_escape')
# Toggle our return status flag
status = False
except AttributeError:
# Must be of string type
logger.error('Failed to escape message body')
raise TypeError
except TypeError:
# These are our internally thrown notifications
status = False
if title:
try:
# Added overhead required due to Python 3 Encoding Bug
# identified here: https://bugs.python.org/issue21331
title = title\
.encode('ascii', 'backslashreplace')\
.decode('unicode-escape')
except Exception:
# A catch all so we don't have to abort early
# just because one of our plugins has a bug in it.
logger.exception("Notification Exception")
status = False
except UnicodeDecodeError: # pragma: no cover
# This occurs using a very old version of Python 2.7
# such as the one that ships with CentOS/RedHat 7.x
# (v2.7.5).
title = title.decode('string_escape')
if coroutines:
# perform our async notification(s)
if not py3compat.asyncio.notify(coroutines, debug=self.debug):
# Toggle our status only if we had a failure
status = False
except AttributeError:
# Must be of string type
logger.error('Failed to escape message title')
raise TypeError
return status
yield handler(
server,
body=conversion_map[server.notify_format],
title=title,
notify_type=notify_type,
attach=attach
)
def details(self, lang=None):
def details(self, lang=None, show_requirements=False, show_disabled=False):
"""
Returns the details associated with the Apprise object
@ -453,8 +629,27 @@ class Apprise(object):
'asset': self.asset.details(),
}
# to add it's mapping to our hash table
for plugin in set(plugins.SCHEMA_MAP.values()):
# Iterate over our hashed plugins and dynamically build details on
# their status:
content = {
'service_name': getattr(plugin, 'service_name', None),
'service_url': getattr(plugin, 'service_url', None),
'setup_url': getattr(plugin, 'setup_url', None),
# Placeholder - populated below
'details': None
}
# Standard protocol(s) should be None or a tuple
enabled = getattr(plugin, 'enabled', True)
if not show_disabled and not enabled:
# Do not show inactive plugins
continue
elif show_disabled:
# Add current state to response
content['enabled'] = enabled
# Standard protocol(s) should be None or a tuple
protocols = getattr(plugin, 'protocol', None)
@ -466,31 +661,35 @@ class Apprise(object):
if isinstance(secure_protocols, six.string_types):
secure_protocols = (secure_protocols, )
# Add our protocol details to our content
content.update({
'protocols': protocols,
'secure_protocols': secure_protocols,
})
if not lang:
# Simply return our results
details = plugins.details(plugin)
content['details'] = plugins.details(plugin)
if show_requirements:
content['requirements'] = plugins.requirements(plugin)
else:
# Emulate the specified language when returning our results
with self.locale.lang_at(lang):
details = plugins.details(plugin)
content['details'] = plugins.details(plugin)
if show_requirements:
content['requirements'] = plugins.requirements(plugin)
# Build our response object
response['schemas'].append({
'service_name': getattr(plugin, 'service_name', None),
'service_url': getattr(plugin, 'service_url', None),
'setup_url': getattr(plugin, 'setup_url', None),
'protocols': protocols,
'secure_protocols': secure_protocols,
'details': details,
})
response['schemas'].append(content)
return response
def urls(self):
def urls(self, privacy=False):
"""
Returns all of the loaded URLs defined in this apprise object.
"""
return [x.url() for x in self.servers]
return [x.url(privacy=privacy) for x in self.servers]
def pop(self, index):
"""
@ -592,3 +791,7 @@ class Apprise(object):
"""
return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig))
else len(s.servers()) for s in self.servers])
if six.PY2:
del Apprise.async_notify

63
libs/apprise/Apprise.pyi Normal file
View File

@ -0,0 +1,63 @@
# Type stubs (PEP 484) for apprise.Apprise.
# Fix: ``Union`` is used by ``_Server``, ``_Servers``, ``_Tag`` and
# ``instantiate`` below but was missing from the typing import.
from typing import Any, Dict, List, Iterable, Iterator, Optional, Union

from . import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase,
               NotifyBase, NotifyFormat, NotifyType)
from .common import ContentLocation

# A server may be a URL string, an already-built config/plugin object, or an
# AppriseConfig; collections of any of these are also accepted.
_Server = Union[str, ConfigBase, NotifyBase, AppriseConfig]
_Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]]
# Can't define this recursively as mypy doesn't support recursive types:
# https://github.com/python/mypy/issues/731
_Tag = Union[str, Iterable[Union[str, Iterable[str]]]]

class Apprise:
    def __init__(
        self,
        servers: _Servers = ...,
        asset: Optional[AppriseAsset] = ...,
        location: Optional[ContentLocation] = ...,
        debug: bool = ...
    ) -> None: ...
    @staticmethod
    def instantiate(
        url: Union[str, Dict[str, NotifyBase]],
        asset: Optional[AppriseAsset] = ...,
        tag: Optional[_Tag] = ...,
        suppress_exceptions: bool = ...
    ) -> NotifyBase: ...
    def add(
        self,
        servers: _Servers = ...,
        asset: Optional[AppriseAsset] = ...,
        tag: Optional[_Tag] = ...
    ) -> bool: ...
    def clear(self) -> None: ...
    def find(self, tag: str = ...) -> Iterator[Apprise]: ...
    def notify(
        self,
        body: str,
        title: str = ...,
        notify_type: NotifyType = ...,
        body_format: NotifyFormat = ...,
        tag: _Tag = ...,
        attach: Optional[AppriseAttachment] = ...,
        interpret_escapes: Optional[bool] = ...
    ) -> bool: ...
    async def async_notify(
        self,
        body: str,
        title: str = ...,
        notify_type: NotifyType = ...,
        body_format: NotifyFormat = ...,
        tag: _Tag = ...,
        attach: Optional[AppriseAttachment] = ...,
        interpret_escapes: Optional[bool] = ...
    ) -> bool: ...
    def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ...
    def urls(self, privacy: bool = ...) -> Iterable[str]: ...
    def pop(self, index: int) -> ConfigBase: ...
    def __getitem__(self, index: int) -> ConfigBase: ...
    def __bool__(self) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def __iter__(self) -> Iterator[ConfigBase]: ...
    def __len__(self) -> int: ...

View File

@ -24,7 +24,7 @@
# THE SOFTWARE.
import re
from uuid import uuid4
from os.path import join
from os.path import dirname
from os.path import isfile
@ -105,6 +105,36 @@ class AppriseAsset(object):
# notifications are sent sequentially (one after another)
async_mode = True
# Whether or not to interpret escapes found within the input text prior
# to passing it upstream. Such as converting \t to an actual tab and \n
# to a new line.
interpret_escapes = False
# For more detail see CWE-312 @
# https://cwe.mitre.org/data/definitions/312.html
#
# By enabling this, the logging output has additional overhead applied to
# it preventing secure password and secret information from being
# displayed in the logging. Since there is overhead involved in performing
# this cleanup; system owners who run in a very isolated environment may
# choose to disable this for a slight performance bump. It is recommended
# that you leave this option as is otherwise.
secure_logging = True
# All internal/system flags are prefixed with an underscore (_)
# These can only be initialized using Python libraries and are not picked
# up from (yaml) configuration files (if set)
# An internal counter that is used by AppriseAPI
# (https://github.com/caronc/apprise-api). The idea is to allow one
# instance of AppriseAPI to call another, but to track how many times
# this occurs. Its intent is to prevent a loop where an AppriseAPI
# Server calls itself (or loops indefinitely)
_recursion = 0
# A unique identifier we can use to associate our calling source
_uid = str(uuid4())
def __init__(self, **kwargs):
"""
Asset Initialization

View File

@ -0,0 +1,34 @@
# Type stubs (PEP 484) for apprise.AppriseAsset.
from typing import Dict, Optional

from . import NotifyFormat, NotifyType

class AppriseAsset:
    app_id: str
    app_desc: str
    app_url: str
    html_notify_map: Dict[NotifyType, str]
    default_html_color: str
    default_extension: str
    theme: Optional[str]
    image_url_mask: str
    image_url_logo: str
    image_path_mask: Optional[str]
    body_format: Optional[NotifyFormat]
    async_mode: bool
    interpret_escapes: bool
    # CWE-312 log scrubbing toggle added alongside this stub; the runtime
    # class defines ``secure_logging = True`` and its kwargs-based __init__
    # accepts it like any other attribute.
    secure_logging: bool
    def __init__(
        self,
        app_id: str = ...,
        app_desc: str = ...,
        app_url: str = ...,
        html_notify_map: Dict[NotifyType, str] = ...,
        default_html_color: str = ...,
        default_extension: str = ...,
        theme: Optional[str] = ...,
        image_url_mask: str = ...,
        image_url_logo: str = ...,
        image_path_mask: Optional[str] = ...,
        body_format: Optional[NotifyFormat] = ...,
        async_mode: bool = ...,
        interpret_escapes: bool = ...,
        secure_logging: bool = ...
    ) -> None: ...

View File

@ -29,6 +29,8 @@ from . import attachment
from . import URLBase
from .AppriseAsset import AppriseAsset
from .logger import logger
from .common import ContentLocation
from .common import CONTENT_LOCATIONS
from .utils import GET_SCHEMA_RE
@ -38,7 +40,8 @@ class AppriseAttachment(object):
"""
def __init__(self, paths=None, asset=None, cache=True, **kwargs):
def __init__(self, paths=None, asset=None, cache=True, location=None,
**kwargs):
"""
Loads all of the paths/urls specified (if any).
@ -59,6 +62,25 @@ class AppriseAttachment(object):
It's also worth noting that the cache value is only set to elements
that are not already of subclass AttachBase()
Optionally set your current ContentLocation in the location argument.
This is used to further handle attachments. The rules are as follows:
- INACCESSIBLE: You simply have disabled use of the object; no
attachments will be retrieved/handled.
- HOSTED: You are hosting an attachment service for others.
In these circumstances all attachments that are LOCAL
based (such as file://) will not be allowed.
- LOCAL: The least restrictive mode as local files can be
referenced in addition to hosted.
In both HOSTED and LOCAL modes, INACCESSIBLE attachment types will
continue to be inaccessible. However if you set this field (location)
to None (it's default value) the attachment location category will not
be tested in any way (all attachment types will be allowed).
The location field is also a global option that can be set when
initializing the Apprise object.
"""
# Initialize our attachment listings
@ -71,6 +93,15 @@ class AppriseAttachment(object):
self.asset = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
if location is not None and location not in CONTENT_LOCATIONS:
msg = "An invalid Attachment location ({}) was specified." \
.format(location)
logger.warning(msg)
raise TypeError(msg)
# Store our location
self.location = location
# Now parse any paths specified
if paths is not None:
# Store our path(s)
@ -123,26 +154,45 @@ class AppriseAttachment(object):
# Iterate over our attachments
for _attachment in attachments:
if isinstance(_attachment, attachment.AttachBase):
# Go ahead and just add our attachment into our list
self.attachments.append(_attachment)
if self.location == ContentLocation.INACCESSIBLE:
logger.warning(
"Attachments are disabled; ignoring {}"
.format(_attachment))
return_status = False
continue
elif not isinstance(_attachment, six.string_types):
if isinstance(_attachment, six.string_types):
logger.debug("Loading attachment: {}".format(_attachment))
# Instantiate ourselves an object, this function throws or
# returns None if it fails
instance = AppriseAttachment.instantiate(
_attachment, asset=asset, cache=cache)
if not isinstance(instance, attachment.AttachBase):
return_status = False
continue
elif not isinstance(_attachment, attachment.AttachBase):
logger.warning(
"An invalid attachment (type={}) was specified.".format(
type(_attachment)))
return_status = False
continue
logger.debug("Loading attachment: {}".format(_attachment))
else:
# our entry is of type AttachBase, so just go ahead and point
# our instance to it for some post processing below
instance = _attachment
# Instantiate ourselves an object, this function throws or
# returns None if it fails
instance = AppriseAttachment.instantiate(
_attachment, asset=asset, cache=cache)
if not isinstance(instance, attachment.AttachBase):
# Apply some simple logic if our location flag is set
if self.location and ((
self.location == ContentLocation.HOSTED
and instance.location != ContentLocation.HOSTED)
or instance.location == ContentLocation.INACCESSIBLE):
logger.warning(
"Attachment was disallowed due to accessibility "
"restrictions ({}->{}): {}".format(
self.location, instance.location,
instance.url(privacy=True)))
return_status = False
continue

View File

@ -0,0 +1,38 @@
# Type stubs (PEP 484) for apprise.AppriseAttachment.
# Fixes: ``Iterator`` was used by ``__iter__`` but not imported;
# ``instantiate`` returns an attachment (the runtime checks the result
# against ``attachment.AttachBase``), not a ``NotifyBase`` (which was also
# unimported); ``asset`` parameters take an ``AppriseAsset``, not an
# ``AppriseAttachment``.
from typing import Any, Iterable, Iterator, Optional, Union

from . import AppriseAsset, ContentLocation
from .attachment import AttachBase

_Attachment = Union[str, AttachBase]
_Attachments = Iterable[_Attachment]

class AppriseAttachment:
    def __init__(
        self,
        paths: Optional[_Attachments] = ...,
        asset: Optional[AppriseAsset] = ...,
        cache: bool = ...,
        location: Optional[ContentLocation] = ...,
        **kwargs: Any
    ) -> None: ...
    def add(
        self,
        attachments: _Attachments,
        asset: Optional[AppriseAsset] = ...,
        cache: Optional[bool] = ...
    ) -> bool: ...
    @staticmethod
    def instantiate(
        url: str,
        asset: Optional[AppriseAsset] = ...,
        cache: Optional[bool] = ...,
        suppress_exceptions: bool = ...
    ) -> AttachBase: ...
    def clear(self) -> None: ...
    def size(self) -> int: ...
    def pop(self, index: int = ...) -> AttachBase: ...
    def __getitem__(self, index: int) -> AttachBase: ...
    def __bool__(self) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def __iter__(self) -> Iterator[AttachBase]: ...
    def __len__(self) -> int: ...

View File

@ -0,0 +1,49 @@
# Type stubs (PEP 484) for apprise.AppriseConfig.
# Fixes: ``instantiate`` is a static factory (it takes no self; compare the
# @staticmethod declaration in the sibling Apprise stub); ``add``'s
# ``recursion`` is a depth counter, so ``Optional[int]`` — consistent with
# ``__init__``'s ``recursion: int`` and ``add_config``'s ``Optional[int]``.
from typing import Any, Iterable, Iterator, List, Optional, Union

from . import AppriseAsset, NotifyBase
from .config import ConfigBase

_Configs = Union[ConfigBase, str, Iterable[str]]

class AppriseConfig:
    def __init__(
        self,
        paths: Optional[_Configs] = ...,
        asset: Optional[AppriseAsset] = ...,
        cache: bool = ...,
        recursion: int = ...,
        insecure_includes: bool = ...,
        **kwargs: Any
    ) -> None: ...
    def add(
        self,
        configs: _Configs,
        asset: Optional[AppriseAsset] = ...,
        cache: bool = ...,
        recursion: Optional[int] = ...,
        insecure_includes: Optional[bool] = ...
    ) -> bool: ...
    def add_config(
        self,
        content: str,
        asset: Optional[AppriseAsset] = ...,
        tag: Optional[str] = ...,
        format: Optional[str] = ...,
        recursion: Optional[int] = ...,
        insecure_includes: Optional[bool] = ...
    ) -> bool: ...
    def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ...
    @staticmethod
    def instantiate(
        url: str,
        asset: Optional[AppriseAsset] = ...,
        tag: Optional[str] = ...,
        cache: Optional[bool] = ...
    ) -> NotifyBase: ...
    def clear(self) -> None: ...
    def server_pop(self, index: int) -> ConfigBase: ...
    def pop(self, index: int = ...) -> ConfigBase: ...
    def __getitem__(self, index: int) -> ConfigBase: ...
    def __bool__(self) -> bool: ...
    def __nonzero__(self) -> bool: ...
    def __iter__(self) -> Iterator[ConfigBase]: ...
    def __len__(self) -> int: ...

View File

@ -25,7 +25,7 @@
import re
import six
import logging
from .logger import logger
from time import sleep
from datetime import datetime
from xml.sax.saxutils import escape as sax_escape
@ -47,6 +47,7 @@ from .AppriseAsset import AppriseAsset
from .utils import parse_url
from .utils import parse_bool
from .utils import parse_list
from .utils import parse_phone_no
# Used to break a path list into parts
PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
@ -115,8 +116,8 @@ class URLBase(object):
# Secure sites should be verified against a Certificate Authority
verify_certificate = True
# Logging
logger = logging.getLogger(__name__)
# Logging to our global logger
logger = logger
# Define a default set of template arguments used for dynamically building
# details about our individual plugins for developers.
@ -280,7 +281,7 @@ class URLBase(object):
self._last_io_datetime = reference
return
if self.request_rate_per_sec <= 0.0:
if self.request_rate_per_sec <= 0.0 and not wait:
# We're done if there is no throttle limit set
return
@ -560,6 +561,39 @@ class URLBase(object):
return content
@staticmethod
def parse_phone_no(content, unquote=True):
"""A wrapper to utils.parse_phone_no() with unquoting support
Parses a specified set of data and breaks it into a list.
Args:
content (str): The path to split up into a list. If a list is
provided, then it's individual entries are processed.
unquote (:obj:`bool`, optional): call unquote on each element
added to the returned list.
Returns:
list: A unique list containing all of the elements in the path
"""
if unquote:
try:
content = URLBase.unquote(content)
except TypeError:
# Nothing further to do
return []
except AttributeError:
# This exception ONLY gets thrown under Python v2.7 if an
# object() is passed in place of the content
return []
content = parse_phone_no(content)
return content
@property
def app_id(self):
return self.asset.app_id if self.asset.app_id else ''
@ -636,6 +670,8 @@ class URLBase(object):
results['qsd'].get('verify', True))
# Password overrides
if 'password' in results['qsd']:
results['password'] = results['qsd']['password']
if 'pass' in results['qsd']:
results['password'] = results['qsd']['pass']

16
libs/apprise/URLBase.pyi Normal file
View File

@ -0,0 +1,16 @@
# Type stubs (PEP 484) for apprise.URLBase.
# Fix: the logging module exports the class ``Logger`` (lowercase ``logger``
# does not exist and made this stub fail to import).
from logging import Logger
from typing import Any, Iterable, Set, Optional

class URLBase:
    service_name: Optional[str]
    protocol: Optional[str]
    secure_protocol: Optional[str]
    request_rate_per_sec: int
    socket_connect_timeout: float
    socket_read_timeout: float
    tags: Set[str]
    verify_certificate: bool
    # Shared package-level logger instance.
    logger: Logger
    def url(self, privacy: bool = ..., *args: Any, **kwargs: Any) -> str: ...
    def __contains__(self, tags: Iterable[str]) -> bool: ...
    def __str__(self) -> str: ...

View File

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>
# Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
@ -23,11 +23,11 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
__title__ = 'apprise'
__version__ = '0.8.8'
__title__ = 'Apprise'
__version__ = '0.9.6'
__author__ = 'Chris Caron'
__license__ = 'MIT'
__copywrite__ = 'Copyright (C) 2020 Chris Caron <lead2gold@gmail.com>'
__copywrite__ = 'Copyright (C) 2021 Chris Caron <lead2gold@gmail.com>'
__email__ = 'lead2gold@gmail.com'
__status__ = 'Production'
@ -41,8 +41,10 @@ from .common import OverflowMode
from .common import OVERFLOW_MODES
from .common import ConfigFormat
from .common import CONFIG_FORMATS
from .common import ConfigIncludeMode
from .common import CONFIG_INCLUDE_MODES
from .common import ContentIncludeMode
from .common import CONTENT_INCLUDE_MODES
from .common import ContentLocation
from .common import CONTENT_LOCATIONS
from .URLBase import URLBase
from .URLBase import PrivacyMode
@ -55,10 +57,13 @@ from .AppriseAsset import AppriseAsset
from .AppriseConfig import AppriseConfig
from .AppriseAttachment import AppriseAttachment
# Inherit our logging with our additional entries added to it
from .logger import logging
from .logger import logger
from .logger import LogCapture
# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
logging.getLogger(__name__).addHandler(logging.NullHandler())
__all__ = [
# Core
@ -69,6 +74,10 @@ __all__ = [
'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode',
'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES',
'ConfigFormat', 'CONFIG_FORMATS',
'ConfigIncludeMode', 'CONFIG_INCLUDE_MODES',
'ContentIncludeMode', 'CONTENT_INCLUDE_MODES',
'ContentLocation', 'CONTENT_LOCATIONS',
'PrivacyMode',
# Logging
'logging', 'logger', 'LogCapture',
]

View File

@ -1,22 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
<xs:element name="Notification">
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="MessageType" type="xs:string" />
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Subject" type="xs:string" />
<xs:element name="Message" type="xs:string" />
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="Subject" type="xs:string" />
<xs:element name="MessageType">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Message" type="xs:string" />
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>

View File

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>
<xs:schema attributeFormDefault="unqualified" elementFormDefault="qualified" xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:import namespace="http://schemas.xmlsoap.org/soap/envelope/" schemaLocation="http://schemas.xmlsoap.org/soap/envelope/"/>
<xs:element name="Notification">
<xs:complexType>
<xs:sequence>
<xs:element name="Version" type="xs:string" />
<xs:element name="Subject" type="xs:string" />
<xs:element name="MessageType">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:enumeration value="success" />
<xs:enumeration value="failure" />
<xs:enumeration value="info" />
<xs:enumeration value="warning" />
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="Message" type="xs:string" />
<xs:element name="Attachments" minOccurs="0">
<xs:complexType>
<xs:sequence>
<xs:element name="Attachment" minOccurs="0" maxOccurs="unbounded">
<xs:complexType>
<xs:simpleContent>
<xs:extension base="xs:string">
<xs:attribute name="mimetype" type="xs:string" use="required"/>
<xs:attribute name="filename" type="xs:string" use="required"/>
</xs:extension>
</xs:simpleContent>
</xs:complexType>
</xs:element>
</xs:sequence>
<xs:attribute name="encoding" type="xs:string" use="required"/>
</xs:complexType>
</xs:element>
</xs:sequence>
</xs:complexType>
</xs:element>
</xs:schema>

View File

@ -28,6 +28,7 @@ import time
import mimetypes
from ..URLBase import URLBase
from ..utils import parse_bool
from ..common import ContentLocation
from ..AppriseLocale import gettext_lazy as _
@ -62,6 +63,11 @@ class AttachBase(URLBase):
# 5 MB = 5242880 bytes
max_file_size = 5242880
# By default all attachments types are inaccessible.
# Developers of items identified in the attachment plugin directory
# are requried to set a location
location = ContentLocation.INACCESSIBLE
# Here is where we define all of the arguments we accept on the url
# such as: schema://whatever/?overflow=upstream&format=text
# These act the same way as tokens except they are optional and/or

View File

@ -0,0 +1,37 @@
from typing import Any, Dict, Optional
from .. import ContentLocation
class AttachBase:
max_detect_buffer_size: int
unknown_mimetype: str
unknown_filename: str
unknown_filename_extension: str
strict: bool
max_file_size: int
location: ContentLocation
template_args: Dict[str, Any]
def __init__(
self,
name: Optional[str] = ...,
mimetype: Optional[str] = ...,
cache: Optional[bool] = ...,
**kwargs: Any
) -> None: ...
@property
def path(self) -> Optional[str]: ...
@property
def name(self) -> Optional[str]: ...
@property
def mimetype(self) -> Optional[str]: ...
def exists(self) -> bool: ...
def invalidate(self) -> None: ...
def download(self) -> bool: ...
@staticmethod
def parse_url(
url: str,
verify_host: bool = ...
) -> Dict[str, Any]: ...
def __len__(self) -> int: ...
def __bool__(self) -> bool: ...
def __nonzero__(self) -> bool: ...

View File

@ -26,6 +26,7 @@
import re
import os
from .AttachBase import AttachBase
from ..common import ContentLocation
from ..AppriseLocale import gettext_lazy as _
@ -40,6 +41,10 @@ class AttachFile(AttachBase):
# The default protocol
protocol = 'file'
# Content is local to the same location as the apprise instance
# being called (server-side)
location = ContentLocation.LOCAL
def __init__(self, path, **kwargs):
"""
Initialize Local File Attachment Object
@ -81,6 +86,10 @@ class AttachFile(AttachBase):
validate it.
"""
if self.location == ContentLocation.INACCESSIBLE:
# our content is inaccessible
return False
# Ensure any existing content set has been invalidated
self.invalidate()

View File

@ -29,6 +29,7 @@ import six
import requests
from tempfile import NamedTemporaryFile
from .AttachBase import AttachBase
from ..common import ContentLocation
from ..URLBase import PrivacyMode
from ..AppriseLocale import gettext_lazy as _
@ -50,6 +51,9 @@ class AttachHTTP(AttachBase):
# The number of bytes in memory to read from the remote source at a time
chunk_size = 8192
# Web based requests are remote/external to our current location
location = ContentLocation.HOSTED
def __init__(self, headers=None, **kwargs):
"""
Initialize HTTP Object
@ -86,6 +90,10 @@ class AttachHTTP(AttachBase):
Perform retrieval of the configuration based on the specified request
"""
if self.location == ContentLocation.INACCESSIBLE:
# our content is inaccessible
return False
# Ensure any existing content set has been invalidated
self.invalidate()

View File

@ -26,7 +26,11 @@
import click
import logging
import platform
import six
import sys
import os
import re
from os.path import isfile
from os.path import expanduser
from os.path import expandvars
@ -39,6 +43,7 @@ from . import AppriseConfig
from .utils import parse_list
from .common import NOTIFY_TYPES
from .common import NOTIFY_FORMATS
from .common import ContentLocation
from .logger import logger
from . import __title__
@ -133,6 +138,9 @@ def print_version_msg():
help='Perform a trial run but only prints the notification '
'services to-be triggered to stdout. Notifications are never '
'sent using this mode.')
@click.option('--details', '-l', is_flag=True,
help='Prints details about the current services supported by '
'Apprise.')
@click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH,
type=int,
help='The number of recursive import entries that can be '
@ -141,6 +149,8 @@ def print_version_msg():
@click.option('--verbose', '-v', count=True,
help='Makes the operation more talkative. Use multiple v to '
'increase the verbosity. I.e.: -vvvv')
@click.option('--interpret-escapes', '-e', is_flag=True,
help='Enable interpretation of backslash escapes')
@click.option('--debug', '-D', is_flag=True, help='Debug mode')
@click.option('--version', '-V', is_flag=True,
help='Display the apprise version and exit.')
@ -148,7 +158,7 @@ def print_version_msg():
metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',)
def main(body, title, config, attach, urls, notification_type, theme, tag,
input_format, dry_run, recursion_depth, verbose, disable_async,
debug, version):
details, interpret_escapes, debug, version):
"""
Send a notification to all of the specified servers identified by their
URLs the content provided within the title, body and notification-type.
@ -224,8 +234,15 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
# Prepare our asset
asset = AppriseAsset(
# Our body format
body_format=input_format,
# Interpret Escapes
interpret_escapes=interpret_escapes,
# Set the theme
theme=theme,
# Async mode is only used for Python v3+ and allows a user to send
# all of their notifications asyncronously. This was made an option
# incase there are problems in the future where it's better that
@ -234,18 +251,132 @@ def main(body, title, config, attach, urls, notification_type, theme, tag,
)
# Create our Apprise object
a = Apprise(asset=asset, debug=debug)
a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL)
# Load our configuration if no URLs or specified configuration was
# identified on the command line
a.add(AppriseConfig(
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))]
if not (config or urls) else config,
asset=asset, recursion=recursion_depth))
if details:
# Print details and exit
results = a.details(show_requirements=True, show_disabled=True)
# Load our inventory up
for url in urls:
a.add(url)
# Sort our results:
plugins = sorted(
results['schemas'], key=lambda i: str(i['service_name']))
for entry in plugins:
protocols = [] if not entry['protocols'] else \
[p for p in entry['protocols']
if isinstance(p, six.string_types)]
protocols.extend(
[] if not entry['secure_protocols'] else
[p for p in entry['secure_protocols']
if isinstance(p, six.string_types)])
if len(protocols) == 1:
# Simplify view by swapping {schema} with the single
# protocol value
# Convert tuple to list
entry['details']['templates'] = \
list(entry['details']['templates'])
for x in range(len(entry['details']['templates'])):
entry['details']['templates'][x] = \
re.sub(
r'^[^}]+}://',
'{}://'.format(protocols[0]),
entry['details']['templates'][x])
click.echo(click.style(
'{} {:<30} '.format(
'+' if entry['enabled'] else '-',
str(entry['service_name'])),
fg="green" if entry['enabled'] else "red", bold=True),
nl=(not entry['enabled'] or len(protocols) == 1))
if not entry['enabled']:
if entry['requirements']['details']:
click.echo(
' ' + str(entry['requirements']['details']))
if entry['requirements']['packages_required']:
click.echo(' Python Packages Required:')
for req in entry['requirements']['packages_required']:
click.echo(' - ' + req)
if entry['requirements']['packages_recommended']:
click.echo(' Python Packages Recommended:')
for req in entry['requirements']['packages_recommended']:
click.echo(' - ' + req)
# new line padding between entries
click.echo()
continue
if len(protocols) > 1:
click.echo('| Schema(s): {}'.format(
', '.join(protocols),
))
prefix = ' - '
click.echo('{}{}'.format(
prefix,
'\n{}'.format(prefix).join(entry['details']['templates'])))
# new line padding between entries
click.echo()
sys.exit(0)
# The priorities of what is accepted are parsed in order below:
# 1. URLs by command line
# 2. Configuration by command line
# 3. URLs by environment variable: APPRISE_URLS
# 4. Configuration by environment variable: APPRISE_CONFIG
# 5. Default Configuration File(s) (if found)
#
if urls:
if tag:
# Ignore any tags specified
logger.warning(
'--tag (-g) entries are ignored when using specified URLs')
tag = None
# Load our URLs (if any defined)
for url in urls:
a.add(url)
if config:
# Provide a warning to the end user if they specified both
logger.warning(
'You defined both URLs and a --config (-c) entry; '
'Only the URLs will be referenced.')
elif config:
# We load our configuration file(s) now only if no URLs were specified
# Specified config entries trump all
a.add(AppriseConfig(
paths=config, asset=asset, recursion=recursion_depth))
elif os.environ.get('APPRISE_URLS', '').strip():
logger.debug('Loading provided APPRISE_URLS environment variable')
if tag:
# Ignore any tags specified
logger.warning(
'--tag (-g) entries are ignored when using specified URLs')
tag = None
# Attempt to use our APPRISE_URLS environment variable (if populated)
a.add(os.environ['APPRISE_URLS'].strip())
elif os.environ.get('APPRISE_CONFIG', '').strip():
logger.debug('Loading provided APPRISE_CONFIG environment variable')
# Fall back to config environment variable (if populated)
a.add(AppriseConfig(
paths=os.environ['APPRISE_CONFIG'].strip(),
asset=asset, recursion=recursion_depth))
else:
# Load default configuration
a.add(AppriseConfig(
paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))],
asset=asset, recursion=recursion_depth))
if len(a) == 0:
logger.error(

View File

@ -130,28 +130,58 @@ CONFIG_FORMATS = (
)
class ConfigIncludeMode(object):
class ContentIncludeMode(object):
"""
The different Cofiguration inclusion modes. All Configuration
plugins will have one of these associated with it.
The different Content inclusion modes. All content based plugins will
have one of these associated with it.
"""
# - Configuration inclusion of same type only; hence a file:// can include
# - Content inclusion of same type only; hence a file:// can include
# a file://
# - Cross file inclusion is not allowed unless insecure_includes (a flag)
# is set to True. In these cases STRICT acts as type ALWAYS
STRICT = 'strict'
# This configuration type can never be included
# This content type can never be included
NEVER = 'never'
# File configuration can always be included
# This content can always be included
ALWAYS = 'always'
CONFIG_INCLUDE_MODES = (
ConfigIncludeMode.STRICT,
ConfigIncludeMode.NEVER,
ConfigIncludeMode.ALWAYS,
CONTENT_INCLUDE_MODES = (
ContentIncludeMode.STRICT,
ContentIncludeMode.NEVER,
ContentIncludeMode.ALWAYS,
)
class ContentLocation(object):
"""
This is primarily used for handling file attachments. The idea is
to track the source of the attachment itself. We don't want
remote calls to a server to access local attachments for example.
By knowing the attachment type and cross-associating it with how
we plan on accessing the content, we can make a judgement call
(for security reasons) if we will allow it.
Obviously local uses of apprise can access both local and remote
type files.
"""
# Content is located locally (on the same server as apprise)
LOCAL = 'local'
# Content is located in a remote location
HOSTED = 'hosted'
# Content is inaccessible
INACCESSIBLE = 'n/a'
CONTENT_LOCATIONS = (
ContentLocation.LOCAL,
ContentLocation.HOSTED,
ContentLocation.INACCESSIBLE,
)
# This is a reserved tag that is automatically assigned to every

15
libs/apprise/common.pyi Normal file
View File

@ -0,0 +1,15 @@
class NotifyType:
INFO: NotifyType
SUCCESS: NotifyType
WARNING: NotifyType
FAILURE: NotifyType
class NotifyFormat:
TEXT: NotifyFormat
HTML: NotifyFormat
MARKDOWN: NotifyFormat
class ContentLocation:
LOCAL: ContentLocation
HOSTED: ContentLocation
INACCESSIBLE: ContentLocation

View File

@ -34,13 +34,18 @@ from ..AppriseAsset import AppriseAsset
from ..URLBase import URLBase
from ..common import ConfigFormat
from ..common import CONFIG_FORMATS
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..utils import GET_SCHEMA_RE
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import parse_urls
from ..utils import cwe312_url
from . import SCHEMA_MAP
# Test whether token is valid or not
VALID_TOKEN = re.compile(
r'(?P<token>[a-z0-9][a-z0-9_]+)', re.I)
class ConfigBase(URLBase):
"""
@ -65,7 +70,7 @@ class ConfigBase(URLBase):
# By default all configuration is not includable using the 'include'
# line found in configuration files.
allow_cross_includes = ConfigIncludeMode.NEVER
allow_cross_includes = ContentIncludeMode.NEVER
# the config path manages the handling of relative include
config_path = os.getcwd()
@ -205,8 +210,8 @@ class ConfigBase(URLBase):
# Configuration files were detected; recursively populate them
# If we have been configured to do so
for url in configs:
if self.recursion > 0:
if self.recursion > 0:
# Attempt to acquire the schema at the very least to allow
# our configuration based urls.
schema = GET_SCHEMA_RE.match(url)
@ -219,6 +224,7 @@ class ConfigBase(URLBase):
url = os.path.join(self.config_path, url)
url = '{}://{}'.format(schema, URLBase.quote(url))
else:
# Ensure our schema is always in lower case
schema = schema.group('schema').lower()
@ -229,27 +235,31 @@ class ConfigBase(URLBase):
'Unsupported include schema {}.'.format(schema))
continue
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# Parse our url details of the server object as dictionary
# containing all of the information parsed from our URL
results = SCHEMA_MAP[schema].parse_url(url)
if not results:
# Failed to parse the server URL
self.logger.warning(
'Unparseable include URL {}'.format(url))
'Unparseable include URL {}'.format(loggable_url))
continue
# Handle cross inclusion based on allow_cross_includes rules
if (SCHEMA_MAP[schema].allow_cross_includes ==
ConfigIncludeMode.STRICT
ContentIncludeMode.STRICT
and schema not in self.schemas()
and not self.insecure_includes) or \
SCHEMA_MAP[schema].allow_cross_includes == \
ConfigIncludeMode.NEVER:
ContentIncludeMode.NEVER:
# Prevent the loading if insecure base protocols
ConfigBase.logger.warning(
'Including {}:// based configuration is prohibited. '
'Ignoring URL {}'.format(schema, url))
'Ignoring URL {}'.format(schema, loggable_url))
continue
# Prepare our Asset Object
@ -275,7 +285,7 @@ class ConfigBase(URLBase):
except Exception as e:
# the arguments are invalid or can not be used.
self.logger.warning(
'Could not load include URL: {}'.format(url))
'Could not load include URL: {}'.format(loggable_url))
self.logger.debug('Loading Exception: {}'.format(str(e)))
continue
@ -288,16 +298,23 @@ class ConfigBase(URLBase):
del cfg_plugin
else:
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
self.logger.debug(
'Recursion limit reached; ignoring Include URL: %s' % url)
'Recursion limit reached; ignoring Include URL: %s',
loggable_url)
if self._cached_servers:
self.logger.info('Loaded {} entries from {}'.format(
len(self._cached_servers), self.url()))
self.logger.info(
'Loaded {} entries from {}'.format(
len(self._cached_servers),
self.url(privacy=asset.secure_logging)))
else:
self.logger.warning(
'Failed to load Apprise configuration from {}'.format(
self.url()))
self.url(privacy=asset.secure_logging)))
# Set the time our content was cached at
self._cached_time = time.time()
@ -527,6 +544,9 @@ class ConfigBase(URLBase):
# the include keyword
configs = list()
# Prepare our Asset Object
asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Define what a valid line should look like
valid_line_re = re.compile(
r'^\s*(?P<line>([;#]+(?P<comment>.*))|'
@ -563,27 +583,37 @@ class ConfigBase(URLBase):
continue
if config:
ConfigBase.logger.debug('Include URL: {}'.format(config))
# CWE-312 (Secure Logging) Handling
loggable_url = config if not asset.secure_logging \
else cwe312_url(config)
ConfigBase.logger.debug(
'Include URL: {}'.format(loggable_url))
# Store our include line
configs.append(config.strip())
continue
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
# Acquire our url tokens
results = plugins.url_to_dict(url)
results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if results is None:
# Failed to parse the server URL
ConfigBase.logger.warning(
'Unparseable URL {} on line {}.'.format(url, line))
'Unparseable URL {} on line {}.'.format(
loggable_url, line))
continue
# Build a list of tags to associate with the newly added
# notifications if any were set
results['tag'] = set(parse_list(result.group('tags')))
# Prepare our Asset Object
results['asset'] = \
asset if isinstance(asset, AppriseAsset) else AppriseAsset()
# Set our Asset Object
results['asset'] = asset
try:
# Attempt to create an instance of our plugin using the
@ -591,13 +621,14 @@ class ConfigBase(URLBase):
plugin = plugins.SCHEMA_MAP[results['schema']](**results)
# Create log entry of loaded URL
ConfigBase.logger.debug('Loaded URL: {}'.format(plugin.url()))
ConfigBase.logger.debug(
'Loaded URL: %s', plugin.url(privacy=asset.secure_logging))
except Exception as e:
# the arguments are invalid or can not be used.
ConfigBase.logger.warning(
'Could not load URL {} on line {}.'.format(
url, line))
loggable_url, line))
ConfigBase.logger.debug('Loading Exception: %s' % str(e))
continue
@ -633,7 +664,9 @@ class ConfigBase(URLBase):
# Load our data (safely)
result = yaml.load(content, Loader=yaml.SafeLoader)
except (AttributeError, yaml.error.MarkedYAMLError) as e:
except (AttributeError,
yaml.parser.ParserError,
yaml.error.MarkedYAMLError) as e:
# Invalid content
ConfigBase.logger.error(
'Invalid Apprise YAML data specified.')
@ -671,7 +704,9 @@ class ConfigBase(URLBase):
continue
if not (hasattr(asset, k) and
isinstance(getattr(asset, k), six.string_types)):
isinstance(getattr(asset, k),
(bool, six.string_types))):
# We can't set a function or non-string set value
ConfigBase.logger.warning(
'Invalid asset key "{}".'.format(k))
@ -681,15 +716,23 @@ class ConfigBase(URLBase):
# Convert to an empty string
v = ''
if not isinstance(v, six.string_types):
if (isinstance(v, (bool, six.string_types))
and isinstance(getattr(asset, k), bool)):
# If the object in the Asset is a boolean, then
# we want to convert the specified string to
# match that.
setattr(asset, k, parse_bool(v))
elif isinstance(v, six.string_types):
# Set our asset object with the new value
setattr(asset, k, v.strip())
else:
# we must set strings with a string
ConfigBase.logger.warning(
'Invalid asset value to "{}".'.format(k))
continue
# Set our asset object with the new value
setattr(asset, k, v.strip())
#
# global tag root directive
#
@ -740,6 +783,10 @@ class ConfigBase(URLBase):
# we can. Reset it to None on each iteration
results = list()
# CWE-312 (Secure Logging) Handling
loggable_url = url if not asset.secure_logging \
else cwe312_url(url)
if isinstance(url, six.string_types):
# We're just a simple URL string...
schema = GET_SCHEMA_RE.match(url)
@ -748,16 +795,18 @@ class ConfigBase(URLBase):
# config file at least has something to take action
# with.
ConfigBase.logger.warning(
'Invalid URL {}, entry #{}'.format(url, no + 1))
'Invalid URL {}, entry #{}'.format(
loggable_url, no + 1))
continue
# We found a valid schema worthy of tracking; store it's
# details:
_results = plugins.url_to_dict(url)
_results = plugins.url_to_dict(
url, secure_logging=asset.secure_logging)
if _results is None:
ConfigBase.logger.warning(
'Unparseable URL {}, entry #{}'.format(
url, no + 1))
loggable_url, no + 1))
continue
# add our results to our global set
@ -791,19 +840,20 @@ class ConfigBase(URLBase):
.format(key, no + 1))
continue
# Store our URL and Schema Regex
_url = key
# Store our schema
schema = _schema.group('schema').lower()
# Store our URL and Schema Regex
_url = key
if _url is None:
# the loop above failed to match anything
ConfigBase.logger.warning(
'Unsupported schema in urls, entry #{}'.format(no + 1))
'Unsupported URL, entry #{}'.format(no + 1))
continue
_results = plugins.url_to_dict(_url)
_results = plugins.url_to_dict(
_url, secure_logging=asset.secure_logging)
if _results is None:
# Setup dictionary
_results = {
@ -830,12 +880,33 @@ class ConfigBase(URLBase):
if 'schema' in entries:
del entries['schema']
# support our special tokens (if they're present)
if schema in plugins.SCHEMA_MAP:
entries = ConfigBase._special_token_handler(
schema, entries)
# Extend our dictionary with our new entries
r.update(entries)
# add our results to our global set
results.append(r)
elif isinstance(tokens, dict):
# support our special tokens (if they're present)
if schema in plugins.SCHEMA_MAP:
tokens = ConfigBase._special_token_handler(
schema, tokens)
# Copy ourselves a template of our parsed URL as a base to
# work with
r = _results.copy()
# add our result set
r.update(tokens)
# add our results to our global set
results.append(r)
else:
# add our results to our global set
results.append(_results)
@ -867,6 +938,17 @@ class ConfigBase(URLBase):
# Just use the global settings
_results['tag'] = global_tags
for key in list(_results.keys()):
# Strip out any tokens we know that we can't accept and
# warn the user
match = VALID_TOKEN.match(key)
if not match:
ConfigBase.logger.warning(
'Ignoring invalid token ({}) found in YAML '
'configuration entry #{}, item #{}'
.format(key, no + 1, entry))
del _results[key]
ConfigBase.logger.trace(
'URL #{}: {} unpacked as:{}{}'
.format(no + 1, url, os.linesep, os.linesep.join(
@ -883,7 +965,8 @@ class ConfigBase(URLBase):
# Create log entry of loaded URL
ConfigBase.logger.debug(
'Loaded URL: {}'.format(plugin.url()))
'Loaded URL: {}'.format(
plugin.url(privacy=asset.secure_logging)))
except Exception as e:
# the arguments are invalid or can not be used.
@ -913,6 +996,135 @@ class ConfigBase(URLBase):
# Pop the element off of the stack
return self._cached_servers.pop(index)
@staticmethod
def _special_token_handler(schema, tokens):
"""
This function takes a list of tokens and updates them to no longer
include any special tokens such as +,-, and :
- schema must be a valid schema of a supported plugin type
- tokens must be a dictionary containing the yaml entries parsed.
The idea here is we can post process a set of tokens provided in
a YAML file where the user provided some of the special keywords.
We effectivley look up what these keywords map to their appropriate
value they're expected
"""
# Create a copy of our dictionary
tokens = tokens.copy()
for kw, meta in plugins.SCHEMA_MAP[schema]\
.template_kwargs.items():
# Determine our prefix:
prefix = meta.get('prefix', '+')
# Detect any matches
matches = \
{k[1:]: str(v) for k, v in tokens.items()
if k.startswith(prefix)}
if not matches:
# we're done with this entry
continue
if not isinstance(tokens.get(kw), dict):
# Invalid; correct it
tokens[kw] = dict()
# strip out processed tokens
tokens = {k: v for k, v in tokens.items()
if not k.startswith(prefix)}
# Update our entries
tokens[kw].update(matches)
# Now map our tokens accordingly to the class templates defined by
# each service.
#
# This is specifically used for YAML file parsing. It allows a user to
# define an entry such as:
#
# urls:
# - mailto://user:pass@domain:
# - to: user1@hotmail.com
# - to: user2@hotmail.com
#
# Under the hood, the NotifyEmail() class does not parse the `to`
# argument. It's contents needs to be mapped to `targets`. This is
# defined in the class via the `template_args` and template_tokens`
# section.
#
# This function here allows these mappings to take place within the
# YAML file as independant arguments.
class_templates = \
plugins.details(plugins.SCHEMA_MAP[schema])
for key in list(tokens.keys()):
if key not in class_templates['args']:
# No need to handle non-arg entries
continue
# get our `map_to` and/or 'alias_of' value (if it exists)
map_to = class_templates['args'][key].get(
'alias_of', class_templates['args'][key].get('map_to', ''))
if map_to == key:
# We're already good as we are now
continue
if map_to in class_templates['tokens']:
meta = class_templates['tokens'][map_to]
else:
meta = class_templates['args'].get(
map_to, class_templates['args'][key])
# Perform a translation/mapping if our code reaches here
value = tokens[key]
del tokens[key]
# Detect if we're dealign with a list or not
is_list = re.search(
r'^(list|choice):.*',
meta.get('type'),
re.IGNORECASE)
if map_to not in tokens:
tokens[map_to] = [] if is_list \
else meta.get('default')
elif is_list and not isinstance(tokens.get(map_to), list):
# Convert ourselves to a list if we aren't already
tokens[map_to] = [tokens[map_to]]
# Type Conversion
if re.search(
r'^(choice:)?string',
meta.get('type'),
re.IGNORECASE) \
and not isinstance(value, six.string_types):
# Ensure our format is as expected
value = str(value)
# Apply any further translations if required (absolute map)
# This is the case when an arg maps to a token which further
# maps to a different function arg on the class constructor
abs_map = meta.get('map_to', map_to)
# Set our token as how it was provided by the configuration
if isinstance(tokens.get(map_to), list):
tokens[abs_map].append(value)
else:
tokens[abs_map] = value
# Return our tokens
return tokens
def __getitem__(self, index):
"""
Returns the indexed server entry associated with the loaded

View File

@ -0,0 +1,3 @@
from .. import URLBase
class ConfigBase(URLBase): ...

View File

@ -28,7 +28,7 @@ import io
import os
from .ConfigBase import ConfigBase
from ..common import ConfigFormat
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..AppriseLocale import gettext_lazy as _
@ -44,7 +44,7 @@ class ConfigFile(ConfigBase):
protocol = 'file'
# Configuration file inclusion can only be of the same type
allow_cross_includes = ConfigIncludeMode.STRICT
allow_cross_includes = ContentIncludeMode.STRICT
def __init__(self, path, **kwargs):
"""

View File

@ -28,7 +28,7 @@ import six
import requests
from .ConfigBase import ConfigBase
from ..common import ConfigFormat
from ..common import ConfigIncludeMode
from ..common import ContentIncludeMode
from ..URLBase import PrivacyMode
from ..AppriseLocale import gettext_lazy as _
@ -66,7 +66,7 @@ class ConfigHTTP(ConfigBase):
max_error_buffer_size = 2048
# Configuration file inclusion can always include this type
allow_cross_includes = ConfigIncludeMode.ALWAYS
allow_cross_includes = ContentIncludeMode.ALWAYS
def __init__(self, headers=None, **kwargs):
"""

Some files were not shown because too many files have changed in this diff Show More