diff --git a/README.md b/README.md index 109804d85..b1a1c24bd 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,7 @@ If you need something that is not already part of Bazarr, feel free to create a * Assrt * BetaSeries * BSplayer +* Embedded Subtitles * GreekSubtitles * Hosszupuska * LegendasDivx diff --git a/bazarr.py b/bazarr.py index 5b6d077ba..a61540149 100644 --- a/bazarr.py +++ b/bazarr.py @@ -20,8 +20,8 @@ def check_python_version(): print("Python " + minimum_py3_str + " or greater required. " "Current version is " + platform.python_version() + ". Please upgrade Python.") sys.exit(1) - elif int(python_version[0]) == 3 and int(python_version[1]) == 9: - print("Python 3.9.x is unsupported. Current version is " + platform.python_version() + + elif int(python_version[0]) == 3 and int(python_version[1]) > 10: + print("Python version greater than 3.10.x is unsupported. Current version is " + platform.python_version() + ". Keep in mind that even if it works, you're on your own.") elif (int(python_version[0]) == minimum_py3_tuple[0] and int(python_version[1]) < minimum_py3_tuple[1]) or \ (int(python_version[0]) != minimum_py3_tuple[0]): diff --git a/bazarr/api.py b/bazarr/api.py deleted file mode 100644 index 97ea20c4a..000000000 --- a/bazarr/api.py +++ /dev/null @@ -1,2192 +0,0 @@ -# coding=utf-8 - -import sys -import os -import ast -from datetime import timedelta -from dateutil import rrule -import pretty -import time -import operator -from operator import itemgetter -from functools import reduce -import platform -import re -import json -import hashlib -import apprise -import gc -from peewee import fn, Value -import requests -from bs4 import BeautifulSoup as bso - -from get_args import args -from config import settings, base_url, save_settings, get_settings -from logger import empty_log -from init import startTime - -from init import * -import logging -from database import get_exclusion_clause, get_profiles_list, get_desired_languages, get_profile_id_name, \ - 
get_audio_profile_languages, update_profile_id_list, convert_list_to_clause, TableEpisodes, TableShows, \ - TableMovies, TableSettingsLanguages, TableSettingsNotifier, TableLanguagesProfiles, TableHistory, \ - TableHistoryMovie, TableBlacklist, TableBlacklistMovie -from helper import path_mappings -from get_languages import language_from_alpha2, language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2 -from get_subtitle import download_subtitle, series_download_subtitles, manual_search, manual_download_subtitle, \ - manual_upload_subtitle, wanted_search_missing_subtitles_series, wanted_search_missing_subtitles_movies, \ - episode_download_subtitles, movies_download_subtitles -from notifier import send_notifications, send_notifications_movie -from list_subtitles import store_subtitles, store_subtitles_movie, series_scan_subtitles, movies_scan_subtitles, \ - list_missing_subtitles, list_missing_subtitles_movies -from utils import history_log, history_log_movie, blacklist_log, blacklist_delete, blacklist_delete_all, \ - blacklist_log_movie, blacklist_delete_movie, blacklist_delete_all_movie, get_sonarr_info, get_radarr_info, \ - delete_subtitles, subtitles_apply_mods, translate_subtitles_file, check_credentials, get_health_issues -from get_providers import get_providers, get_providers_auth, list_throttled_providers, reset_throttled_providers, \ - get_throttled_providers, set_throttled_providers -from event_handler import event_stream -from scheduler import scheduler -from subsyncer import subsync -from filesystem import browse_bazarr_filesystem, browse_sonarr_filesystem, browse_radarr_filesystem - -from subliminal_patch.core import SUBTITLE_EXTENSIONS, guessit - -from flask import Flask, jsonify, request, Response, Blueprint, url_for, make_response, session - -from flask_restful import Resource, Api, abort -from functools import wraps - -api_bp = Blueprint('api', __name__, url_prefix=base_url.rstrip('/') + '/api') -api = Api(api_bp) - -None_Keys = ['null', 
'undefined', '', None] - -False_Keys = ['False', 'false', '0'] - - -def authenticate(actual_method): - @wraps(actual_method) - def wrapper(*args, **kwargs): - apikey_settings = settings.auth.apikey - apikey_get = request.args.get('apikey') - apikey_post = request.form.get('apikey') - apikey_header = None - if 'X-API-KEY' in request.headers: - apikey_header = request.headers['X-API-KEY'] - - if apikey_settings in [apikey_get, apikey_post, apikey_header]: - return actual_method(*args, **kwargs) - - return abort(401) - - return wrapper - - -def postprocess(item): - # Remove ffprobe_cache - if 'ffprobe_cache' in item: - del (item['ffprobe_cache']) - - # Parse tags - if 'tags' in item: - if item['tags'] is None: - item['tags'] = [] - else: - item['tags'] = ast.literal_eval(item['tags']) - - if 'monitored' in item: - if item['monitored'] is None: - item['monitored'] = False - else: - item['monitored'] = item['monitored'] == 'True' - - if 'hearing_impaired' in item and item['hearing_impaired'] is not None: - if item['hearing_impaired'] is None: - item['hearing_impaired'] = False - else: - item['hearing_impaired'] = item['hearing_impaired'] == 'True' - - if 'language' in item: - if item['language'] == 'None': - item['language'] = None - elif item['language'] is not None: - splitted_language = item['language'].split(':') - item['language'] = {"name": language_from_alpha2(splitted_language[0]), - "code2": splitted_language[0], - "code3": alpha3_from_alpha2(splitted_language[0]), - "forced": True if item['language'].endswith(':forced') else False, - "hi": True if item['language'].endswith(':hi') else False} - - -def postprocessSeries(item): - postprocess(item) - # Parse audio language - if 'audio_language' in item and item['audio_language'] is not None: - item['audio_language'] = get_audio_profile_languages(series_id=item['sonarrSeriesId']) - - if 'alternateTitles' in item: - if item['alternateTitles'] is None: - item['alternativeTitles'] = [] - else: - 
item['alternativeTitles'] = ast.literal_eval(item['alternateTitles']) - del item["alternateTitles"] - - # Parse seriesType - if 'seriesType' in item and item['seriesType'] is not None: - item['seriesType'] = item['seriesType'].capitalize() - - if 'path' in item: - item['path'] = path_mappings.path_replace(item['path']) - - # map poster and fanart to server proxy - if 'poster' in item: - poster = item['poster'] - item['poster'] = f"{base_url}/images/series{poster}" - - if 'fanart' in item: - fanart = item['fanart'] - item['fanart'] = f"{base_url}/images/series{fanart}" - - -def postprocessEpisode(item): - postprocess(item) - if 'audio_language' in item and item['audio_language'] is not None: - item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId']) - - if 'subtitles' in item: - if item['subtitles'] is None: - raw_subtitles = [] - else: - raw_subtitles = ast.literal_eval(item['subtitles']) - subtitles = [] - - for subs in raw_subtitles: - subtitle = subs[0].split(':') - sub = {"name": language_from_alpha2(subtitle[0]), - "code2": subtitle[0], - "code3": alpha3_from_alpha2(subtitle[0]), - "path": path_mappings.path_replace(subs[1]), - "forced": False, - "hi": False} - if len(subtitle) > 1: - sub["forced"] = True if subtitle[1] == 'forced' else False - sub["hi"] = True if subtitle[1] == 'hi' else False - - subtitles.append(sub) - - item.update({"subtitles": subtitles}) - - # Parse missing subtitles - if 'missing_subtitles' in item: - if item['missing_subtitles'] is None: - item['missing_subtitles'] = [] - else: - item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles']) - for i, subs in enumerate(item['missing_subtitles']): - subtitle = subs.split(':') - item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]), - "code2": subtitle[0], - "code3": alpha3_from_alpha2(subtitle[0]), - "forced": False, - "hi": False} - if len(subtitle) > 1: - item['missing_subtitles'][i].update({ - "forced": True if subtitle[1] 
== 'forced' else False, - "hi": True if subtitle[1] == 'hi' else False - }) - - if 'scene_name' in item: - item["sceneName"] = item["scene_name"] - del item["scene_name"] - - if 'path' in item and item['path']: - # Provide mapped path - item['path'] = path_mappings.path_replace(item['path']) - - -# TODO: Move -def postprocessMovie(item): - postprocess(item) - # Parse audio language - if 'audio_language' in item and item['audio_language'] is not None: - item['audio_language'] = get_audio_profile_languages(movie_id=item['radarrId']) - - # Parse alternate titles - if 'alternativeTitles' in item: - if item['alternativeTitles'] is None: - item['alternativeTitles'] = [] - else: - item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles']) - - # Parse failed attempts - if 'failedAttempts' in item: - if item['failedAttempts']: - item['failedAttempts'] = ast.literal_eval(item['failedAttempts']) - - # Parse subtitles - if 'subtitles' in item: - if item['subtitles'] is None: - item['subtitles'] = [] - else: - item['subtitles'] = ast.literal_eval(item['subtitles']) - for i, subs in enumerate(item['subtitles']): - language = subs[0].split(':') - item['subtitles'][i] = {"path": path_mappings.path_replace_movie(subs[1]), - "name": language_from_alpha2(language[0]), - "code2": language[0], - "code3": alpha3_from_alpha2(language[0]), - "forced": False, - "hi": False} - if len(language) > 1: - item['subtitles'][i].update({ - "forced": True if language[1] == 'forced' else False, - "hi": True if language[1] == 'hi' else False - }) - - if settings.general.getboolean('embedded_subs_show_desired'): - desired_lang_list = get_desired_languages(item['profileId']) - item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']] - - item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced')) - - # Parse missing subtitles - if 'missing_subtitles' in item: - if item['missing_subtitles'] is None: - item['missing_subtitles'] = [] 
- else: - item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles']) - for i, subs in enumerate(item['missing_subtitles']): - language = subs.split(':') - item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]), - "code2": language[0], - "code3": alpha3_from_alpha2(language[0]), - "forced": False, - "hi": False} - if len(language) > 1: - item['missing_subtitles'][i].update({ - "forced": True if language[1] == 'forced' else False, - "hi": True if language[1] == 'hi' else False - }) - - # Provide mapped path - if 'path' in item: - if item['path']: - item['path'] = path_mappings.path_replace_movie(item['path']) - - if 'subtitles_path' in item: - # Provide mapped subtitles path - item['subtitles_path'] = path_mappings.path_replace_movie(item['subtitles_path']) - - # map poster and fanart to server proxy - if 'poster' in item: - poster = item['poster'] - item['poster'] = f"{base_url}/images/movies{poster}" - - if 'fanart' in item: - fanart = item['fanart'] - item['fanart'] = f"{base_url}/images/movies{fanart}" - - -class SystemAccount(Resource): - def post(self): - if settings.auth.type != 'form': - return '', 405 - - action = request.args.get('action') - if action == 'login': - username = request.form.get('username') - password = request.form.get('password') - if check_credentials(username, password): - session['logged_in'] = True - return '', 204 - elif action == 'logout': - session.clear() - gc.collect() - return '', 204 - - return '', 401 - - -class System(Resource): - @authenticate - def post(self): - from server import webserver - action = request.args.get('action') - if action == "shutdown": - webserver.shutdown() - elif action == "restart": - webserver.restart() - return '', 204 - - -class Badges(Resource): - @authenticate - def get(self): - episodes_conditions = [(TableEpisodes.missing_subtitles is not None), - (TableEpisodes.missing_subtitles != '[]')] - episodes_conditions += get_exclusion_clause('series') - missing_episodes = 
TableEpisodes.select(TableShows.tags, - TableShows.seriesType, - TableEpisodes.monitored)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(reduce(operator.and_, episodes_conditions))\ - .count() - - movies_conditions = [(TableMovies.missing_subtitles is not None), - (TableMovies.missing_subtitles != '[]')] - movies_conditions += get_exclusion_clause('movie') - missing_movies = TableMovies.select(TableMovies.tags, - TableMovies.monitored)\ - .where(reduce(operator.and_, movies_conditions))\ - .count() - - throttled_providers = len(eval(str(get_throttled_providers()))) - - health_issues = len(get_health_issues()) - - result = { - "episodes": missing_episodes, - "movies": missing_movies, - "providers": throttled_providers, - "status": health_issues - } - return jsonify(result) - - -class Languages(Resource): - @authenticate - def get(self): - history = request.args.get('history') - if history and history not in False_Keys: - languages = list(TableHistory.select(TableHistory.language) - .where(TableHistory.language != None) - .dicts()) - languages += list(TableHistoryMovie.select(TableHistoryMovie.language) - .where(TableHistoryMovie.language != None) - .dicts()) - languages_list = list(set([l['language'].split(':')[0] for l in languages])) - languages_dicts = [] - for language in languages_list: - code2 = None - if len(language) == 2: - code2 = language - elif len(language) == 3: - code2 = alpha2_from_alpha3(language) - else: - continue - - if not any(x['code2'] == code2 for x in languages_dicts): - try: - languages_dicts.append({ - 'code2': code2, - 'name': language_from_alpha2(code2), - # Compatibility: Use false temporarily - 'enabled': False - }) - except: - continue - return jsonify(sorted(languages_dicts, key=itemgetter('name'))) - - result = TableSettingsLanguages.select(TableSettingsLanguages.name, - TableSettingsLanguages.code2, - TableSettingsLanguages.enabled)\ - .order_by(TableSettingsLanguages.name).dicts() - 
result = list(result) - for item in result: - item['enabled'] = item['enabled'] == 1 - return jsonify(result) - - -class LanguagesProfiles(Resource): - @authenticate - def get(self): - return jsonify(get_profiles_list()) - - -class Notifications(Resource): - @authenticate - def patch(self): - url = request.form.get("url") - - asset = apprise.AppriseAsset(async_mode=False) - - apobj = apprise.Apprise(asset=asset) - - apobj.add(url) - - apobj.notify( - title='Bazarr test notification', - body='Test notification' - ) - - return '', 204 - - -class Searches(Resource): - @authenticate - def get(self): - query = request.args.get('query') - search_list = [] - - if query: - if settings.general.getboolean('use_sonarr'): - # Get matching series - series = TableShows.select(TableShows.title, - TableShows.sonarrSeriesId, - TableShows.year)\ - .where(TableShows.title.contains(query))\ - .order_by(TableShows.title)\ - .dicts() - series = list(series) - search_list += series - - if settings.general.getboolean('use_radarr'): - # Get matching movies - movies = TableMovies.select(TableMovies.title, - TableMovies.radarrId, - TableMovies.year) \ - .where(TableMovies.title.contains(query)) \ - .order_by(TableMovies.title) \ - .dicts() - movies = list(movies) - search_list += movies - - return jsonify(search_list) - - -class SystemSettings(Resource): - @authenticate - def get(self): - data = get_settings() - - notifications = TableSettingsNotifier.select().order_by(TableSettingsNotifier.name).dicts() - notifications = list(notifications) - for i, item in enumerate(notifications): - item["enabled"] = item["enabled"] == 1 - notifications[i] = item - - data['notifications'] = dict() - data['notifications']['providers'] = notifications - - return jsonify(data) - - @authenticate - def post(self): - enabled_languages = request.form.getlist('languages-enabled') - if len(enabled_languages) != 0: - TableSettingsLanguages.update({ - TableSettingsLanguages.enabled: 0 - }).execute() - for code in 
enabled_languages: - TableSettingsLanguages.update({ - TableSettingsLanguages.enabled: 1 - })\ - .where(TableSettingsLanguages.code2 == code)\ - .execute() - event_stream("languages") - - languages_profiles = request.form.get('languages-profiles') - if languages_profiles: - existing_ids = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId).dicts() - existing_ids = list(existing_ids) - existing = [x['profileId'] for x in existing_ids] - for item in json.loads(languages_profiles): - if item['profileId'] in existing: - # Update existing profiles - TableLanguagesProfiles.update({ - TableLanguagesProfiles.name: item['name'], - TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None, - TableLanguagesProfiles.items: json.dumps(item['items']) - })\ - .where(TableLanguagesProfiles.profileId == item['profileId'])\ - .execute() - existing.remove(item['profileId']) - else: - # Add new profiles - TableLanguagesProfiles.insert({ - TableLanguagesProfiles.profileId: item['profileId'], - TableLanguagesProfiles.name: item['name'], - TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None, - TableLanguagesProfiles.items: json.dumps(item['items']) - }).execute() - for profileId in existing: - # Unassign this profileId from series and movies - TableShows.update({ - TableShows.profileId: None - }).where(TableShows.profileId == profileId).execute() - TableMovies.update({ - TableMovies.profileId: None - }).where(TableMovies.profileId == profileId).execute() - # Remove deleted profiles - TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute() - - update_profile_id_list() - event_stream("languages") - - if settings.general.getboolean('use_sonarr'): - scheduler.add_job(list_missing_subtitles, kwargs={'send_event': False}) - if settings.general.getboolean('use_radarr'): - scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': False}) - - # Update Notification - 
notifications = request.form.getlist('notifications-providers') - for item in notifications: - item = json.loads(item) - TableSettingsNotifier.update({ - TableSettingsNotifier.enabled: item['enabled'], - TableSettingsNotifier.url: item['url'] - }).where(TableSettingsNotifier.name == item['name']).execute() - - save_settings(zip(request.form.keys(), request.form.listvalues())) - event_stream("settings") - return '', 204 - - -class SystemTasks(Resource): - @authenticate - def get(self): - taskid = request.args.get('taskid') - - task_list = scheduler.get_task_list() - - if taskid: - for item in task_list: - if item['job_id'] == taskid: - task_list = [item] - continue - - return jsonify(data=task_list) - - @authenticate - def post(self): - taskid = request.form.get('taskid') - - scheduler.execute_job_now(taskid) - - return '', 204 - - -class SystemLogs(Resource): - @authenticate - def get(self): - logs = [] - with io.open(os.path.join(args.config_dir, 'log', 'bazarr.log'), encoding='UTF-8') as file: - raw_lines = file.read() - lines = raw_lines.split('|\n') - for line in lines: - if line == '': - continue - raw_message = line.split('|') - raw_message_len = len(raw_message) - if raw_message_len > 3: - log = dict() - log["timestamp"] = raw_message[0] - log["type"] = raw_message[1].rstrip() - log["message"] = raw_message[3] - if raw_message_len > 4 and raw_message[4] != '\n': - log['exception'] = raw_message[4].strip('\'').replace(' ', '\u2003\u2003') - logs.append(log) - - logs.reverse() - return jsonify(data=logs) - - @authenticate - def delete(self): - empty_log() - return '', 204 - - -class SystemStatus(Resource): - @authenticate - def get(self): - system_status = {} - system_status.update({'bazarr_version': os.environ["BAZARR_VERSION"]}) - system_status.update({'sonarr_version': get_sonarr_info.version()}) - system_status.update({'radarr_version': get_radarr_info.version()}) - system_status.update({'operating_system': platform.platform()}) - 
system_status.update({'python_version': platform.python_version()}) - system_status.update({'bazarr_directory': os.path.dirname(os.path.dirname(__file__))}) - system_status.update({'bazarr_config_directory': args.config_dir}) - system_status.update({'start_time': startTime}) - return jsonify(data=system_status) - - -class SystemHealth(Resource): - @authenticate - def get(self): - return jsonify(data=get_health_issues()) - - -class SystemReleases(Resource): - @authenticate - def get(self): - filtered_releases = [] - try: - with io.open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r', encoding='UTF-8') as f: - releases = json.loads(f.read()) - - for release in releases: - if settings.general.branch == 'master' and not release['prerelease']: - filtered_releases.append(release) - elif settings.general.branch != 'master' and any(not x['prerelease'] for x in filtered_releases): - continue - elif settings.general.branch != 'master': - filtered_releases.append(release) - if settings.general.branch == 'master': - filtered_releases = filtered_releases[:5] - - current_version = os.environ["BAZARR_VERSION"] - - for i, release in enumerate(filtered_releases): - body = release['body'].replace('- ', '').split('\n')[1:] - filtered_releases[i] = {"body": body, - "name": release['name'], - "date": release['date'][:10], - "prerelease": release['prerelease'], - "current": release['name'].lstrip('v') == current_version} - - except Exception as e: - logging.exception( - 'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt')) - return jsonify(data=filtered_releases) - - -class Series(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - seriesId = request.args.getlist('seriesid[]') - - count = TableShows.select().count() - - if len(seriesId) != 0: - result = TableShows.select()\ - .where(TableShows.sonarrSeriesId.in_(seriesId))\ - 
.order_by(TableShows.sortTitle).dicts() - else: - result = TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts() - - result = list(result) - - for item in result: - postprocessSeries(item) - - # Add missing subtitles episode count - episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']), - (TableEpisodes.missing_subtitles != '[]')] - episodes_missing_conditions += get_exclusion_clause('series') - - episodeMissingCount = TableEpisodes.select(TableShows.tags, - TableEpisodes.monitored, - TableShows.seriesType)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(reduce(operator.and_, episodes_missing_conditions))\ - .count() - item.update({"episodeMissingCount": episodeMissingCount}) - - # Add episode count - episodeFileCount = TableEpisodes.select(TableShows.tags, - TableEpisodes.monitored, - TableShows.seriesType)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId'])\ - .count() - item.update({"episodeFileCount": episodeFileCount}) - - return jsonify(data=result, total=count) - - @authenticate - def post(self): - seriesIdList = request.form.getlist('seriesid') - profileIdList = request.form.getlist('profileid') - - for idx in range(len(seriesIdList)): - seriesId = seriesIdList[idx] - profileId = profileIdList[idx] - - if profileId in None_Keys: - profileId = None - else: - try: - profileId = int(profileId) - except Exception: - return '', 400 - - TableShows.update({ - TableShows.profileId: profileId - })\ - .where(TableShows.sonarrSeriesId == seriesId)\ - .execute() - - list_missing_subtitles(no=seriesId, send_event=False) - - event_stream(type='series', payload=seriesId) - - episode_id_list = TableEpisodes\ - .select(TableEpisodes.sonarrEpisodeId)\ - .where(TableEpisodes.sonarrSeriesId == seriesId)\ - .dicts() - - for item in episode_id_list: - 
event_stream(type='episode-wanted', payload=item['sonarrEpisodeId']) - - event_stream(type='badges') - - return '', 204 - - @authenticate - def patch(self): - seriesid = request.form.get('seriesid') - action = request.form.get('action') - if action == "scan-disk": - series_scan_subtitles(seriesid) - return '', 204 - elif action == "search-missing": - series_download_subtitles(seriesid) - return '', 204 - elif action == "search-wanted": - wanted_search_missing_subtitles_series() - return '', 204 - - return '', 400 - - -class Episodes(Resource): - @authenticate - def get(self): - seriesId = request.args.getlist('seriesid[]') - episodeId = request.args.getlist('episodeid[]') - - if len(episodeId) > 0: - result = TableEpisodes.select().where(TableEpisodes.sonarrEpisodeId.in_(episodeId)).dicts() - elif len(seriesId) > 0: - result = TableEpisodes.select()\ - .where(TableEpisodes.sonarrSeriesId.in_(seriesId))\ - .order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc())\ - .dicts() - else: - return "Series or Episode ID not provided", 400 - - result = list(result) - for item in result: - postprocessEpisode(item) - - return jsonify(data=result) - - -# PATCH: Download Subtitles -# POST: Upload Subtitles -# DELETE: Delete Subtitles -class EpisodesSubtitles(Resource): - @authenticate - def patch(self): - sonarrSeriesId = request.args.get('seriesid') - sonarrEpisodeId = request.args.get('episodeid') - episodeInfo = TableEpisodes.select(TableEpisodes.title, - TableEpisodes.path, - TableEpisodes.scene_name, - TableEpisodes.audio_language)\ - .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\ - .dicts()\ - .get() - - title = episodeInfo['title'] - episodePath = path_mappings.path_replace(episodeInfo['path']) - sceneName = episodeInfo['scene_name'] - audio_language = episodeInfo['audio_language'] - if sceneName is None: sceneName = "None" - - language = request.form.get('language') - hi = request.form.get('hi').capitalize() - forced = 
request.form.get('forced').capitalize() - - providers_list = get_providers() - providers_auth = get_providers_auth() - - audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId) - if len(audio_language_list) > 0: - audio_language = audio_language_list[0]['name'] - else: - audio_language = None - - try: - result = download_subtitle(episodePath, language, audio_language, hi, forced, providers_list, - providers_auth, sceneName, title, 'series') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id, - subs_path) - send_notifications(sonarrSeriesId, sonarrEpisodeId, message) - store_subtitles(path, episodePath) - else: - event_stream(type='episode', payload=sonarrEpisodeId) - - except OSError: - pass - - return '', 204 - - @authenticate - def post(self): - sonarrSeriesId = request.args.get('seriesid') - sonarrEpisodeId = request.args.get('episodeid') - episodeInfo = TableEpisodes.select(TableEpisodes.title, - TableEpisodes.path, - TableEpisodes.scene_name, - TableEpisodes.audio_language)\ - .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\ - .dicts()\ - .get() - - title = episodeInfo['title'] - episodePath = path_mappings.path_replace(episodeInfo['path']) - sceneName = episodeInfo['scene_name'] - audio_language = episodeInfo['audio_language'] - if sceneName is None: sceneName = "None" - - language = request.form.get('language') - forced = True if request.form.get('forced') == 'true' else False - hi = True if request.form.get('hi') == 'true' else False - subFile = request.files.get('file') - - _, ext = os.path.splitext(subFile.filename) - - if ext not in SUBTITLE_EXTENSIONS: - 
raise ValueError('A subtitle of an invalid format was uploaded.') - - try: - result = manual_upload_subtitle(path=episodePath, - language=language, - forced=forced, - hi=hi, - title=title, - scene_name=sceneName, - media_type='series', - subtitle=subFile, - audio_language=audio_language) - - if result is not None: - message = result[0] - path = result[1] - subs_path = result[2] - if hi: - language_code = language + ":hi" - elif forced: - language_code = language + ":forced" - else: - language_code = language - provider = "manual" - score = 360 - history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, - subtitles_path=subs_path) - if not settings.general.getboolean('dont_notify_manual_actions'): - send_notifications(sonarrSeriesId, sonarrEpisodeId, message) - store_subtitles(path, episodePath) - - except OSError: - pass - - return '', 204 - - @authenticate - def delete(self): - sonarrSeriesId = request.args.get('seriesid') - sonarrEpisodeId = request.args.get('episodeid') - episodeInfo = TableEpisodes.select(TableEpisodes.title, - TableEpisodes.path, - TableEpisodes.scene_name, - TableEpisodes.audio_language)\ - .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\ - .dicts()\ - .get() - - episodePath = path_mappings.path_replace(episodeInfo['path']) - - language = request.form.get('language') - forced = request.form.get('forced') - hi = request.form.get('hi') - subtitlesPath = request.form.get('path') - - subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath) - - result = delete_subtitles(media_type='series', - language=language, - forced=forced, - hi=hi, - media_path=episodePath, - subtitles_path=subtitlesPath, - sonarr_series_id=sonarrSeriesId, - sonarr_episode_id=sonarrEpisodeId) - - return '', 204 - - -class Movies(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - radarrId = request.args.getlist('radarrid[]') - - count = 
TableMovies.select().count() - - if len(radarrId) != 0: - result = TableMovies.select()\ - .where(TableMovies.radarrId.in_(radarrId))\ - .order_by(TableMovies.sortTitle)\ - .dicts() - else: - result = TableMovies.select().order_by(TableMovies.sortTitle).limit(length).offset(start).dicts() - result = list(result) - for item in result: - postprocessMovie(item) - - return jsonify(data=result, total=count) - - @authenticate - def post(self): - radarrIdList = request.form.getlist('radarrid') - profileIdList = request.form.getlist('profileid') - - for idx in range(len(radarrIdList)): - radarrId = radarrIdList[idx] - profileId = profileIdList[idx] - - if profileId in None_Keys: - profileId = None - else: - try: - profileId = int(profileId) - except Exception: - return '', 400 - - TableMovies.update({ - TableMovies.profileId: profileId - })\ - .where(TableMovies.radarrId == radarrId)\ - .execute() - - list_missing_subtitles_movies(no=radarrId, send_event=False) - - event_stream(type='movie', payload=radarrId) - event_stream(type='movie-wanted', payload=radarrId) - event_stream(type='badges') - - return '', 204 - - @authenticate - def patch(self): - radarrid = request.form.get('radarrid') - action = request.form.get('action') - if action == "scan-disk": - movies_scan_subtitles(radarrid) - return '', 204 - elif action == "search-missing": - movies_download_subtitles(radarrid) - return '', 204 - elif action == "search-wanted": - wanted_search_missing_subtitles_movies() - return '', 204 - - return '', 400 - - -""" -:param language: Alpha2 language code -""" - - -class MoviesSubtitles(Resource): - @authenticate - def patch(self): - # Download - radarrId = request.args.get('radarrid') - - movieInfo = TableMovies.select(TableMovies.title, - TableMovies.path, - TableMovies.sceneName, - TableMovies.audio_language)\ - .where(TableMovies.radarrId == radarrId)\ - .dicts()\ - .get() - - moviePath = path_mappings.path_replace_movie(movieInfo['path']) - sceneName = movieInfo['sceneName'] 
- if sceneName is None: sceneName = 'None' - - title = movieInfo['title'] - audio_language = movieInfo['audio_language'] - - language = request.form.get('language') - hi = request.form.get('hi').capitalize() - forced = request.form.get('forced').capitalize() - - providers_list = get_providers() - providers_auth = get_providers_auth() - - audio_language_list = get_audio_profile_languages(movie_id=radarrId) - if len(audio_language_list) > 0: - audio_language = audio_language_list[0]['name'] - else: - audio_language = None - - try: - result = download_subtitle(moviePath, language, audio_language, hi, forced, providers_list, - providers_auth, sceneName, title, 'movie') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - history_log_movie(1, radarrId, message, path, language_code, provider, score, subs_id, subs_path) - send_notifications_movie(radarrId, message) - store_subtitles_movie(path, moviePath) - else: - event_stream(type='movie', payload=radarrId) - except OSError: - pass - - return '', 204 - - @authenticate - def post(self): - # Upload - # TODO: Support Multiply Upload - radarrId = request.args.get('radarrid') - movieInfo = TableMovies.select(TableMovies.title, - TableMovies.path, - TableMovies.sceneName, - TableMovies.audio_language) \ - .where(TableMovies.radarrId == radarrId) \ - .dicts() \ - .get() - - moviePath = path_mappings.path_replace_movie(movieInfo['path']) - sceneName = movieInfo['sceneName'] - if sceneName is None: sceneName = 'None' - - title = movieInfo['title'] - audioLanguage = movieInfo['audio_language'] - - language = request.form.get('language') - forced = True if request.form.get('forced') == 'true' else False - hi = True if request.form.get('hi') == 'true' else False - 
subFile = request.files.get('file') - - _, ext = os.path.splitext(subFile.filename) - - if ext not in SUBTITLE_EXTENSIONS: - raise ValueError('A subtitle of an invalid format was uploaded.') - - try: - result = manual_upload_subtitle(path=moviePath, - language=language, - forced=forced, - hi=hi, - title=title, - scene_name=sceneName, - media_type='movie', - subtitle=subFile, - audio_language=audioLanguage) - - if result is not None: - message = result[0] - path = result[1] - subs_path = result[2] - if hi: - language_code = language + ":hi" - elif forced: - language_code = language + ":forced" - else: - language_code = language - provider = "manual" - score = 120 - history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path) - if not settings.general.getboolean('dont_notify_manual_actions'): - send_notifications_movie(radarrId, message) - store_subtitles_movie(path, moviePath) - except OSError: - pass - - return '', 204 - - @authenticate - def delete(self): - # Delete - radarrId = request.args.get('radarrid') - movieInfo = TableMovies.select(TableMovies.path) \ - .where(TableMovies.radarrId == radarrId) \ - .dicts() \ - .get() - - moviePath = path_mappings.path_replace_movie(movieInfo['path']) - - language = request.form.get('language') - forced = request.form.get('forced') - hi = request.form.get('hi') - subtitlesPath = request.form.get('path') - - subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath) - - result = delete_subtitles(media_type='movie', - language=language, - forced=forced, - hi=hi, - media_path=moviePath, - subtitles_path=subtitlesPath, - radarr_id=radarrId) - if result: - return '', 202 - else: - return '', 204 - - -class Providers(Resource): - @authenticate - def get(self): - history = request.args.get('history') - if history and history not in False_Keys: - providers = list(TableHistory.select(TableHistory.provider) - .where(TableHistory.provider != None and TableHistory.provider != 
"manual") - .dicts()) - providers += list(TableHistoryMovie.select(TableHistoryMovie.provider) - .where(TableHistoryMovie.provider != None and TableHistoryMovie.provider != "manual") - .dicts()) - providers_list = list(set([x['provider'] for x in providers])) - providers_dicts = [] - for provider in providers_list: - providers_dicts.append({ - 'name': provider, - 'status': 'History', - 'retry': '-' - }) - return jsonify(data=sorted(providers_dicts, key=itemgetter('name'))) - - throttled_providers = list_throttled_providers() - - providers = list() - for provider in throttled_providers: - providers.append({ - "name": provider[0], - "status": provider[1] if provider[1] is not None else "Good", - "retry": provider[2] if provider[2] != "now" else "-" - }) - return jsonify(data=providers) - - @authenticate - def post(self): - action = request.form.get('action') - - if action == 'reset': - reset_throttled_providers() - return '', 204 - - return '', 400 - - -class ProviderMovies(Resource): - @authenticate - def get(self): - # Manual Search - radarrId = request.args.get('radarrid') - movieInfo = TableMovies.select(TableMovies.title, - TableMovies.path, - TableMovies.sceneName, - TableMovies.profileId) \ - .where(TableMovies.radarrId == radarrId) \ - .dicts() \ - .get() - - title = movieInfo['title'] - moviePath = path_mappings.path_replace_movie(movieInfo['path']) - sceneName = movieInfo['sceneName'] - profileId = movieInfo['profileId'] - if sceneName is None: sceneName = "None" - - providers_list = get_providers() - providers_auth = get_providers_auth() - - data = manual_search(moviePath, profileId, providers_list, providers_auth, sceneName, title, - 'movie') - if not data: - data = [] - return jsonify(data=data) - - @authenticate - def post(self): - # Manual Download - radarrId = request.args.get('radarrid') - movieInfo = TableMovies.select(TableMovies.title, - TableMovies.path, - TableMovies.sceneName, - TableMovies.audio_language) \ - .where(TableMovies.radarrId == 
radarrId) \ - .dicts() \ - .get() - - title = movieInfo['title'] - moviePath = path_mappings.path_replace_movie(movieInfo['path']) - sceneName = movieInfo['sceneName'] - if sceneName is None: sceneName = "None" - audio_language = movieInfo['audio_language'] - - language = request.form.get('language') - hi = request.form.get('hi').capitalize() - forced = request.form.get('forced').capitalize() - selected_provider = request.form.get('provider') - subtitle = request.form.get('subtitle') - - providers_auth = get_providers_auth() - - audio_language_list = get_audio_profile_languages(movie_id=radarrId) - if len(audio_language_list) > 0: - audio_language = audio_language_list[0]['name'] - else: - audio_language = 'None' - - try: - result = manual_download_subtitle(moviePath, language, audio_language, hi, forced, subtitle, - selected_provider, providers_auth, sceneName, title, 'movie') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path) - if not settings.general.getboolean('dont_notify_manual_actions'): - send_notifications_movie(radarrId, message) - store_subtitles_movie(path, moviePath) - except OSError: - pass - - return '', 204 - - -class ProviderEpisodes(Resource): - @authenticate - def get(self): - # Manual Search - sonarrEpisodeId = request.args.get('episodeid') - episodeInfo = TableEpisodes.select(TableEpisodes.title, - TableEpisodes.path, - TableEpisodes.scene_name, - TableShows.profileId) \ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \ - .dicts() \ - .get() - - title = episodeInfo['title'] - episodePath = 
path_mappings.path_replace(episodeInfo['path']) - sceneName = episodeInfo['scene_name'] - profileId = episodeInfo['profileId'] - if sceneName is None: sceneName = "None" - - providers_list = get_providers() - providers_auth = get_providers_auth() - - data = manual_search(episodePath, profileId, providers_list, providers_auth, sceneName, title, - 'series') - if not data: - data = [] - return jsonify(data=data) - - @authenticate - def post(self): - # Manual Download - sonarrSeriesId = request.args.get('seriesid') - sonarrEpisodeId = request.args.get('episodeid') - episodeInfo = TableEpisodes.select(TableEpisodes.title, - TableEpisodes.path, - TableEpisodes.scene_name) \ - .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \ - .dicts() \ - .get() - - title = episodeInfo['title'] - episodePath = path_mappings.path_replace(episodeInfo['path']) - sceneName = episodeInfo['scene_name'] - if sceneName is None: sceneName = "None" - - language = request.form.get('language') - hi = request.form.get('hi').capitalize() - forced = request.form.get('forced').capitalize() - selected_provider = request.form.get('provider') - subtitle = request.form.get('subtitle') - providers_auth = get_providers_auth() - - audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId) - if len(audio_language_list) > 0: - audio_language = audio_language_list[0]['name'] - else: - audio_language = 'None' - - try: - result = manual_download_subtitle(episodePath, language, audio_language, hi, forced, subtitle, - selected_provider, providers_auth, sceneName, title, 'series') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, 
subs_id, - subs_path) - if not settings.general.getboolean('dont_notify_manual_actions'): - send_notifications(sonarrSeriesId, sonarrEpisodeId, message) - store_subtitles(path, episodePath) - return result, 201 - except OSError: - pass - - return '', 204 - - -class EpisodesHistory(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - episodeid = request.args.get('episodeid') - - upgradable_episodes_not_perfect = [] - if settings.general.getboolean('upgrade_subs'): - days_to_upgrade_subs = settings.general.days_to_upgrade_subs - minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) - - datetime.datetime(1970, 1, 1)).total_seconds() - - if settings.general.getboolean('upgrade_manual'): - query_actions = [1, 2, 3, 6] - else: - query_actions = [1, 3] - - upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)), - (TableHistory.timestamp > minimum_timestamp), - (TableHistory.score is not None)] - upgradable_episodes_conditions += get_exclusion_clause('series') - upgradable_episodes = TableHistory.select(TableHistory.video_path, - fn.MAX(TableHistory.timestamp).alias('timestamp'), - TableHistory.score, - TableShows.tags, - TableEpisodes.monitored, - TableShows.seriesType)\ - .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\ - .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(reduce(operator.and_, upgradable_episodes_conditions))\ - .group_by(TableHistory.video_path)\ - .dicts() - upgradable_episodes = list(upgradable_episodes) - for upgradable_episode in upgradable_episodes: - if upgradable_episode['timestamp'] > minimum_timestamp: - try: - int(upgradable_episode['score']) - except ValueError: - pass - else: - if int(upgradable_episode['score']) < 360: - upgradable_episodes_not_perfect.append(upgradable_episode) - - query_conditions = [(TableEpisodes.title is 
not None)] - if episodeid: - query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid)) - query_condition = reduce(operator.and_, query_conditions) - episode_history = TableHistory.select(TableHistory.id, - TableShows.title.alias('seriesTitle'), - TableEpisodes.monitored, - TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'), - TableEpisodes.title.alias('episodeTitle'), - TableHistory.timestamp, - TableHistory.subs_id, - TableHistory.description, - TableHistory.sonarrSeriesId, - TableEpisodes.path, - TableHistory.language, - TableHistory.score, - TableShows.tags, - TableHistory.action, - TableHistory.subtitles_path, - TableHistory.sonarrEpisodeId, - TableHistory.provider, - TableShows.seriesType)\ - .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\ - .where(query_condition)\ - .order_by(TableHistory.timestamp.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - episode_history = list(episode_history) - - blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts() - blacklist_db = list(blacklist_db) - - for item in episode_history: - # Mark episode as upgradable or not - item.update({"upgradable": False}) - if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']), - "tags": str(item['tags']), "monitored": str(item['monitored']), - "seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect: - if os.path.isfile(path_mappings.path_replace(item['subtitles_path'])): - item.update({"upgradable": True}) - - del item['path'] - - postprocessEpisode(item) - - if item['score']: - item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%" - - # Make timestamp pretty - if item['timestamp']: - item["raw_timestamp"] = int(item['timestamp']) - item["parsed_timestamp"] = 
datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') - item['timestamp'] = pretty.date(item["raw_timestamp"]) - - # Check if subtitles is blacklisted - item.update({"blacklisted": False}) - if item['action'] not in [0, 4, 5]: - for blacklisted_item in blacklist_db: - if blacklisted_item['provider'] == item['provider'] and \ - blacklisted_item['subs_id'] == item['subs_id']: - item.update({"blacklisted": True}) - break - - count = TableHistory.select()\ - .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\ - .where(TableEpisodes.title is not None).count() - - return jsonify(data=episode_history, total=count) - - -class MoviesHistory(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - radarrid = request.args.get('radarrid') - - upgradable_movies = [] - upgradable_movies_not_perfect = [] - if settings.general.getboolean('upgrade_subs'): - days_to_upgrade_subs = settings.general.days_to_upgrade_subs - minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) - - datetime.datetime(1970, 1, 1)).total_seconds() - - if settings.general.getboolean('upgrade_manual'): - query_actions = [1, 2, 3, 6] - else: - query_actions = [1, 3] - - upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)), - (TableHistoryMovie.timestamp > minimum_timestamp), - (TableHistoryMovie.score is not None)] - upgradable_movies_conditions += get_exclusion_clause('movie') - upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path, - fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'), - TableHistoryMovie.score, - TableMovies.tags, - TableMovies.monitored)\ - .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ - .where(reduce(operator.and_, upgradable_movies_conditions))\ - .group_by(TableHistoryMovie.video_path)\ - .dicts() - upgradable_movies = list(upgradable_movies) 
- - for upgradable_movie in upgradable_movies: - if upgradable_movie['timestamp'] > minimum_timestamp: - try: - int(upgradable_movie['score']) - except ValueError: - pass - else: - if int(upgradable_movie['score']) < 120: - upgradable_movies_not_perfect.append(upgradable_movie) - - query_conditions = [(TableMovies.title is not None)] - if radarrid: - query_conditions.append((TableMovies.radarrId == radarrid)) - query_condition = reduce(operator.and_, query_conditions) - - movie_history = TableHistoryMovie.select(TableHistoryMovie.id, - TableHistoryMovie.action, - TableMovies.title, - TableHistoryMovie.timestamp, - TableHistoryMovie.description, - TableHistoryMovie.radarrId, - TableMovies.monitored, - TableHistoryMovie.video_path.alias('path'), - TableHistoryMovie.language, - TableMovies.tags, - TableHistoryMovie.score, - TableHistoryMovie.subs_id, - TableHistoryMovie.provider, - TableHistoryMovie.subtitles_path)\ - .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ - .where(query_condition)\ - .order_by(TableHistoryMovie.timestamp.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - movie_history = list(movie_history) - - blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts() - blacklist_db = list(blacklist_db) - - for item in movie_history: - # Mark movies as upgradable or not - item.update({"upgradable": False}) - if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']), - "tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect: - if os.path.isfile(path_mappings.path_replace_movie(item['subtitles_path'])): - item.update({"upgradable": True}) - - del item['path'] - - postprocessMovie(item) - - if item['score']: - item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%" - - # Make timestamp pretty - if item['timestamp']: - item["raw_timestamp"] = int(item['timestamp']) - 
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') - item['timestamp'] = pretty.date(item["raw_timestamp"]) - - # Check if subtitles is blacklisted - item.update({"blacklisted": False}) - if item['action'] not in [0, 4, 5]: - for blacklisted_item in blacklist_db: - if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[ - 'subs_id']: - item.update({"blacklisted": True}) - break - - count = TableHistoryMovie.select()\ - .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ - .where(TableMovies.title is not None)\ - .count() - - return jsonify(data=movie_history, total=count) - - -class HistoryStats(Resource): - @authenticate - def get(self): - timeframe = request.args.get('timeframe') or 'month' - action = request.args.get('action') or 'All' - provider = request.args.get('provider') or 'All' - language = request.args.get('language') or 'All' - - # timeframe must be in ['week', 'month', 'trimester', 'year'] - if timeframe == 'year': - delay = 364 * 24 * 60 * 60 - elif timeframe == 'trimester': - delay = 90 * 24 * 60 * 60 - elif timeframe == 'month': - delay = 30 * 24 * 60 * 60 - elif timeframe == 'week': - delay = 6 * 24 * 60 * 60 - - now = time.time() - past = now - delay - - history_where_clauses = [(TableHistory.timestamp.between(past, now))] - history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))] - - if action != 'All': - history_where_clauses.append((TableHistory.action == action)) - history_where_clauses_movie.append((TableHistoryMovie.action == action)) - else: - history_where_clauses.append((TableHistory.action.in_([1, 2, 3]))) - history_where_clauses_movie.append((TableHistoryMovie.action.in_([1, 2, 3]))) - - if provider != 'All': - history_where_clauses.append((TableHistory.provider == provider)) - history_where_clauses_movie.append((TableHistoryMovie.provider == provider)) - - if language != 'All': - 
history_where_clauses.append((TableHistory.language == language)) - history_where_clauses_movie.append((TableHistoryMovie.language == language)) - - history_where_clause = reduce(operator.and_, history_where_clauses) - history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie) - - data_series = TableHistory.select(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch').alias('date'), - fn.COUNT(TableHistory.id).alias('count'))\ - .where(history_where_clause) \ - .group_by(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch'))\ - .dicts() - data_series = list(data_series) - - data_movies = TableHistoryMovie.select(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch').alias('date'), - fn.COUNT(TableHistoryMovie.id).alias('count')) \ - .where(history_where_clause_movie) \ - .group_by(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch')) \ - .dicts() - data_movies = list(data_movies) - - for dt in rrule.rrule(rrule.DAILY, - dtstart=datetime.datetime.now() - datetime.timedelta(seconds=delay), - until=datetime.datetime.now()): - if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_series): - data_series.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0}) - if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_movies): - data_movies.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0}) - - sorted_data_series = sorted(data_series, key=lambda i: i['date']) - sorted_data_movies = sorted(data_movies, key=lambda i: i['date']) - - return jsonify(series=sorted_data_series, movies=sorted_data_movies) - - -# GET: Get Wanted Episodes -class EpisodesWanted(Resource): - @authenticate - def get(self): - episodeid = request.args.getlist('episodeid[]') - - wanted_conditions = [(TableEpisodes.missing_subtitles != '[]')] - if len(episodeid) > 0: - wanted_conditions.append((TableEpisodes.sonarrEpisodeId in episodeid)) - wanted_conditions += get_exclusion_clause('series') - wanted_condition = reduce(operator.and_, 
wanted_conditions) - - if len(episodeid) > 0: - data = TableEpisodes.select(TableShows.title.alias('seriesTitle'), - TableEpisodes.monitored, - TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'), - TableEpisodes.title.alias('episodeTitle'), - TableEpisodes.missing_subtitles, - TableEpisodes.sonarrSeriesId, - TableEpisodes.sonarrEpisodeId, - TableEpisodes.scene_name.alias('sceneName'), - TableShows.tags, - TableEpisodes.failedAttempts, - TableShows.seriesType)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(wanted_condition)\ - .dicts() - else: - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - data = TableEpisodes.select(TableShows.title.alias('seriesTitle'), - TableEpisodes.monitored, - TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'), - TableEpisodes.title.alias('episodeTitle'), - TableEpisodes.missing_subtitles, - TableEpisodes.sonarrSeriesId, - TableEpisodes.sonarrEpisodeId, - TableEpisodes.scene_name.alias('sceneName'), - TableShows.tags, - TableEpisodes.failedAttempts, - TableShows.seriesType)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(wanted_condition)\ - .order_by(TableEpisodes.rowid.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - data = list(data) - - for item in data: - postprocessEpisode(item) - - count_conditions = [(TableEpisodes.missing_subtitles != '[]')] - count_conditions += get_exclusion_clause('series') - count = TableEpisodes.select(TableShows.tags, - TableShows.seriesType, - TableEpisodes.monitored)\ - .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ - .where(reduce(operator.and_, count_conditions))\ - .count() - - return jsonify(data=data, total=count) - - -# GET: Get Wanted Movies -class MoviesWanted(Resource): - @authenticate - def get(self): - radarrid = request.args.getlist("radarrid[]") - - 
wanted_conditions = [(TableMovies.missing_subtitles != '[]')] - if len(radarrid) > 0: - wanted_conditions.append((TableMovies.radarrId.in_(radarrid))) - wanted_conditions += get_exclusion_clause('movie') - wanted_condition = reduce(operator.and_, wanted_conditions) - - if len(radarrid) > 0: - result = TableMovies.select(TableMovies.title, - TableMovies.missing_subtitles, - TableMovies.radarrId, - TableMovies.sceneName, - TableMovies.failedAttempts, - TableMovies.tags, - TableMovies.monitored)\ - .where(wanted_condition)\ - .dicts() - else: - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - result = TableMovies.select(TableMovies.title, - TableMovies.missing_subtitles, - TableMovies.radarrId, - TableMovies.sceneName, - TableMovies.failedAttempts, - TableMovies.tags, - TableMovies.monitored)\ - .where(wanted_condition)\ - .order_by(TableMovies.rowid.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - result = list(result) - - for item in result: - postprocessMovie(item) - - count_conditions = [(TableMovies.missing_subtitles != '[]')] - count_conditions += get_exclusion_clause('movie') - count = TableMovies.select(TableMovies.monitored, - TableMovies.tags)\ - .where(reduce(operator.and_, count_conditions))\ - .count() - - return jsonify(data=result, total=count) - - -# GET: get blacklist -# POST: add blacklist -# DELETE: remove blacklist -class EpisodesBlacklist(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = request.args.get('length') or -1 - - data = TableBlacklist.select(TableShows.title.alias('seriesTitle'), - TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'), - TableEpisodes.title.alias('episodeTitle'), - TableEpisodes.sonarrSeriesId, - TableBlacklist.provider, - TableBlacklist.subs_id, - TableBlacklist.language, - TableBlacklist.timestamp)\ - .join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\ - 
.join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\ - .order_by(TableBlacklist.timestamp.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - data = list(data) - - for item in data: - # Make timestamp pretty - item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') - item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))}) - - postprocessEpisode(item) - - return jsonify(data=data) - - @authenticate - def post(self): - sonarr_series_id = int(request.args.get('seriesid')) - sonarr_episode_id = int(request.args.get('episodeid')) - provider = request.form.get('provider') - subs_id = request.form.get('subs_id') - language = request.form.get('language') - - episodeInfo = TableEpisodes.select(TableEpisodes.path)\ - .where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)\ - .dicts()\ - .get() - - media_path = episodeInfo['path'] - subtitles_path = request.form.get('subtitles_path') - - blacklist_log(sonarr_series_id=sonarr_series_id, - sonarr_episode_id=sonarr_episode_id, - provider=provider, - subs_id=subs_id, - language=language) - delete_subtitles(media_type='series', - language=language, - forced=False, - hi=False, - media_path=path_mappings.path_replace(media_path), - subtitles_path=subtitles_path, - sonarr_series_id=sonarr_series_id, - sonarr_episode_id=sonarr_episode_id) - episode_download_subtitles(sonarr_episode_id) - event_stream(type='episode-history') - return '', 200 - - @authenticate - def delete(self): - if request.args.get("all") == "true": - blacklist_delete_all() - else: - provider = request.form.get('provider') - subs_id = request.form.get('subs_id') - blacklist_delete(provider=provider, subs_id=subs_id) - return '', 204 - - -# GET: get blacklist -# POST: add blacklist -# DELETE: remove blacklist -class MoviesBlacklist(Resource): - @authenticate - def get(self): - start = request.args.get('start') or 0 - length = 
request.args.get('length') or -1 - - data = TableBlacklistMovie.select(TableMovies.title, - TableMovies.radarrId, - TableBlacklistMovie.provider, - TableBlacklistMovie.subs_id, - TableBlacklistMovie.language, - TableBlacklistMovie.timestamp)\ - .join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\ - .order_by(TableBlacklistMovie.timestamp.desc())\ - .limit(length)\ - .offset(start)\ - .dicts() - data = list(data) - - for item in data: - postprocessMovie(item) - - # Make timestamp pretty - item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') - item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))}) - - return jsonify(data=data) - - @authenticate - def post(self): - radarr_id = int(request.args.get('radarrid')) - provider = request.form.get('provider') - subs_id = request.form.get('subs_id') - language = request.form.get('language') - # TODO - forced = False - hi = False - - data = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get() - - media_path = data['path'] - subtitles_path = request.form.get('subtitles_path') - - blacklist_log_movie(radarr_id=radarr_id, - provider=provider, - subs_id=subs_id, - language=language) - delete_subtitles(media_type='movie', - language=language, - forced=forced, - hi=hi, - media_path=path_mappings.path_replace_movie(media_path), - subtitles_path=subtitles_path, - radarr_id=radarr_id) - movies_download_subtitles(radarr_id) - event_stream(type='movie-history') - return '', 200 - - @authenticate - def delete(self): - if request.args.get("all") == "true": - blacklist_delete_all_movie() - else: - provider = request.form.get('provider') - subs_id = request.form.get('subs_id') - blacklist_delete_movie(provider=provider, subs_id=subs_id) - return '', 200 - - -class Subtitles(Resource): - @authenticate - def patch(self): - action = request.args.get('action') - - language = 
request.form.get('language') - subtitles_path = request.form.get('path') - media_type = request.form.get('type') - id = request.form.get('id') - - if media_type == 'episode': - subtitles_path = path_mappings.path_replace(subtitles_path) - metadata = TableEpisodes.select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)\ - .where(TableEpisodes.sonarrEpisodeId == id)\ - .dicts()\ - .get() - video_path = path_mappings.path_replace(metadata['path']) - else: - subtitles_path = path_mappings.path_replace_movie(subtitles_path) - metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get() - video_path = path_mappings.path_replace_movie(metadata['path']) - - if action == 'sync': - if media_type == 'episode': - subsync.sync(video_path=video_path, srt_path=subtitles_path, - srt_lang=language, media_type='series', sonarr_series_id=metadata['sonarrSeriesId'], - sonarr_episode_id=int(id)) - else: - subsync.sync(video_path=video_path, srt_path=subtitles_path, - srt_lang=language, media_type='movies', radarr_id=id) - elif action == 'translate': - dest_language = language - forced = True if request.form.get('forced') == 'true' else False - hi = True if request.form.get('hi') == 'true' else False - result = translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path, - to_lang=dest_language, - forced=forced, hi=hi) - if result: - if media_type == 'episode': - store_subtitles(path_mappings.path_replace_reverse(video_path), video_path) - else: - store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path) - return '', 200 - else: - return '', 404 - else: - subtitles_apply_mods(language, subtitles_path, [action]) - - # apply chmod if required - chmod = int(settings.general.chmod, 8) if not sys.platform.startswith( - 'win') and settings.general.getboolean('chmod_enabled') else None - if chmod: - os.chmod(subtitles_path, chmod) - - return '', 204 - - -class SubtitleNameInfo(Resource): - @authenticate - def 
get(self): - names = request.args.getlist('filenames[]') - results = [] - for name in names: - opts = dict() - opts['type'] = 'episode' - guessit_result = guessit(name, options=opts) - result = {} - result['filename'] = name - if 'subtitle_language' in guessit_result: - result['subtitle_language'] = str(guessit_result['subtitle_language']) - - result['episode'] = 0 - if 'episode' in guessit_result: - if isinstance(guessit_result['episode'], list): - # for multiple episodes file, choose the first episode number - if len(guessit_result['episode']): - # make sure that guessit returned a list of more than 0 items - result['episode'] = int(guessit_result['episode'][0]) - elif isinstance(guessit_result['episode'], (str, int)): - # if single episode (should be int but just in case we cast it to int) - result['episode'] = int(guessit_result['episode']) - - if 'season' in guessit_result: - result['season'] = int(guessit_result['season']) - else: - result['season'] = 0 - - results.append(result) - - return jsonify(data=results) - - -class BrowseBazarrFS(Resource): - @authenticate - def get(self): - path = request.args.get('path') or '' - data = [] - try: - result = browse_bazarr_filesystem(path) - if result is None: - raise ValueError - except Exception: - return jsonify([]) - for item in result['directories']: - data.append({'name': item['name'], 'children': True, 'path': item['path']}) - return jsonify(data) - - -class BrowseSonarrFS(Resource): - @authenticate - def get(self): - path = request.args.get('path') or '' - data = [] - try: - result = browse_sonarr_filesystem(path) - if result is None: - raise ValueError - except Exception: - return jsonify([]) - for item in result['directories']: - data.append({'name': item['name'], 'children': True, 'path': item['path']}) - return jsonify(data) - - -class BrowseRadarrFS(Resource): - @authenticate - def get(self): - path = request.args.get('path') or '' - data = [] - try: - result = browse_radarr_filesystem(path) - if result is 
class WebHooksPlex(Resource):
    """Plex webhook receiver.

    POST: on a playback-start event ('media.play'), look up the played item in
    Bazarr's database (via the IMDb ID advertised by Plex) and trigger a
    subtitles search for it.

    Returns 204 for ignored events, 404 when the media cannot be matched,
    200 once the search has been triggered.
    """

    @authenticate
    def post(self):
        # Plex sends the webhook as a multipart form with a JSON 'payload' field.
        json_webhook = request.form.get('payload')
        parsed_json_webhook = json.loads(json_webhook)

        event = parsed_json_webhook['event']
        if event not in ['media.play']:
            # Only playback-start events are of interest.
            return '', 204

        media_type = parsed_json_webhook['Metadata']['type']

        if media_type == 'episode':
            season = parsed_json_webhook['Metadata']['parentIndex']
            episode = parsed_json_webhook['Metadata']['index']
        else:
            season = episode = None

        # Collect the external IDs (imdb/tvdb/...) Plex advertises, e.g. 'imdb://tt0123456'.
        ids = []
        for item in parsed_json_webhook['Metadata']['Guid']:
            splitted_id = item['id'].split('://')
            if len(splitted_id) == 2:
                ids.append({splitted_id[0]: splitted_id[1]})
        if not ids:
            return '', 404

        if media_type == 'episode':
            try:
                episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
                # Plex only provides the episode's own IMDb ID; scrape the IMDb
                # title page to find the parent series ID.
                r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
                                 headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
                soup = bso(r.content, "html.parser")
                series_imdb_id = soup.find(
                    'a', {'class': re.compile(r'SeriesParentLink__ParentTextLink')})['href'].split('/')[2]
            except Exception:
                # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt propagate.
                return '', 404

            try:
                sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
                    .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                    .where(TableShows.imdbId == series_imdb_id,
                           TableEpisodes.season == season,
                           TableEpisodes.episode == episode) \
                    .dicts() \
                    .get()
            except Exception:
                # .get() raises when no row matches; report not-found instead of a 500.
                return '', 404

            if sonarrEpisodeId:
                episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
        else:
            try:
                movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
            except Exception:
                return '', 404

            try:
                radarrId = TableMovies.select(TableMovies.radarrId)\
                    .where(TableMovies.imdbId == movie_imdb_id)\
                    .dicts()\
                    .get()
            except Exception:
                # Movie unknown to Bazarr: not-found instead of an unhandled DoesNotExist.
                return '', 404

            if radarrId:
                movies_download_subtitles(no=radarrId['radarrId'])

        return '', 200
class Badges(Resource):
    """Counts shown as sidebar badges.

    GET returns: number of episodes and movies with missing subtitles,
    number of currently throttled providers, and number of health issues.
    """

    @authenticate
    def get(self):
        # missing_subtitles holds a JSON-encoded list; '[]' means nothing is wanted.
        # Use peewee's is_null(False) to emit `IS NOT NULL`: the previous
        # `field is not None` was evaluated by Python as identity (always True),
        # silently dropping the NULL filter from the SQL query.
        episodes_conditions = [(TableEpisodes.missing_subtitles.is_null(False)),
                               (TableEpisodes.missing_subtitles != '[]')]
        episodes_conditions += get_exclusion_clause('series')
        missing_episodes = TableEpisodes.select(TableShows.tags,
                                                TableShows.seriesType,
                                                TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, episodes_conditions))\
            .count()

        movies_conditions = [(TableMovies.missing_subtitles.is_null(False)),
                             (TableMovies.missing_subtitles != '[]')]
        movies_conditions += get_exclusion_clause('movie')
        missing_movies = TableMovies.select(TableMovies.tags,
                                            TableMovies.monitored)\
            .where(reduce(operator.and_, movies_conditions))\
            .count()

        throttled_providers = len(get_throttled_providers())

        health_issues = len(get_health_issues())

        result = {
            "episodes": missing_episodes,
            "movies": missing_movies,
            "providers": throttled_providers,
            "status": health_issues
        }
        return jsonify(result)
# GET: get blacklist
# POST: add blacklist
# DELETE: remove blacklist
class EpisodesBlacklist(Resource):
    """Manage the per-episode subtitles blacklist."""

    @authenticate
    def get(self):
        # Paging parameters; -1 length means "no limit" for peewee.
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1

        query = TableBlacklist.select(TableShows.title.alias('seriesTitle'),
                                      TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                      TableEpisodes.title.alias('episodeTitle'),
                                      TableEpisodes.sonarrSeriesId,
                                      TableBlacklist.provider,
                                      TableBlacklist.subs_id,
                                      TableBlacklist.language,
                                      TableBlacklist.timestamp)\
            .join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\
            .join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\
            .order_by(TableBlacklist.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        rows = list(query)

        for row in rows:
            # Expose both an absolute and a human-friendly relative timestamp.
            row["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(row['timestamp'])).strftime('%x %X')
            row.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(row['timestamp']))})

            postprocessEpisode(row)

        return jsonify(data=rows)

    @authenticate
    def post(self):
        series_id = int(request.args.get('seriesid'))
        episode_id = int(request.args.get('episodeid'))
        provider = request.form.get('provider')
        subs_id = request.form.get('subs_id')
        language = request.form.get('language')

        episode_row = TableEpisodes.select(TableEpisodes.path)\
            .where(TableEpisodes.sonarrEpisodeId == episode_id)\
            .dicts()\
            .get()

        media_path = episode_row['path']
        subtitles_path = request.form.get('subtitles_path')

        # Record the blacklist entry, remove the offending subtitles file,
        # then immediately search for a replacement.
        blacklist_log(sonarr_series_id=series_id,
                      sonarr_episode_id=episode_id,
                      provider=provider,
                      subs_id=subs_id,
                      language=language)
        delete_subtitles(media_type='series',
                         language=language,
                         forced=False,
                         hi=False,
                         media_path=path_mappings.path_replace(media_path),
                         subtitles_path=subtitles_path,
                         sonarr_series_id=series_id,
                         sonarr_episode_id=episode_id)
        episode_download_subtitles(episode_id)
        event_stream(type='episode-history')
        return '', 200

    @authenticate
    def delete(self):
        if request.args.get("all") == "true":
            # Wipe the whole blacklist at once.
            blacklist_delete_all()
        else:
            blacklist_delete(provider=request.form.get('provider'),
                             subs_id=request.form.get('subs_id'))
        return '', 204
# PATCH: Download Subtitles
# POST: Upload Subtitles
# DELETE: Delete Subtitles
class EpisodesSubtitles(Resource):
    """Download, upload or delete subtitles for a single episode."""

    @authenticate
    def patch(self):
        """Search the enabled providers and download subtitles for one episode."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name,
                                           TableEpisodes.audio_language)\
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
            .dicts()\
            .get()

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        # The literal string "None" is deliberate: downstream expects a string.
        if sceneName is None: sceneName = "None"

        language = request.form.get('language')
        # Default missing flags to 'False' instead of crashing on None.capitalize().
        hi = (request.form.get('hi') or 'false').capitalize()
        forced = (request.form.get('forced') or 'false').capitalize()

        providers_list = get_providers()
        providers_auth = get_providers_auth()

        # The audio language comes from the language profile, not from the
        # episode row (the row value was previously fetched and then discarded).
        audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
        if len(audio_language_list) > 0:
            audio_language = audio_language_list[0]['name']
        else:
            audio_language = None

        try:
            result = download_subtitle(episodePath, language, audio_language, hi, forced, providers_list,
                                       providers_auth, sceneName, title, 'series',
                                       profile_id=get_profile_id(episode_id=sonarrEpisodeId))
            if result is not None:
                # result tuple: (message, path, language, provider, score,
                #                forced, subs_id, subs_path, hi)
                message = result[0]
                path = result[1]
                forced = result[5]
                if result[8]:
                    language_code = result[2] + ":hi"
                elif forced:
                    language_code = result[2] + ":forced"
                else:
                    language_code = result[2]
                provider = result[3]
                score = result[4]
                subs_id = result[6]
                subs_path = result[7]
                history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
                            subs_path)
                send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
            else:
                # Nothing found: refresh the episode in the UI anyway.
                event_stream(type='episode', payload=sonarrEpisodeId)

        except OSError:
            pass

        return '', 204

    @authenticate
    def post(self):
        """Manually upload a subtitles file for one episode."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name,
                                           TableEpisodes.audio_language)\
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
            .dicts()\
            .get()

        title = episodeInfo['title']
        episodePath = path_mappings.path_replace(episodeInfo['path'])
        sceneName = episodeInfo['scene_name']
        audio_language = episodeInfo['audio_language']
        if sceneName is None: sceneName = "None"

        language = request.form.get('language')
        forced = request.form.get('forced') == 'true'
        hi = request.form.get('hi') == 'true'
        subFile = request.files.get('file')

        _, ext = os.path.splitext(subFile.filename)

        if ext not in SUBTITLE_EXTENSIONS:
            raise ValueError('A subtitle of an invalid format was uploaded.')

        try:
            result = manual_upload_subtitle(path=episodePath,
                                            language=language,
                                            forced=forced,
                                            hi=hi,
                                            title=title,
                                            scene_name=sceneName,
                                            media_type='series',
                                            subtitle=subFile,
                                            audio_language=audio_language)

            if result is not None:
                message = result[0]
                path = result[1]
                subs_path = result[2]
                if hi:
                    language_code = language + ":hi"
                elif forced:
                    language_code = language + ":forced"
                else:
                    language_code = language
                provider = "manual"
                # Manual uploads always get a perfect score.
                score = 360
                history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score,
                            subtitles_path=subs_path)
                if not settings.general.getboolean('dont_notify_manual_actions'):
                    send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)

        except OSError:
            pass

        return '', 204

    @authenticate
    def delete(self):
        """Delete one subtitles file for one episode."""
        sonarrSeriesId = request.args.get('seriesid')
        sonarrEpisodeId = request.args.get('episodeid')
        episodeInfo = TableEpisodes.select(TableEpisodes.title,
                                           TableEpisodes.path,
                                           TableEpisodes.scene_name,
                                           TableEpisodes.audio_language)\
            .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
            .dicts()\
            .get()

        episodePath = path_mappings.path_replace(episodeInfo['path'])

        language = request.form.get('language')
        forced = request.form.get('forced')
        hi = request.form.get('hi')
        subtitlesPath = request.form.get('path')

        # The client sends the mapped path; convert back to Sonarr's view of it.
        subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath)

        delete_subtitles(media_type='series',
                         language=language,
                         forced=forced,
                         hi=hi,
                         media_path=episodePath,
                         subtitles_path=subtitlesPath,
                         sonarr_series_id=sonarrSeriesId,
                         sonarr_episode_id=sonarrEpisodeId)

        return '', 204
settings.general.getboolean('upgrade_subs'): + days_to_upgrade_subs = settings.general.days_to_upgrade_subs + minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) - + datetime.datetime(1970, 1, 1)).total_seconds() + + if settings.general.getboolean('upgrade_manual'): + query_actions = [1, 2, 3, 6] + else: + query_actions = [1, 3] + + upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)), + (TableHistory.timestamp > minimum_timestamp), + (TableHistory.score is not None)] + upgradable_episodes_conditions += get_exclusion_clause('series') + upgradable_episodes = TableHistory.select(TableHistory.video_path, + fn.MAX(TableHistory.timestamp).alias('timestamp'), + TableHistory.score, + TableShows.tags, + TableEpisodes.monitored, + TableShows.seriesType)\ + .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\ + .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\ + .where(reduce(operator.and_, upgradable_episodes_conditions))\ + .group_by(TableHistory.video_path)\ + .dicts() + upgradable_episodes = list(upgradable_episodes) + for upgradable_episode in upgradable_episodes: + if upgradable_episode['timestamp'] > minimum_timestamp: + try: + int(upgradable_episode['score']) + except ValueError: + pass + else: + if int(upgradable_episode['score']) < 360: + upgradable_episodes_not_perfect.append(upgradable_episode) + + query_conditions = [(TableEpisodes.title is not None)] + if episodeid: + query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid)) + query_condition = reduce(operator.and_, query_conditions) + episode_history = TableHistory.select(TableHistory.id, + TableShows.title.alias('seriesTitle'), + TableEpisodes.monitored, + TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'), + TableEpisodes.title.alias('episodeTitle'), + TableHistory.timestamp, + TableHistory.subs_id, + TableHistory.description, + 
TableHistory.sonarrSeriesId, + TableEpisodes.path, + TableHistory.language, + TableHistory.score, + TableShows.tags, + TableHistory.action, + TableHistory.subtitles_path, + TableHistory.sonarrEpisodeId, + TableHistory.provider, + TableShows.seriesType)\ + .join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\ + .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\ + .where(query_condition)\ + .order_by(TableHistory.timestamp.desc())\ + .limit(length)\ + .offset(start)\ + .dicts() + episode_history = list(episode_history) + + blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts() + blacklist_db = list(blacklist_db) + + for item in episode_history: + # Mark episode as upgradable or not + item.update({"upgradable": False}) + if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']), + "tags": str(item['tags']), "monitored": str(item['monitored']), + "seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect: + if os.path.isfile(path_mappings.path_replace(item['subtitles_path'])): + item.update({"upgradable": True}) + + del item['path'] + + postprocessEpisode(item) + + if item['score']: + item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%" + + # Make timestamp pretty + if item['timestamp']: + item["raw_timestamp"] = int(item['timestamp']) + item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') + item['timestamp'] = pretty.date(item["raw_timestamp"]) + + # Check if subtitles is blacklisted + item.update({"blacklisted": False}) + if item['action'] not in [0, 4, 5]: + for blacklisted_item in blacklist_db: + if blacklisted_item['provider'] == item['provider'] and \ + blacklisted_item['subs_id'] == item['subs_id']: + item.update({"blacklisted": True}) + break + + count = TableHistory.select()\ + .join(TableEpisodes, on=(TableHistory.sonarrEpisodeId 
# GET: Get Wanted Episodes
class EpisodesWanted(Resource):
    """List episodes that still have missing subtitles, optionally filtered by ID."""

    @authenticate
    def get(self):
        episodeid = request.args.getlist('episodeid[]')

        wanted_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        if len(episodeid) > 0:
            # Use peewee's in_() so a SQL IN clause is emitted; the Python `in`
            # operator would be evaluated eagerly to a plain bool and break the
            # filter (same idiom as the Episodes resource).
            wanted_conditions.append((TableEpisodes.sonarrEpisodeId.in_(episodeid)))
        wanted_conditions += get_exclusion_clause('series')
        wanted_condition = reduce(operator.and_, wanted_conditions)

        if len(episodeid) > 0:
            data = TableEpisodes.select(TableShows.title.alias('seriesTitle'),
                                        TableEpisodes.monitored,
                                        TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                        TableEpisodes.title.alias('episodeTitle'),
                                        TableEpisodes.missing_subtitles,
                                        TableEpisodes.sonarrSeriesId,
                                        TableEpisodes.sonarrEpisodeId,
                                        TableEpisodes.scene_name.alias('sceneName'),
                                        TableShows.tags,
                                        TableEpisodes.failedAttempts,
                                        TableShows.seriesType)\
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
                .where(wanted_condition)\
                .dicts()
        else:
            # No explicit IDs: return a newest-first page.
            start = request.args.get('start') or 0
            length = request.args.get('length') or -1
            data = TableEpisodes.select(TableShows.title.alias('seriesTitle'),
                                        TableEpisodes.monitored,
                                        TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
                                        TableEpisodes.title.alias('episodeTitle'),
                                        TableEpisodes.missing_subtitles,
                                        TableEpisodes.sonarrSeriesId,
                                        TableEpisodes.sonarrEpisodeId,
                                        TableEpisodes.scene_name.alias('sceneName'),
                                        TableShows.tags,
                                        TableEpisodes.failedAttempts,
                                        TableShows.seriesType)\
                .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
                .where(wanted_condition)\
                .order_by(TableEpisodes.rowid.desc())\
                .limit(length)\
                .offset(start)\
                .dicts()
        data = list(data)

        for item in data:
            postprocessEpisode(item)

        # Total count ignores the ID filter and paging on purpose: the UI shows
        # the overall number of wanted episodes.
        count_conditions = [(TableEpisodes.missing_subtitles != '[]')]
        count_conditions += get_exclusion_clause('series')
        count = TableEpisodes.select(TableShows.tags,
                                     TableShows.seriesType,
                                     TableEpisodes.monitored)\
            .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
            .where(reduce(operator.and_, count_conditions))\
            .count()

        return jsonify(data=data, total=count)
class BrowseRadarrFS(Resource):
    """Expose the filesystem as seen by Radarr for directory browsing in the UI."""

    @authenticate
    def get(self):
        requested_path = request.args.get('path') or ''
        try:
            listing = browse_radarr_filesystem(requested_path)
            if listing is None:
                raise ValueError
        except Exception:
            # Unreadable or invalid path: answer with an empty listing rather than an error.
            return jsonify([])
        # Only directories are returned; 'children': True tells the tree widget it can expand.
        entries = [{'name': folder['name'], 'children': True, 'path': folder['path']}
                   for folder in listing['directories']]
        return jsonify(entries)
class HistoryStats(Resource):
    """Daily download counts for the history charts.

    GET parameters: timeframe ('week'/'month'/'trimester'/'year'), and
    optional action/provider/language filters ('All' disables a filter).
    Returns one {date, count} point per day for series and for movies.
    """

    @authenticate
    def get(self):
        timeframe = request.args.get('timeframe') or 'month'
        action = request.args.get('action') or 'All'
        provider = request.args.get('provider') or 'All'
        language = request.args.get('language') or 'All'

        # Seconds of history to cover. Unknown timeframe values fall back to a
        # month (previously they left `delay` unbound and raised at runtime).
        delay = {'week': 6 * 24 * 60 * 60,
                 'month': 30 * 24 * 60 * 60,
                 'trimester': 90 * 24 * 60 * 60,
                 'year': 364 * 24 * 60 * 60}.get(timeframe, 30 * 24 * 60 * 60)

        now = time.time()
        past = now - delay

        history_where_clauses = [(TableHistory.timestamp.between(past, now))]
        history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]

        if action != 'All':
            history_where_clauses.append((TableHistory.action == action))
            history_where_clauses_movie.append((TableHistoryMovie.action == action))
        else:
            # Default set: downloaded / manually downloaded / upgraded.
            history_where_clauses.append((TableHistory.action.in_([1, 2, 3])))
            history_where_clauses_movie.append((TableHistoryMovie.action.in_([1, 2, 3])))

        if provider != 'All':
            history_where_clauses.append((TableHistory.provider == provider))
            history_where_clauses_movie.append((TableHistoryMovie.provider == provider))

        if language != 'All':
            history_where_clauses.append((TableHistory.language == language))
            history_where_clauses_movie.append((TableHistoryMovie.language == language))

        history_where_clause = reduce(operator.and_, history_where_clauses)
        history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie)

        # Bucket matching rows by calendar day (SQLite strftime on the epoch timestamp).
        data_series = TableHistory.select(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch').alias('date'),
                                          fn.COUNT(TableHistory.id).alias('count'))\
            .where(history_where_clause) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistory.timestamp, 'unixepoch'))\
            .dicts()
        data_series = list(data_series)

        data_movies = TableHistoryMovie.select(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch').alias('date'),
                                               fn.COUNT(TableHistoryMovie.id).alias('count')) \
            .where(history_where_clause_movie) \
            .group_by(fn.strftime('%Y-%m-%d', TableHistoryMovie.timestamp, 'unixepoch')) \
            .dicts()
        data_movies = list(data_movies)

        # Fill in zero-count points so the chart has one point per day.
        for dt in rrule.rrule(rrule.DAILY,
                              dtstart=datetime.datetime.now() - datetime.timedelta(seconds=delay),
                              until=datetime.datetime.now()):
            if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_series):
                data_series.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})
            if not any(d['date'] == dt.strftime('%Y-%m-%d') for d in data_movies):
                data_movies.append({'date': dt.strftime('%Y-%m-%d'), 'count': 0})

        sorted_data_series = sorted(data_series, key=lambda i: i['date'])
        sorted_data_movies = sorted(data_movies, key=lambda i: i['date'])

        return jsonify(series=sorted_data_series, movies=sorted_data_movies)
# GET: get blacklist
# POST: add blacklist
# DELETE: remove blacklist
class MoviesBlacklist(Resource):
    """Manage the per-movie subtitles blacklist."""

    @authenticate
    def get(self):
        # Paging parameters; -1 length means "no limit" for peewee.
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1

        query = TableBlacklistMovie.select(TableMovies.title,
                                           TableMovies.radarrId,
                                           TableBlacklistMovie.provider,
                                           TableBlacklistMovie.subs_id,
                                           TableBlacklistMovie.language,
                                           TableBlacklistMovie.timestamp)\
            .join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
            .order_by(TableBlacklistMovie.timestamp.desc())\
            .limit(length)\
            .offset(start)\
            .dicts()
        rows = list(query)

        for row in rows:
            postprocessMovie(row)

            # Expose both an absolute and a human-friendly relative timestamp.
            row["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(row['timestamp'])).strftime('%x %X')
            row.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(row['timestamp']))})

        return jsonify(data=rows)

    @authenticate
    def post(self):
        radarr_id = int(request.args.get('radarrid'))
        provider = request.form.get('provider')
        subs_id = request.form.get('subs_id')
        language = request.form.get('language')
        # TODO
        forced = False
        hi = False

        movie_row = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get()

        media_path = movie_row['path']
        subtitles_path = request.form.get('subtitles_path')

        # Record the blacklist entry, remove the offending subtitles file,
        # then immediately search for a replacement.
        blacklist_log_movie(radarr_id=radarr_id,
                            provider=provider,
                            subs_id=subs_id,
                            language=language)
        delete_subtitles(media_type='movie',
                         language=language,
                         forced=forced,
                         hi=hi,
                         media_path=path_mappings.path_replace_movie(media_path),
                         subtitles_path=subtitles_path,
                         radarr_id=radarr_id)
        movies_download_subtitles(radarr_id)
        event_stream(type='movie-history')
        return '', 200

    @authenticate
    def delete(self):
        if request.args.get("all") == "true":
            # Wipe the whole movie blacklist at once.
            blacklist_delete_all_movie()
        else:
            blacklist_delete_movie(provider=request.form.get('provider'),
                                   subs_id=request.form.get('subs_id'))
        return '', 200
= [] + if settings.general.getboolean('upgrade_subs'): + days_to_upgrade_subs = settings.general.days_to_upgrade_subs + minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) - + datetime.datetime(1970, 1, 1)).total_seconds() + + if settings.general.getboolean('upgrade_manual'): + query_actions = [1, 2, 3, 6] + else: + query_actions = [1, 3] + + upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)), + (TableHistoryMovie.timestamp > minimum_timestamp), + (TableHistoryMovie.score is not None)] + upgradable_movies_conditions += get_exclusion_clause('movie') + upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path, + fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'), + TableHistoryMovie.score, + TableMovies.tags, + TableMovies.monitored)\ + .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ + .where(reduce(operator.and_, upgradable_movies_conditions))\ + .group_by(TableHistoryMovie.video_path)\ + .dicts() + upgradable_movies = list(upgradable_movies) + + for upgradable_movie in upgradable_movies: + if upgradable_movie['timestamp'] > minimum_timestamp: + try: + int(upgradable_movie['score']) + except ValueError: + pass + else: + if int(upgradable_movie['score']) < 120: + upgradable_movies_not_perfect.append(upgradable_movie) + + query_conditions = [(TableMovies.title is not None)] + if radarrid: + query_conditions.append((TableMovies.radarrId == radarrid)) + query_condition = reduce(operator.and_, query_conditions) + + movie_history = TableHistoryMovie.select(TableHistoryMovie.id, + TableHistoryMovie.action, + TableMovies.title, + TableHistoryMovie.timestamp, + TableHistoryMovie.description, + TableHistoryMovie.radarrId, + TableMovies.monitored, + TableHistoryMovie.video_path.alias('path'), + TableHistoryMovie.language, + TableMovies.tags, + TableHistoryMovie.score, + TableHistoryMovie.subs_id, + TableHistoryMovie.provider, + TableHistoryMovie.subtitles_path)\ + 
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ + .where(query_condition)\ + .order_by(TableHistoryMovie.timestamp.desc())\ + .limit(length)\ + .offset(start)\ + .dicts() + movie_history = list(movie_history) + + blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts() + blacklist_db = list(blacklist_db) + + for item in movie_history: + # Mark movies as upgradable or not + item.update({"upgradable": False}) + if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']), + "tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect: + if os.path.isfile(path_mappings.path_replace_movie(item['subtitles_path'])): + item.update({"upgradable": True}) + + del item['path'] + + postprocessMovie(item) + + if item['score']: + item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%" + + # Make timestamp pretty + if item['timestamp']: + item["raw_timestamp"] = int(item['timestamp']) + item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X') + item['timestamp'] = pretty.date(item["raw_timestamp"]) + + # Check if subtitles is blacklisted + item.update({"blacklisted": False}) + if item['action'] not in [0, 4, 5]: + for blacklisted_item in blacklist_db: + if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[ + 'subs_id']: + item.update({"blacklisted": True}) + break + + count = TableHistoryMovie.select()\ + .join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\ + .where(TableMovies.title is not None)\ + .count() + + return jsonify(data=movie_history, total=count) diff --git a/bazarr/api/movies/movies.py b/bazarr/api/movies/movies.py new file mode 100644 index 000000000..d088a71a5 --- /dev/null +++ b/bazarr/api/movies/movies.py @@ -0,0 +1,80 @@ +# coding=utf-8 + +from flask import request, jsonify +from 
flask_restful import Resource + +from database import TableMovies +from ..utils import authenticate, postprocessMovie, None_Keys +from list_subtitles import list_missing_subtitles_movies, movies_scan_subtitles +from event_handler import event_stream +from get_subtitle import movies_download_subtitles, wanted_search_missing_subtitles_movies + + +class Movies(Resource): + @authenticate + def get(self): + start = request.args.get('start') or 0 + length = request.args.get('length') or -1 + radarrId = request.args.getlist('radarrid[]') + + count = TableMovies.select().count() + + if len(radarrId) != 0: + result = TableMovies.select()\ + .where(TableMovies.radarrId.in_(radarrId))\ + .order_by(TableMovies.sortTitle)\ + .dicts() + else: + result = TableMovies.select().order_by(TableMovies.sortTitle).limit(length).offset(start).dicts() + result = list(result) + for item in result: + postprocessMovie(item) + + return jsonify(data=result, total=count) + + @authenticate + def post(self): + radarrIdList = request.form.getlist('radarrid') + profileIdList = request.form.getlist('profileid') + + for idx in range(len(radarrIdList)): + radarrId = radarrIdList[idx] + profileId = profileIdList[idx] + + if profileId in None_Keys: + profileId = None + else: + try: + profileId = int(profileId) + except Exception: + return '', 400 + + TableMovies.update({ + TableMovies.profileId: profileId + })\ + .where(TableMovies.radarrId == radarrId)\ + .execute() + + list_missing_subtitles_movies(no=radarrId, send_event=False) + + event_stream(type='movie', payload=radarrId) + event_stream(type='movie-wanted', payload=radarrId) + event_stream(type='badges') + + return '', 204 + + @authenticate + def patch(self): + radarrid = request.form.get('radarrid') + action = request.form.get('action') + if action == "scan-disk": + movies_scan_subtitles(radarrid) + return '', 204 + elif action == "search-missing": + movies_download_subtitles(radarrid) + return '', 204 + elif action == "search-wanted": + 
wanted_search_missing_subtitles_movies() + return '', 204 + + return '', 400 diff --git a/bazarr/api/movies/movies_subtitles.py b/bazarr/api/movies/movies_subtitles.py new file mode 100644 index 000000000..c4f1a1e2a --- /dev/null +++ b/bazarr/api/movies/movies_subtitles.py @@ -0,0 +1,176 @@ +# coding=utf-8 + +import os + +from flask import request +from flask_restful import Resource +from subliminal_patch.core import SUBTITLE_EXTENSIONS + +from database import TableMovies, get_audio_profile_languages, get_profile_id +from ..utils import authenticate +from helper import path_mappings +from get_providers import get_providers, get_providers_auth +from get_subtitle import download_subtitle, manual_upload_subtitle +from utils import history_log_movie, delete_subtitles +from notifier import send_notifications_movie +from list_subtitles import store_subtitles_movie +from event_handler import event_stream +from config import settings + + +# PATCH: Download Subtitles +# POST: Upload Subtitles +# DELETE: Delete Subtitles +class MoviesSubtitles(Resource): + @authenticate + def patch(self): + # Download + radarrId = request.args.get('radarrid') + + movieInfo = TableMovies.select(TableMovies.title, + TableMovies.path, + TableMovies.sceneName, + TableMovies.audio_language)\ + .where(TableMovies.radarrId == radarrId)\ + .dicts()\ + .get() + + moviePath = path_mappings.path_replace_movie(movieInfo['path']) + sceneName = movieInfo['sceneName'] + if sceneName is None: sceneName = 'None' + + title = movieInfo['title'] + audio_language = movieInfo['audio_language'] + + language = request.form.get('language') + hi = request.form.get('hi').capitalize() + forced = request.form.get('forced').capitalize() + + providers_list = get_providers() + providers_auth = get_providers_auth() + + audio_language_list = get_audio_profile_languages(movie_id=radarrId) + if len(audio_language_list) > 0: + audio_language = audio_language_list[0]['name'] + else: + audio_language = None + + try: + result = 
download_subtitle(moviePath, language, audio_language, hi, forced, providers_list, + providers_auth, sceneName, title, 'movie', + profile_id=get_profile_id(movie_id=radarrId)) + if result is not None: + message = result[0] + path = result[1] + forced = result[5] + if result[8]: + language_code = result[2] + ":hi" + elif forced: + language_code = result[2] + ":forced" + else: + language_code = result[2] + provider = result[3] + score = result[4] + subs_id = result[6] + subs_path = result[7] + history_log_movie(1, radarrId, message, path, language_code, provider, score, subs_id, subs_path) + send_notifications_movie(radarrId, message) + store_subtitles_movie(path, moviePath) + else: + event_stream(type='movie', payload=radarrId) + except OSError: + pass + + return '', 204 + + @authenticate + def post(self): + # Upload + # TODO: Support Multiply Upload + radarrId = request.args.get('radarrid') + movieInfo = TableMovies.select(TableMovies.title, + TableMovies.path, + TableMovies.sceneName, + TableMovies.audio_language) \ + .where(TableMovies.radarrId == radarrId) \ + .dicts() \ + .get() + + moviePath = path_mappings.path_replace_movie(movieInfo['path']) + sceneName = movieInfo['sceneName'] + if sceneName is None: sceneName = 'None' + + title = movieInfo['title'] + audioLanguage = movieInfo['audio_language'] + + language = request.form.get('language') + forced = True if request.form.get('forced') == 'true' else False + hi = True if request.form.get('hi') == 'true' else False + subFile = request.files.get('file') + + _, ext = os.path.splitext(subFile.filename) + + if ext not in SUBTITLE_EXTENSIONS: + raise ValueError('A subtitle of an invalid format was uploaded.') + + try: + result = manual_upload_subtitle(path=moviePath, + language=language, + forced=forced, + hi=hi, + title=title, + scene_name=sceneName, + media_type='movie', + subtitle=subFile, + audio_language=audioLanguage) + + if result is not None: + message = result[0] + path = result[1] + subs_path = result[2] 
+ if hi: + language_code = language + ":hi" + elif forced: + language_code = language + ":forced" + else: + language_code = language + provider = "manual" + score = 120 + history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path) + if not settings.general.getboolean('dont_notify_manual_actions'): + send_notifications_movie(radarrId, message) + store_subtitles_movie(path, moviePath) + except OSError: + pass + + return '', 204 + + @authenticate + def delete(self): + # Delete + radarrId = request.args.get('radarrid') + movieInfo = TableMovies.select(TableMovies.path) \ + .where(TableMovies.radarrId == radarrId) \ + .dicts() \ + .get() + + moviePath = path_mappings.path_replace_movie(movieInfo['path']) + + language = request.form.get('language') + forced = request.form.get('forced') + hi = request.form.get('hi') + subtitlesPath = request.form.get('path') + + subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath) + + result = delete_subtitles(media_type='movie', + language=language, + forced=forced, + hi=hi, + media_path=moviePath, + subtitles_path=subtitlesPath, + radarr_id=radarrId) + if result: + return '', 202 + else: + return '', 204 diff --git a/bazarr/api/movies/wanted.py b/bazarr/api/movies/wanted.py new file mode 100644 index 000000000..45511f0ab --- /dev/null +++ b/bazarr/api/movies/wanted.py @@ -0,0 +1,62 @@ +# coding=utf-8 + +import operator + +from flask import request, jsonify +from flask_restful import Resource +from functools import reduce + +from database import get_exclusion_clause, TableMovies +from ..utils import authenticate, postprocessMovie + + +# GET: Get Wanted Movies +class MoviesWanted(Resource): + @authenticate + def get(self): + radarrid = request.args.getlist("radarrid[]") + + wanted_conditions = [(TableMovies.missing_subtitles != '[]')] + if len(radarrid) > 0: + wanted_conditions.append((TableMovies.radarrId.in_(radarrid))) + wanted_conditions += get_exclusion_clause('movie') + 
wanted_condition = reduce(operator.and_, wanted_conditions) + + if len(radarrid) > 0: + result = TableMovies.select(TableMovies.title, + TableMovies.missing_subtitles, + TableMovies.radarrId, + TableMovies.sceneName, + TableMovies.failedAttempts, + TableMovies.tags, + TableMovies.monitored)\ + .where(wanted_condition)\ + .dicts() + else: + start = request.args.get('start') or 0 + length = request.args.get('length') or -1 + result = TableMovies.select(TableMovies.title, + TableMovies.missing_subtitles, + TableMovies.radarrId, + TableMovies.sceneName, + TableMovies.failedAttempts, + TableMovies.tags, + TableMovies.monitored)\ + .where(wanted_condition)\ + .order_by(TableMovies.rowid.desc())\ + .limit(length)\ + .offset(start)\ + .dicts() + result = list(result) + + for item in result: + postprocessMovie(item) + + count_conditions = [(TableMovies.missing_subtitles != '[]')] + count_conditions += get_exclusion_clause('movie') + count = TableMovies.select(TableMovies.monitored, + TableMovies.tags)\ + .where(reduce(operator.and_, count_conditions))\ + .count() + + return jsonify(data=result, total=count) diff --git a/bazarr/api/providers/__init__.py b/bazarr/api/providers/__init__.py new file mode 100644 index 000000000..dc3793d70 --- /dev/null +++ b/bazarr/api/providers/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 + +from flask import Blueprint +from flask_restful import Api + +from .providers import Providers +from .providers_episodes import ProviderEpisodes +from .providers_movies import ProviderMovies + + +api_bp_providers = Blueprint('api_providers', __name__) +api = Api(api_bp_providers) + +api.add_resource(Providers, '/providers') +api.add_resource(ProviderMovies, '/providers/movies') +api.add_resource(ProviderEpisodes, '/providers/episodes') diff --git a/bazarr/api/providers/providers.py b/bazarr/api/providers/providers.py new file mode 100644 index 000000000..0bcfb45a7 --- /dev/null +++ b/bazarr/api/providers/providers.py @@ -0,0 +1,52 @@ +# coding=utf-8 + +from 
flask import request, jsonify +from flask_restful import Resource +from operator import itemgetter + +from database import TableHistory, TableHistoryMovie +from get_providers import list_throttled_providers, reset_throttled_providers +from ..utils import authenticate, False_Keys + + +class Providers(Resource): + @authenticate + def get(self): + history = request.args.get('history') + if history and history not in False_Keys: + providers = list(TableHistory.select(TableHistory.provider) + .where(TableHistory.provider != None and TableHistory.provider != "manual") + .dicts()) + providers += list(TableHistoryMovie.select(TableHistoryMovie.provider) + .where(TableHistoryMovie.provider != None and TableHistoryMovie.provider != "manual") + .dicts()) + providers_list = list(set([x['provider'] for x in providers])) + providers_dicts = [] + for provider in providers_list: + providers_dicts.append({ + 'name': provider, + 'status': 'History', + 'retry': '-' + }) + return jsonify(data=sorted(providers_dicts, key=itemgetter('name'))) + + throttled_providers = list_throttled_providers() + + providers = list() + for provider in throttled_providers: + providers.append({ + "name": provider[0], + "status": provider[1] if provider[1] is not None else "Good", + "retry": provider[2] if provider[2] != "now" else "-" + }) + return jsonify(data=providers) + + @authenticate + def post(self): + action = request.form.get('action') + + if action == 'reset': + reset_throttled_providers() + return '', 204 + + return '', 400 diff --git a/bazarr/api/providers/providers_episodes.py b/bazarr/api/providers/providers_episodes.py new file mode 100644 index 000000000..283cc160d --- /dev/null +++ b/bazarr/api/providers/providers_episodes.py @@ -0,0 +1,104 @@ +# coding=utf-8 + +from flask import request, jsonify +from flask_restful import Resource + +from database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id +from helper import path_mappings +from get_providers import 
get_providers, get_providers_auth +from get_subtitle import manual_search, manual_download_subtitle +from utils import history_log +from config import settings +from notifier import send_notifications +from list_subtitles import store_subtitles + +from ..utils import authenticate + + +class ProviderEpisodes(Resource): + @authenticate + def get(self): + # Manual Search + sonarrEpisodeId = request.args.get('episodeid') + episodeInfo = TableEpisodes.select(TableEpisodes.title, + TableEpisodes.path, + TableEpisodes.scene_name, + TableShows.profileId) \ + .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\ + .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \ + .dicts() \ + .get() + + title = episodeInfo['title'] + episodePath = path_mappings.path_replace(episodeInfo['path']) + sceneName = episodeInfo['scene_name'] + profileId = episodeInfo['profileId'] + if sceneName is None: sceneName = "None" + + providers_list = get_providers() + providers_auth = get_providers_auth() + + data = manual_search(episodePath, profileId, providers_list, providers_auth, sceneName, title, + 'series') + if not data: + data = [] + return jsonify(data=data) + + @authenticate + def post(self): + # Manual Download + sonarrSeriesId = request.args.get('seriesid') + sonarrEpisodeId = request.args.get('episodeid') + episodeInfo = TableEpisodes.select(TableEpisodes.title, + TableEpisodes.path, + TableEpisodes.scene_name) \ + .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \ + .dicts() \ + .get() + + title = episodeInfo['title'] + episodePath = path_mappings.path_replace(episodeInfo['path']) + sceneName = episodeInfo['scene_name'] + if sceneName is None: sceneName = "None" + + language = request.form.get('language') + hi = request.form.get('hi').capitalize() + forced = request.form.get('forced').capitalize() + selected_provider = request.form.get('provider') + subtitle = request.form.get('subtitle') + providers_auth = get_providers_auth() + + 
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId) + if len(audio_language_list) > 0: + audio_language = audio_language_list[0]['name'] + else: + audio_language = 'None' + + try: + result = manual_download_subtitle(episodePath, language, audio_language, hi, forced, subtitle, + selected_provider, providers_auth, sceneName, title, 'series', + profile_id=get_profile_id(episode_id=sonarrEpisodeId)) + if result is not None: + message = result[0] + path = result[1] + forced = result[5] + if result[8]: + language_code = result[2] + ":hi" + elif forced: + language_code = result[2] + ":forced" + else: + language_code = result[2] + provider = result[3] + score = result[4] + subs_id = result[6] + subs_path = result[7] + history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id, + subs_path) + if not settings.general.getboolean('dont_notify_manual_actions'): + send_notifications(sonarrSeriesId, sonarrEpisodeId, message) + store_subtitles(path, episodePath) + return result, 201 + except OSError: + pass + + return '', 204 diff --git a/bazarr/api/providers/providers_movies.py b/bazarr/api/providers/providers_movies.py new file mode 100644 index 000000000..d5c31c1c4 --- /dev/null +++ b/bazarr/api/providers/providers_movies.py @@ -0,0 +1,103 @@ +# coding=utf-8 + +from flask import request, jsonify +from flask_restful import Resource + +from database import TableMovies, get_audio_profile_languages, get_profile_id +from helper import path_mappings +from get_providers import get_providers, get_providers_auth +from get_subtitle import manual_search, manual_download_subtitle +from utils import history_log_movie +from config import settings +from notifier import send_notifications_movie +from list_subtitles import store_subtitles_movie + +from ..utils import authenticate + + +class ProviderMovies(Resource): + @authenticate + def get(self): + # Manual Search + radarrId = request.args.get('radarrid') + movieInfo = 
TableMovies.select(TableMovies.title, + TableMovies.path, + TableMovies.sceneName, + TableMovies.profileId) \ + .where(TableMovies.radarrId == radarrId) \ + .dicts() \ + .get() + + title = movieInfo['title'] + moviePath = path_mappings.path_replace_movie(movieInfo['path']) + sceneName = movieInfo['sceneName'] + profileId = movieInfo['profileId'] + if sceneName is None: sceneName = "None" + + providers_list = get_providers() + providers_auth = get_providers_auth() + + data = manual_search(moviePath, profileId, providers_list, providers_auth, sceneName, title, + 'movie') + if not data: + data = [] + return jsonify(data=data) + + @authenticate + def post(self): + # Manual Download + radarrId = request.args.get('radarrid') + movieInfo = TableMovies.select(TableMovies.title, + TableMovies.path, + TableMovies.sceneName, + TableMovies.audio_language) \ + .where(TableMovies.radarrId == radarrId) \ + .dicts() \ + .get() + + title = movieInfo['title'] + moviePath = path_mappings.path_replace_movie(movieInfo['path']) + sceneName = movieInfo['sceneName'] + if sceneName is None: sceneName = "None" + audio_language = movieInfo['audio_language'] + + language = request.form.get('language') + hi = request.form.get('hi').capitalize() + forced = request.form.get('forced').capitalize() + selected_provider = request.form.get('provider') + subtitle = request.form.get('subtitle') + + providers_auth = get_providers_auth() + + audio_language_list = get_audio_profile_languages(movie_id=radarrId) + if len(audio_language_list) > 0: + audio_language = audio_language_list[0]['name'] + else: + audio_language = 'None' + + try: + result = manual_download_subtitle(moviePath, language, audio_language, hi, forced, subtitle, + selected_provider, providers_auth, sceneName, title, 'movie', + profile_id=get_profile_id(movie_id=radarrId)) + if result is not None: + message = result[0] + path = result[1] + forced = result[5] + if result[8]: + language_code = result[2] + ":hi" + elif forced: + language_code 
= result[2] + ":forced" + else: + language_code = result[2] + provider = result[3] + score = result[4] + subs_id = result[6] + subs_path = result[7] + history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path) + if not settings.general.getboolean('dont_notify_manual_actions'): + send_notifications_movie(radarrId, message) + store_subtitles_movie(path, moviePath) + except OSError: + pass + + return '', 204 diff --git a/bazarr/api/series/__init__.py b/bazarr/api/series/__init__.py new file mode 100644 index 000000000..68f437ba4 --- /dev/null +++ b/bazarr/api/series/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 + +from flask import Blueprint +from flask_restful import Api + +from .series import Series + + +api_bp_series = Blueprint('api_series', __name__) +api = Api(api_bp_series) + +api.add_resource(Series, '/series') diff --git a/bazarr/api/series/series.py b/bazarr/api/series/series.py new file mode 100644 index 000000000..057462f11 --- /dev/null +++ b/bazarr/api/series/series.py @@ -0,0 +1,114 @@ +# coding=utf-8 + +from flask import request, jsonify +from flask_restful import Resource + +import operator +from functools import reduce + +from database import get_exclusion_clause, TableEpisodes, TableShows +from list_subtitles import list_missing_subtitles, series_scan_subtitles +from get_subtitle import series_download_subtitles, wanted_search_missing_subtitles_series +from ..utils import authenticate, postprocessSeries, None_Keys +from event_handler import event_stream + + +class Series(Resource): + @authenticate + def get(self): + start = request.args.get('start') or 0 + length = request.args.get('length') or -1 + seriesId = request.args.getlist('seriesid[]') + + count = TableShows.select().count() + + if len(seriesId) != 0: + result = TableShows.select() \ + .where(TableShows.sonarrSeriesId.in_(seriesId)) \ + .order_by(TableShows.sortTitle).dicts() + else: + result = 
TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts() + + result = list(result) + + for item in result: + postprocessSeries(item) + + # Add missing subtitles episode count + episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']), + (TableEpisodes.missing_subtitles != '[]')] + episodes_missing_conditions += get_exclusion_clause('series') + + episodeMissingCount = TableEpisodes.select(TableShows.tags, + TableEpisodes.monitored, + TableShows.seriesType) \ + .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \ + .where(reduce(operator.and_, episodes_missing_conditions)) \ + .count() + item.update({"episodeMissingCount": episodeMissingCount}) + + # Add episode count + episodeFileCount = TableEpisodes.select(TableShows.tags, + TableEpisodes.monitored, + TableShows.seriesType) \ + .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \ + .where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']) \ + .count() + item.update({"episodeFileCount": episodeFileCount}) + + return jsonify(data=result, total=count) + + @authenticate + def post(self): + seriesIdList = request.form.getlist('seriesid') + profileIdList = request.form.getlist('profileid') + + for idx in range(len(seriesIdList)): + seriesId = seriesIdList[idx] + profileId = profileIdList[idx] + + if profileId in None_Keys: + profileId = None + else: + try: + profileId = int(profileId) + except Exception: + return '', 400 + + TableShows.update({ + TableShows.profileId: profileId + }) \ + .where(TableShows.sonarrSeriesId == seriesId) \ + .execute() + + list_missing_subtitles(no=seriesId, send_event=False) + + event_stream(type='series', payload=seriesId) + + episode_id_list = TableEpisodes \ + .select(TableEpisodes.sonarrEpisodeId) \ + .where(TableEpisodes.sonarrSeriesId == seriesId) \ + .dicts() + + for item in episode_id_list: + event_stream(type='episode-wanted', 
payload=item['sonarrEpisodeId']) + + event_stream(type='badges') + + return '', 204 + + @authenticate + def patch(self): + seriesid = request.form.get('seriesid') + action = request.form.get('action') + if action == "scan-disk": + series_scan_subtitles(seriesid) + return '', 204 + elif action == "search-missing": + series_download_subtitles(seriesid) + return '', 204 + elif action == "search-wanted": + wanted_search_missing_subtitles_series() + return '', 204 + + return '', 400 diff --git a/bazarr/api/subtitles/__init__.py b/bazarr/api/subtitles/__init__.py new file mode 100644 index 000000000..7cb7d9a11 --- /dev/null +++ b/bazarr/api/subtitles/__init__.py @@ -0,0 +1,14 @@ +# coding=utf-8 + +from flask import Blueprint +from flask_restful import Api + +from .subtitles import Subtitles +from .subtitles_info import SubtitleNameInfo + + +api_bp_subtitles = Blueprint('api_subtitles', __name__) +api = Api(api_bp_subtitles) + +api.add_resource(Subtitles, '/subtitles') +api.add_resource(SubtitleNameInfo, '/subtitles/info') diff --git a/bazarr/api/subtitles/subtitles.py b/bazarr/api/subtitles/subtitles.py new file mode 100644 index 000000000..7799a9679 --- /dev/null +++ b/bazarr/api/subtitles/subtitles.py @@ -0,0 +1,72 @@ +# coding=utf-8 + +import os +import sys + +from flask import request +from flask_restful import Resource + +from database import TableEpisodes, TableMovies +from helper import path_mappings +from ..utils import authenticate +from subsyncer import subsync +from utils import translate_subtitles_file, subtitles_apply_mods +from get_subtitle import store_subtitles, store_subtitles_movie +from config import settings + + +class Subtitles(Resource): + @authenticate + def patch(self): + action = request.args.get('action') + + language = request.form.get('language') + subtitles_path = request.form.get('path') + media_type = request.form.get('type') + id = request.form.get('id') + + if media_type == 'episode': + subtitles_path = 
path_mappings.path_replace(subtitles_path) + metadata = TableEpisodes.select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)\ + .where(TableEpisodes.sonarrEpisodeId == id)\ + .dicts()\ + .get() + video_path = path_mappings.path_replace(metadata['path']) + else: + subtitles_path = path_mappings.path_replace_movie(subtitles_path) + metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get() + video_path = path_mappings.path_replace_movie(metadata['path']) + + if action == 'sync': + if media_type == 'episode': + subsync.sync(video_path=video_path, srt_path=subtitles_path, + srt_lang=language, media_type='series', sonarr_series_id=metadata['sonarrSeriesId'], + sonarr_episode_id=int(id)) + else: + subsync.sync(video_path=video_path, srt_path=subtitles_path, + srt_lang=language, media_type='movies', radarr_id=id) + elif action == 'translate': + dest_language = language + forced = True if request.form.get('forced') == 'true' else False + hi = True if request.form.get('hi') == 'true' else False + result = translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path, + to_lang=dest_language, + forced=forced, hi=hi) + if result: + if media_type == 'episode': + store_subtitles(path_mappings.path_replace_reverse(video_path), video_path) + else: + store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path) + return '', 200 + else: + return '', 404 + else: + subtitles_apply_mods(language, subtitles_path, [action]) + + # apply chmod if required + chmod = int(settings.general.chmod, 8) if not sys.platform.startswith( + 'win') and settings.general.getboolean('chmod_enabled') else None + if chmod: + os.chmod(subtitles_path, chmod) + + return '', 204 diff --git a/bazarr/api/subtitles/subtitles_info.py b/bazarr/api/subtitles/subtitles_info.py new file mode 100644 index 000000000..9de38ac55 --- /dev/null +++ b/bazarr/api/subtitles/subtitles_info.py @@ -0,0 +1,41 @@ +# coding=utf-8 + +from flask import 
request, jsonify +from flask_restful import Resource +from subliminal_patch.core import guessit +from ..utils import authenticate + + +class SubtitleNameInfo(Resource): + @authenticate + def get(self): + names = request.args.getlist('filenames[]') + results = [] + for name in names: + opts = dict() + opts['type'] = 'episode' + guessit_result = guessit(name, options=opts) + result = {} + result['filename'] = name + if 'subtitle_language' in guessit_result: + result['subtitle_language'] = str(guessit_result['subtitle_language']) + + result['episode'] = 0 + if 'episode' in guessit_result: + if isinstance(guessit_result['episode'], list): + # for multiple episodes file, choose the first episode number + if len(guessit_result['episode']): + # make sure that guessit returned a list of more than 0 items + result['episode'] = int(guessit_result['episode'][0]) + elif isinstance(guessit_result['episode'], (str, int)): + # if single episode (should be int but just in case we cast it to int) + result['episode'] = int(guessit_result['episode']) + + if 'season' in guessit_result: + result['season'] = int(guessit_result['season']) + else: + result['season'] = 0 + + results.append(result) + + return jsonify(data=results) diff --git a/bazarr/api/system/__init__.py b/bazarr/api/system/__init__.py new file mode 100644 index 000000000..cbe54a13b --- /dev/null +++ b/bazarr/api/system/__init__.py @@ -0,0 +1,33 @@ +# coding=utf-8 + +from flask import Blueprint +from flask_restful import Api + +from .system import System +from .searches import Searches +from .account import SystemAccount +from .tasks import SystemTasks +from .logs import SystemLogs +from .status import SystemStatus +from .health import SystemHealth +from .releases import SystemReleases +from .settings import SystemSettings +from .languages import Languages +from .languages_profiles import LanguagesProfiles +from .notifications import Notifications + +api_bp_system = Blueprint('api_system', __name__) +api = 
class SystemAccount(Resource):
    def post(self):
        """Log a user in or out when form-based authentication is enabled.

        Intentionally NOT decorated with @authenticate: this is the endpoint
        that establishes the session in the first place.
        """
        if settings.auth.type != 'form':
            # account actions only make sense for form auth
            return '', 405

        action = request.args.get('action')
        if action == 'login':
            if check_credentials(request.form.get('username'), request.form.get('password')):
                session['logged_in'] = True
                return '', 204
        elif action == 'logout':
            session.clear()
            gc.collect()
            return '', 204

        # unknown action or bad credentials
        return '', 401
class Languages(Resource):
    @authenticate
    def get(self):
        """Return languages known to Bazarr.

        With ?history=<truthy>, aggregate the distinct languages found in the
        series and movie download history; otherwise return the configured
        languages table with their enabled flags.
        """
        history = request.args.get('history')
        if history and history not in False_Keys:
            # NOTE: peewee needs '!= None' here to build IS NOT NULL; do not
            # replace with 'is not None'.
            languages = list(TableHistory.select(TableHistory.language)
                             .where(TableHistory.language != None)
                             .dicts())
            languages += list(TableHistoryMovie.select(TableHistoryMovie.language)
                              .where(TableHistoryMovie.language != None)
                              .dicts())
            # history stores 'code[:forced|:hi]'; keep only the bare code
            languages_list = list(set([lang['language'].split(':')[0] for lang in languages]))
            languages_dicts = []
            for language in languages_list:
                code2 = None
                if len(language) == 2:
                    code2 = language
                elif len(language) == 3:
                    code2 = alpha2_from_alpha3(language)
                else:
                    continue

                if not any(x['code2'] == code2 for x in languages_dicts):
                    try:
                        languages_dicts.append({
                            'code2': code2,
                            'name': language_from_alpha2(code2),
                            # Compatibility: Use false temporarily
                            'enabled': False
                        })
                    # fix: was a bare 'except:' which also swallowed
                    # KeyboardInterrupt/SystemExit; unknown codes are skipped
                    except Exception:
                        continue
            return jsonify(sorted(languages_dicts, key=itemgetter('name')))

        result = TableSettingsLanguages.select(TableSettingsLanguages.name,
                                               TableSettingsLanguages.code2,
                                               TableSettingsLanguages.enabled)\
            .order_by(TableSettingsLanguages.name).dicts()
        result = list(result)
        for item in result:
            # SQLite stores booleans as 0/1
            item['enabled'] = item['enabled'] == 1
        return jsonify(result)
class SystemLogs(Resource):
    @authenticate
    def get(self):
        """Return parsed log entries from bazarr.log, newest first.

        Each log record is pipe-delimited and terminated by '|\\n'.
        """
        log_path = os.path.join(args.config_dir, 'log', 'bazarr.log')
        entries = []
        with io.open(log_path, encoding='UTF-8') as file:
            for record in file.read().split('|\n'):
                if not record:
                    continue
                fields = record.split('|')
                if len(fields) > 3:
                    entry = {
                        "timestamp": fields[0],
                        "type": fields[1].rstrip(),
                        "message": fields[3],
                    }
                    if len(fields) > 4 and fields[4] != '\n':
                        # em-spaces preserve traceback indentation in the UI
                        entry['exception'] = fields[4].strip('\'').replace(' ', '\u2003\u2003')
                    entries.append(entry)

        entries.reverse()
        return jsonify(data=entries)

    @authenticate
    def delete(self):
        """Truncate the log file."""
        empty_log()
        return '', 204
class SystemReleases(Resource):
    @authenticate
    def get(self):
        """Return cached GitHub releases relevant to the configured branch.

        Reads the releases.txt cache written by the update checker and returns
        JSON {'data': [...]} with name/date/body/prerelease/current per release.
        On any parsing failure an empty list is returned and the error logged.
        """
        filtered_releases = []
        try:
            with io.open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r', encoding='UTF-8') as f:
                releases = json.loads(f.read())

            # Order matters: releases are assumed newest-first in the cache.
            for release in releases:
                if settings.general.branch == 'master' and not release['prerelease']:
                    # master branch only sees stable releases
                    filtered_releases.append(release)
                elif settings.general.branch != 'master' and any(not x['prerelease'] for x in filtered_releases):
                    # development branch: stop collecting once one stable
                    # release has already been included
                    continue
                elif settings.general.branch != 'master':
                    filtered_releases.append(release)
            if settings.general.branch == 'master':
                # keep only the five most recent stable releases
                filtered_releases = filtered_releases[:5]

            current_version = os.environ["BAZARR_VERSION"]

            for i, release in enumerate(filtered_releases):
                # drop list bullets and the first (header) line of the body
                body = release['body'].replace('- ', '').split('\n')[1:]
                filtered_releases[i] = {"body": body,
                                        "name": release['name'],
                                        "date": release['date'][:10],
                                        "prerelease": release['prerelease'],
                                        # tags are 'vX.Y.Z'; strip the leading 'v'
                                        "current": release['name'].lstrip('v') == current_version}

        except Exception:
            logging.exception(
                'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
        return jsonify(data=filtered_releases)
class Searches(Resource):
    @authenticate
    def get(self):
        """Search series and movie titles matching the 'query' parameter.

        Returns a JSON list mixing series rows (title, sonarrSeriesId, year)
        and movie rows (title, radarrId, year), each sorted by title.
        """
        query = request.args.get('query')
        results = []

        if query:
            if settings.general.getboolean('use_sonarr'):
                # matching series
                results += list(TableShows.select(TableShows.title,
                                                  TableShows.sonarrSeriesId,
                                                  TableShows.year)
                                .where(TableShows.title.contains(query))
                                .order_by(TableShows.title)
                                .dicts())

            if settings.general.getboolean('use_radarr'):
                # matching movies
                results += list(TableMovies.select(TableMovies.title,
                                                   TableMovies.radarrId,
                                                   TableMovies.year)
                                .where(TableMovies.title.contains(query))
                                .order_by(TableMovies.title)
                                .dicts())

        return jsonify(results)
request.form.getlist('languages-enabled') + if len(enabled_languages) != 0: + TableSettingsLanguages.update({ + TableSettingsLanguages.enabled: 0 + }).execute() + for code in enabled_languages: + TableSettingsLanguages.update({ + TableSettingsLanguages.enabled: 1 + })\ + .where(TableSettingsLanguages.code2 == code)\ + .execute() + event_stream("languages") + + languages_profiles = request.form.get('languages-profiles') + if languages_profiles: + existing_ids = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId).dicts() + existing_ids = list(existing_ids) + existing = [x['profileId'] for x in existing_ids] + for item in json.loads(languages_profiles): + if item['profileId'] in existing: + # Update existing profiles + TableLanguagesProfiles.update({ + TableLanguagesProfiles.name: item['name'], + TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None, + TableLanguagesProfiles.items: json.dumps(item['items']), + TableLanguagesProfiles.mustContain: item['mustContain'], + TableLanguagesProfiles.mustNotContain: item['mustNotContain'], + })\ + .where(TableLanguagesProfiles.profileId == item['profileId'])\ + .execute() + existing.remove(item['profileId']) + else: + # Add new profiles + TableLanguagesProfiles.insert({ + TableLanguagesProfiles.profileId: item['profileId'], + TableLanguagesProfiles.name: item['name'], + TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None, + TableLanguagesProfiles.items: json.dumps(item['items']), + TableLanguagesProfiles.mustContain: item['mustContain'], + TableLanguagesProfiles.mustNotContain: item['mustNotContain'], + }).execute() + for profileId in existing: + # Unassign this profileId from series and movies + TableShows.update({ + TableShows.profileId: None + }).where(TableShows.profileId == profileId).execute() + TableMovies.update({ + TableMovies.profileId: None + }).where(TableMovies.profileId == profileId).execute() + # Remove deleted profiles + 
class SystemStatus(Resource):
    @authenticate
    def get(self):
        """Return version, platform and path information for the About page."""
        system_status = {
            'bazarr_version': os.environ["BAZARR_VERSION"],
            'sonarr_version': get_sonarr_info.version(),
            'radarr_version': get_radarr_info.version(),
            'operating_system': platform.platform(),
            'python_version': platform.python_version(),
            # repository root (parent of the bazarr/ package directory)
            'bazarr_directory': os.path.dirname(os.path.dirname(__file__)),
            'bazarr_config_directory': args.config_dir,
            'start_time': startTime,
        }
        return jsonify(data=system_status)
class SystemTasks(Resource):
    @authenticate
    def get(self):
        """Return the scheduled task list, optionally filtered to one task id."""
        taskid = request.args.get('taskid')

        task_list = scheduler.get_task_list()

        if taskid:
            for item in task_list:
                if item['job_id'] == taskid:
                    task_list = [item]
                    # fix: was 'continue', which needlessly kept scanning the
                    # rest of the list after the match; job ids are unique
                    break

        return jsonify(data=task_list)

    @authenticate
    def post(self):
        """Run the task identified by form field 'taskid' immediately."""
        taskid = request.form.get('taskid')

        scheduler.execute_job_now(taskid)

        return '', 204
def postprocess(item):
    """Normalize a raw database row (dict) in place for API responses.

    Drops internal-only fields, deserializes stringified lists, converts
    'True'/'False' strings to booleans and expands the language code into a
    structured dict.

    :param item: mutable dict representing one database row.
    """
    # Remove ffprobe_cache (internal binary blob, not JSON-serializable)
    if 'ffprobe_cache' in item:
        del item['ffprobe_cache']

    # Tags are stored as a stringified Python list
    if 'tags' in item:
        if item['tags'] is None:
            item['tags'] = []
        else:
            item['tags'] = ast.literal_eval(item['tags'])

    if 'monitored' in item:
        if item['monitored'] is None:
            item['monitored'] = False
        else:
            item['monitored'] = item['monitored'] == 'True'

    if 'hearing_impaired' in item:
        # fix: the original guarded this branch with 'is not None', making the
        # inner None-check unreachable and leaving None values unnormalized;
        # now None becomes False, consistent with 'monitored' above
        if item['hearing_impaired'] is None:
            item['hearing_impaired'] = False
        else:
            item['hearing_impaired'] = item['hearing_impaired'] == 'True'

    if 'language' in item:
        if item['language'] == 'None':
            item['language'] = None
        elif item['language'] is not None:
            # stored as 'code2[:forced|:hi]'
            splitted_language = item['language'].split(':')
            item['language'] = {"name": language_from_alpha2(splitted_language[0]),
                                "code2": splitted_language[0],
                                "code3": alpha3_from_alpha2(splitted_language[0]),
                                "forced": item['language'].endswith(':forced'),
                                "hi": item['language'].endswith(':hi')}
def postprocessEpisode(item):
    """Postprocess an episode row in place: audio language, subtitle lists and paths.

    Builds structured subtitle dicts (name/code2/code3/path/forced/hi) from the
    stringified representations stored in the database.
    """
    postprocess(item)
    # Replace the raw audio string with the per-episode audio profile languages
    if 'audio_language' in item and item['audio_language'] is not None:
        item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId'])

    if 'subtitles' in item:
        if item['subtitles'] is None:
            raw_subtitles = []
        else:
            # stored as a stringified list of [language_tag, path] pairs
            raw_subtitles = ast.literal_eval(item['subtitles'])
        subtitles = []

        for subs in raw_subtitles:
            # language_tag is 'code2[:forced|:hi]'
            subtitle = subs[0].split(':')
            sub = {"name": language_from_alpha2(subtitle[0]),
                   "code2": subtitle[0],
                   "code3": alpha3_from_alpha2(subtitle[0]),
                   "path": path_mappings.path_replace(subs[1]),
                   "forced": False,
                   "hi": False}
            if len(subtitle) > 1:
                sub["forced"] = True if subtitle[1] == 'forced' else False
                sub["hi"] = True if subtitle[1] == 'hi' else False

            subtitles.append(sub)

        item.update({"subtitles": subtitles})

    # Parse missing subtitles (stringified list of 'code2[:forced|:hi]' tags)
    if 'missing_subtitles' in item:
        if item['missing_subtitles'] is None:
            item['missing_subtitles'] = []
        else:
            item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
        for i, subs in enumerate(item['missing_subtitles']):
            subtitle = subs.split(':')
            item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
                                            "code2": subtitle[0],
                                            "code3": alpha3_from_alpha2(subtitle[0]),
                                            "forced": False,
                                            "hi": False}
            if len(subtitle) > 1:
                item['missing_subtitles'][i].update({
                    "forced": True if subtitle[1] == 'forced' else False,
                    "hi": True if subtitle[1] == 'hi' else False
                })

    # API consumers expect camelCase 'sceneName'
    if 'scene_name' in item:
        item["sceneName"] = item["scene_name"]
        del item["scene_name"]

    if 'path' in item and item['path']:
        # Provide mapped path
        item['path'] = path_mappings.path_replace(item['path'])
class WebHooksPlex(Resource):
    @authenticate
    def post(self):
        """Plex webhook: trigger a subtitles search for the item being played.

        Expects a 'payload' form field containing Plex's JSON webhook body.
        Only 'media.play' events are handled; everything else returns 204.
        """
        json_webhook = request.form.get('payload')
        parsed_json_webhook = json.loads(json_webhook)

        event = parsed_json_webhook['event']
        if event not in ['media.play']:
            return '', 204

        media_type = parsed_json_webhook['Metadata']['type']

        if media_type == 'episode':
            season = parsed_json_webhook['Metadata']['parentIndex']
            episode = parsed_json_webhook['Metadata']['index']
        else:
            season = episode = None

        # Guid entries look like 'imdb://tt1234567'; map provider -> id
        ids = []
        for item in parsed_json_webhook['Metadata']['Guid']:
            splitted_id = item['id'].split('://')
            if len(splitted_id) == 2:
                ids.append({splitted_id[0]: splitted_id[1]})
        if not ids:
            return '', 404

        if media_type == 'episode':
            try:
                episode_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
                # Plex only sends the episode's IMDB id; scrape the parent
                # series id from the episode page
                r = requests.get('https://imdb.com/title/{}'.format(episode_imdb_id),
                                 headers={"User-Agent": os.environ["SZ_USER_AGENT"]})
                soup = bso(r.content, "html.parser")
                series_imdb_id = soup.find(
                    'a', {'class': re.compile(r'SeriesParentLink__ParentTextLink')})['href'].split('/')[2]
            # fix: narrowed from a bare 'except:'; missing imdb id or scraping
            # failures end here with a 404
            except Exception:
                return '', 404
            else:
                # fix: .get() raises DoesNotExist when no row matches, so the
                # 'if sonarrEpisodeId' check below could never be reached for a
                # missing episode and the request crashed with a 500;
                # .first() returns None instead
                sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
                    .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
                    .where(TableShows.imdbId == series_imdb_id,
                           TableEpisodes.season == season,
                           TableEpisodes.episode == episode) \
                    .dicts() \
                    .first()

                if sonarrEpisodeId:
                    episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
        else:
            try:
                movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
            # fix: narrowed from a bare 'except:'
            except Exception:
                return '', 404
            else:
                # fix: same DoesNotExist issue as above; use .first()
                radarrId = TableMovies.select(TableMovies.radarrId)\
                    .where(TableMovies.imdbId == movie_imdb_id)\
                    .dicts()\
                    .first()
                if radarrId:
                    movies_download_subtitles(no=radarrId['radarrId'])

        return '', 200
path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*', async_mode='threading') + socketio.init_app(app, path=base_url.rstrip('/')+'/api/socket.io', cors_allowed_origins='*', + async_mode='threading', allow_upgrades=False, transports='polling') return app diff --git a/bazarr/check_update.py b/bazarr/check_update.py index 96c88e92d..3b73962a6 100644 --- a/bazarr/check_update.py +++ b/bazarr/check_update.py @@ -193,7 +193,7 @@ def update_cleaner(zipfile, bazarr_dir, config_dir): '^venv' + separator, '^WinPython' + separator, separator + '__pycache__' + separator + '$'] - if os.path.abspath(bazarr_dir) in os.path.abspath(config_dir): + if os.path.abspath(bazarr_dir).lower() in os.path.abspath(config_dir).lower(): dir_to_ignore.append('^' + os.path.relpath(config_dir, bazarr_dir) + os.path.sep) dir_to_ignore_regex = re.compile('(?:% s)' % '|'.join(dir_to_ignore)) logging.debug('BAZARR upgrade leftover cleaner will ignore directories matching this regex: ' diff --git a/bazarr/config.py b/bazarr/config.py index af581c352..62eba3088 100644 --- a/bazarr/config.py +++ b/bazarr/config.py @@ -57,6 +57,8 @@ defaults = { 'ignore_vobsub_subs': 'False', 'ignore_ass_subs': 'False', 'adaptive_searching': 'False', + 'adaptive_searching_delay': '3w', + 'adaptive_searching_delta': '1w', 'enabled_providers': '[]', 'multithreading': 'True', 'chmod_enabled': 'False', @@ -92,7 +94,8 @@ defaults = { 'episodes_sync': '60', 'excluded_tags': '[]', 'excluded_series_types': '[]', - 'use_ffprobe_cache': 'True' + 'use_ffprobe_cache': 'True', + 'exclude_season_zero': 'False' }, 'radarr': { 'ip': '127.0.0.1', @@ -132,7 +135,8 @@ defaults = { }, 'addic7ed': { 'username': '', - 'password': '' + 'password': '', + 'vip': 'False' }, 'podnapisi': { 'verify_ssl': 'True' @@ -190,6 +194,10 @@ defaults = { 'approved_only': 'False', 'multithreading': 'True' }, + 'embeddedsubtitles': { + 'include_ass': 'True', + 'include_srt': 'True', + }, 'subsync': { 'use_subsync': 'False', 
'use_subsync_threshold': 'False', @@ -388,12 +396,12 @@ def save_settings(settings_items): configure_proxy = True if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored', - 'settings-sonarr-excluded_series_types', 'settings.radarr.excluded_tags', - 'settings-radarr-only_monitored']: + 'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero', + 'settings.radarr.excluded_tags', 'settings-radarr-only_monitored']: exclusion_updated = True if key in ['settings-sonarr-excluded_tags', 'settings-sonarr-only_monitored', - 'settings-sonarr-excluded_series_types']: + 'settings-sonarr-excluded_series_types', 'settings-sonarr-exclude_season_zero']: sonarr_exclusion_updated = True if key in ['settings.radarr.excluded_tags', 'settings-radarr-only_monitored']: @@ -463,8 +471,10 @@ def save_settings(settings_items): configure_captcha_func() if update_schedule: - from api import scheduler + from scheduler import scheduler + from event_handler import event_stream scheduler.update_configurable_tasks() + event_stream(type='task') if sonarr_changed: from signalr_client import sonarr_signalr_client diff --git a/bazarr/database.py b/bazarr/database.py index 85f420110..03ce0756b 100644 --- a/bazarr/database.py +++ b/bazarr/database.py @@ -136,6 +136,8 @@ class TableLanguagesProfiles(BaseModel): items = TextField() name = TextField() profileId = AutoField() + mustContain = TextField(null=True) + mustNotContain = TextField(null=True) class Meta: table_name = 'table_languages_profiles' @@ -329,7 +331,9 @@ def migrate_db(): migrator.add_column('table_history_movie', 'provider', TextField(null=True)), migrator.add_column('table_history_movie', 'score', TextField(null=True)), migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)), - migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)) + migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)), + 
migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)), + migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)), ) @@ -386,6 +390,10 @@ def get_exclusion_clause(exclusion_type): for item in typesList: where_clause.append((TableShows.seriesType != item)) + exclude_season_zero = settings.sonarr.getboolean('exclude_season_zero') + if exclude_season_zero: + where_clause.append((TableEpisodes.season != 0)) + return where_clause @@ -394,10 +402,16 @@ def update_profile_id_list(): profile_id_list = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId, TableLanguagesProfiles.name, TableLanguagesProfiles.cutoff, - TableLanguagesProfiles.items).dicts() + TableLanguagesProfiles.items, + TableLanguagesProfiles.mustContain, + TableLanguagesProfiles.mustNotContain).dicts() profile_id_list = list(profile_id_list) for profile in profile_id_list: profile['items'] = json.loads(profile['items']) + profile['mustContain'] = ast.literal_eval(profile['mustContain']) if profile['mustContain'] else \ + profile['mustContain'] + profile['mustNotContain'] = ast.literal_eval(profile['mustNotContain']) if profile['mustNotContain'] else \ + profile['mustNotContain'] def get_profiles_list(profile_id=None): @@ -422,7 +436,7 @@ def get_desired_languages(profile_id): if profile_id and profile_id != 'null': for profile in profile_id_list: - profileId, name, cutoff, items = profile.values() + profileId, name, cutoff, items, mustContain, mustNotContain = profile.values() if profileId == int(profile_id): languages = [x['language'] for x in items] break @@ -438,7 +452,7 @@ def get_profile_id_name(profile_id): if profile_id and profile_id != 'null': for profile in profile_id_list: - profileId, name, cutoff, items = profile.values() + profileId, name, cutoff, items, mustContain, mustNotContain = profile.values() if profileId == int(profile_id): name_from_id = name break @@ -455,7 +469,7 @@ def get_profile_cutoff(profile_id): if 
profile_id and profile_id != 'null': cutoff_language = [] for profile in profile_id_list: - profileId, name, cutoff, items = profile.values() + profileId, name, cutoff, items, mustContain, mustNotContain = profile.values() if cutoff: if profileId == int(profile_id): for item in items: @@ -498,6 +512,22 @@ def get_audio_profile_languages(series_id=None, episode_id=None, movie_id=None): return audio_languages +def get_profile_id(series_id=None, episode_id=None, movie_id=None): + if series_id: + profileId = TableShows.get(TableShows.sonarrSeriesId == series_id).profileId + elif episode_id: + profileId = TableShows.select(TableShows.profileId)\ + .join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId))\ + .where(TableEpisodes.sonarrEpisodeId == episode_id)\ + .get().profileId + elif movie_id: + profileId = TableMovies.get(TableMovies.radarrId == movie_id).profileId + else: + return None + + return profileId + + def convert_list_to_clause(arr: list): if isinstance(arr, list): return f"({','.join(str(x) for x in arr)})" diff --git a/bazarr/get_providers.py b/bazarr/get_providers.py index ab3974c4b..fd21cd380 100644 --- a/bazarr/get_providers.py +++ b/bazarr/get_providers.py @@ -8,11 +8,11 @@ import pretty import time import socket import requests -import ast from get_args import args from config import settings, get_array_from from event_handler import event_stream +from utils import get_binary from subliminal_patch.exceptions import TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked from subliminal.providers.opensubtitles import DownloadLimitReached from subliminal.exceptions import DownloadLimitExceeded, ServiceUnavailable @@ -126,6 +126,7 @@ def get_providers_auth(): 'addic7ed' : { 'username': settings.addic7ed.username, 'password': settings.addic7ed.password, + 'is_vip': settings.addic7ed.getboolean('vip'), }, 'opensubtitles' : { 'username' : settings.opensubtitles.username, @@ -198,6 +199,13 @@ def get_providers_auth(): 
'email': settings.ktuvit.email, 'hashed_password': settings.ktuvit.hashed_password, }, + 'embeddedsubtitles': { + 'include_ass': settings.embeddedsubtitles.getboolean('include_ass'), + 'include_srt': settings.embeddedsubtitles.getboolean('include_srt'), + 'cache_dir': os.path.join(args.config_dir, "cache"), + 'ffprobe_path': get_binary("ffprobe"), + 'ffmpeg_path': get_binary("ffmpeg"), + } } @@ -315,12 +323,17 @@ def reset_throttled_providers(): def get_throttled_providers(): providers = {} - if os.path.exists(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')): - with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as handle: - providers = handle.read() - if not providers: - providers = {} - return providers + try: + if os.path.exists(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')): + with open(os.path.normpath(os.path.join(args.config_dir, 'config', 'throttled_providers.dat')), 'r') as \ + handle: + providers = eval(handle.read()) + except: + # set empty content in throttled_providers.dat + logging.error("Invalid content in throttled_providers.dat. Resetting") + set_throttled_providers(providers) + finally: + return providers def set_throttled_providers(data): @@ -328,12 +341,6 @@ def set_throttled_providers(data): handle.write(data) -try: - tp = eval(str(get_throttled_providers())) - if not isinstance(tp, dict): - raise ValueError('tp should be a dict') -except Exception: - logging.error("Invalid content in throttled_providers.dat. 
Resetting") - # set empty content in throttled_providers.dat - set_throttled_providers('') - tp = eval(str(get_throttled_providers())) +tp = get_throttled_providers() +if not isinstance(tp, dict): + raise ValueError('tp should be a dict') diff --git a/bazarr/get_series.py b/bazarr/get_series.py index c92af286b..9d95e6100 100644 --- a/bazarr/get_series.py +++ b/bazarr/get_series.py @@ -117,10 +117,10 @@ def update_series(send_event=True): logging.debug('BAZARR unable to insert this series into the database:', path_mappings.path_replace(added_series['path'])) - if send_event: - event_stream(type='series', action='update', payload=added_series['sonarrSeriesId']) + if send_event: + event_stream(type='series', action='update', payload=added_series['sonarrSeriesId']) - logging.debug('BAZARR All series synced from Sonarr into database.') + logging.debug('BAZARR All series synced from Sonarr into database.') def update_one_series(series_id, action): diff --git a/bazarr/get_subtitle.py b/bazarr/get_subtitle.py index 9bf0875f9..fcf69bc14 100644 --- a/bazarr/get_subtitle.py +++ b/bazarr/get_subtitle.py @@ -84,7 +84,7 @@ def get_video(path, title, sceneName, providers=None, media_type="movie"): def download_subtitle(path, language, audio_language, hi, forced, providers, providers_auth, sceneName, title, - media_type, forced_minimum_score=None, is_upgrade=False): + media_type, forced_minimum_score=None, is_upgrade=False, profile_id=None): # fixme: supply all missing languages, not only one, to hit providers only once who support multiple languages in # one query @@ -158,6 +158,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro compute_score=compute_score, throttle_time=None, # fixme blacklist=get_blacklist(media_type=media_type), + ban_list=get_ban_list(profile_id), throttle_callback=provider_throttle, score_obj=handler, pre_download_hook=None, # fixme @@ -216,6 +217,7 @@ def download_subtitle(path, language, audio_language, hi, forced, 
providers, pro action = "upgraded" else: action = "downloaded" + percent_score = round(subtitle.score * 100 / max_score, 2) message = downloaded_language + modifier_string + " subtitles " + action + " from " + \ downloaded_provider + " with a score of " + str(percent_score) + "%." @@ -229,6 +231,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro series_id = episode_metadata['sonarrSeriesId'] episode_id = episode_metadata['sonarrEpisodeId'] sync_subtitles(video_path=path, srt_path=downloaded_path, + forced=subtitle.language.forced, srt_lang=downloaded_language_code2, media_type=media_type, percent_score=percent_score, sonarr_series_id=episode_metadata['sonarrSeriesId'], @@ -241,6 +244,7 @@ def download_subtitle(path, language, audio_language, hi, forced, providers, pro series_id = "" episode_id = movie_metadata['radarrId'] sync_subtitles(video_path=path, srt_path=downloaded_path, + forced=subtitle.language.forced, srt_lang=downloaded_language_code2, media_type=media_type, percent_score=percent_score, radarr_id=movie_metadata['radarrId']) @@ -358,6 +362,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title, providers=providers, provider_configs=providers_auth, blacklist=get_blacklist(media_type=media_type), + ban_list=get_ban_list(profileId), throttle_callback=provider_throttle, language_hook=None) # fixme @@ -372,6 +377,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title, providers=['subscene'], provider_configs=providers_auth, blacklist=get_blacklist(media_type=media_type), + ban_list=get_ban_list(profileId), throttle_callback=provider_throttle, language_hook=None) # fixme providers_auth['subscene']['only_foreign'] = False @@ -415,9 +421,10 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title, score, score_without_hash = compute_score(matches, s, video, hearing_impaired=initial_hi, score_obj=handler) if 'hash' not in matches: not_matched = 
scores - matches + s.score = score_without_hash else: + s.score = score not_matched = set() - s.score = score_without_hash if s.hearing_impaired == initial_hi: matches.add('hearing_impaired') @@ -462,7 +469,7 @@ def manual_search(path, profileId, providers, providers_auth, sceneName, title, def manual_download_subtitle(path, language, audio_language, hi, forced, subtitle, provider, providers_auth, sceneName, - title, media_type): + title, media_type, profile_id): logging.debug('BAZARR Manually downloading Subtitles for this file: ' + path) if settings.general.getboolean('utf8_encode'): @@ -494,6 +501,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl provider_configs=providers_auth, pool_class=provider_pool(), blacklist=get_blacklist(media_type=media_type), + ban_list=get_ban_list(profile_id), throttle_callback=provider_throttle) logging.debug('BAZARR Subtitles file downloaded for this file:' + path) else: @@ -552,6 +560,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl series_id = episode_metadata['sonarrSeriesId'] episode_id = episode_metadata['sonarrEpisodeId'] sync_subtitles(video_path=path, srt_path=downloaded_path, + forced=subtitle.language.forced, srt_lang=downloaded_language_code2, media_type=media_type, percent_score=score, sonarr_series_id=episode_metadata['sonarrSeriesId'], @@ -564,6 +573,7 @@ def manual_download_subtitle(path, language, audio_language, hi, forced, subtitl series_id = "" episode_id = movie_metadata['radarrId'] sync_subtitles(video_path=path, srt_path=downloaded_path, + forced=subtitle.language.forced, srt_lang=downloaded_language_code2, media_type=media_type, percent_score=score, radarr_id=movie_metadata['radarrId']) @@ -696,7 +706,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_ series_id = episode_metadata['sonarrSeriesId'] episode_id = episode_metadata['sonarrEpisodeId'] sync_subtitles(video_path=path, srt_path=subtitle_path, 
srt_lang=uploaded_language_code2, media_type=media_type, - percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'], + percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'], forced=forced, sonarr_episode_id=episode_metadata['sonarrEpisodeId']) else: movie_metadata = TableMovies.select(TableMovies.radarrId)\ @@ -706,7 +716,7 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_ series_id = "" episode_id = movie_metadata['radarrId'] sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type, - percent_score=100, radarr_id=movie_metadata['radarrId']) + percent_score=100, radarr_id=movie_metadata['radarrId'], forced=forced) if use_postprocessing : command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, @@ -755,12 +765,13 @@ def series_download_subtitles(no): "ignored because of monitored status, series type or series tags: {}".format(no)) return - providers_list = get_providers() providers_auth = get_providers_auth() count_episodes_details = len(episodes_details) for i, episode in enumerate(episodes_details): + providers_list = get_providers() + if providers_list: show_progress(id='series_search_progress_{}'.format(no), header='Searching missing subtitles...', @@ -845,10 +856,11 @@ def episode_download_subtitles(no, send_progress=False): logging.debug("BAZARR no episode with that sonarrEpisodeId can be found in database:", str(no)) return - providers_list = get_providers() providers_auth = get_providers_auth() for episode in episodes_details: + providers_list = get_providers() + if providers_list: if send_progress: show_progress(id='episode_search_progress_{}'.format(no), @@ -929,7 +941,6 @@ def movies_download_subtitles(no): else: movie = movies[0] - providers_list = get_providers() providers_auth = get_providers_auth() if ast.literal_eval(movie['missing_subtitles']): @@ -938,15 +949,17 @@ def movies_download_subtitles(no): 
count_movie = 0 for i, language in enumerate(ast.literal_eval(movie['missing_subtitles'])): - # confirm if language is still missing or if cutoff have been reached - confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles)\ - .where(TableMovies.radarrId == movie['radarrId'])\ - .dicts()\ - .get() - if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): - continue + providers_list = get_providers() if providers_list: + # confirm if language is still missing or if cutoff have been reached + confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \ + .where(TableMovies.radarrId == movie['radarrId']) \ + .dicts() \ + .get() + if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): + continue + show_progress(id='movie_search_progress_{}'.format(no), header='Searching missing subtitles...', name=movie['title'], @@ -1008,77 +1021,71 @@ def wanted_download_subtitles(sonarr_episode_id): .dicts() episodes_details = list(episodes_details) - providers_list = get_providers() providers_auth = get_providers_auth() for episode in episodes_details: - attempt = episode['failedAttempts'] - if type(attempt) == str: - attempt = ast.literal_eval(attempt) - for language in ast.literal_eval(episode['missing_subtitles']): - # confirm if language is still missing or if cutoff have been reached - confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \ - .where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \ - .dicts() \ - .get() - if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): - continue + providers_list = get_providers() - if attempt is None: - attempt = [] - attempt.append([language, time.time()]) - else: - att = list(zip(*attempt))[0] - if language not in att: - attempt.append([language, time.time()]) + if providers_list: + for language in ast.literal_eval(episode['missing_subtitles']): + # confirm if language is still missing 
or if cutoff have been reached + confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \ + .where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \ + .dicts() \ + .get() + if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): + continue - TableEpisodes.update({TableEpisodes.failedAttempts: str(attempt)})\ - .where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId'])\ - .execute() + if is_search_active(desired_language=language, attempt_string=episode['failedAttempts']): + TableEpisodes.update({TableEpisodes.failedAttempts: + updateFailedAttempts(desired_language=language, + attempt_string=episode['failedAttempts'])}) \ + .where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \ + .execute() - for i in range(len(attempt)): - if attempt[i][0] == language: - if search_active(attempt[i][1]): - audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId']) - if len(audio_language_list) > 0: - audio_language = audio_language_list[0]['name'] - else: - audio_language = 'None' - - result = download_subtitle(path_mappings.path_replace(episode['path']), - language.split(':')[0], - audio_language, - "True" if language.endswith(':hi') else "False", - "True" if language.endswith(':forced') else "False", - providers_list, - providers_auth, - str(episode['scene_name']), - episode['title'], - 'series') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - store_subtitles(episode['path'], path_mappings.path_replace(episode['path'])) - history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path, - language_code, provider, score, subs_id, subs_path) - event_stream(type='series', 
action='update', payload=episode['sonarrSeriesId']) - event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId']) - send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message) + audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId']) + if len(audio_language_list) > 0: + audio_language = audio_language_list[0]['name'] else: - logging.debug( - 'BAZARR Search is not active for episode ' + episode['path'] + ' Language: ' + attempt[i][ - 0]) + audio_language = 'None' + + result = download_subtitle(path_mappings.path_replace(episode['path']), + language.split(':')[0], + audio_language, + "True" if language.endswith(':hi') else "False", + "True" if language.endswith(':forced') else "False", + providers_list, + providers_auth, + str(episode['scene_name']), + episode['title'], + 'series') + if result is not None: + message = result[0] + path = result[1] + forced = result[5] + if result[8]: + language_code = result[2] + ":hi" + elif forced: + language_code = result[2] + ":forced" + else: + language_code = result[2] + provider = result[3] + score = result[4] + subs_id = result[6] + subs_path = result[7] + store_subtitles(episode['path'], path_mappings.path_replace(episode['path'])) + history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path, + language_code, provider, score, subs_id, subs_path) + event_stream(type='series', action='update', payload=episode['sonarrSeriesId']) + event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId']) + send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message) + else: + logging.debug( + f"BAZARR Search is throttled by adaptive search for this episode {episode['path']} and " + f"language: {language}") + else: + logging.info("BAZARR All providers are throttled") + break def wanted_download_subtitles_movie(radarr_id): @@ -1093,76 +1100,69 @@ def 
wanted_download_subtitles_movie(radarr_id): .dicts() movies_details = list(movies_details) - providers_list = get_providers() providers_auth = get_providers_auth() for movie in movies_details: - attempt = movie['failedAttempts'] - if type(attempt) == str: - attempt = ast.literal_eval(attempt) - for language in ast.literal_eval(movie['missing_subtitles']): - # confirm if language is still missing or if cutoff have been reached - confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \ - .where(TableMovies.radarrId == movie['radarrId']) \ - .dicts() \ - .get() - if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): - continue + providers_list = get_providers() - if attempt is None: - attempt = [] - attempt.append([language, time.time()]) - else: - att = list(zip(*attempt))[0] - if language not in att: - attempt.append([language, time.time()]) + if providers_list: + for language in ast.literal_eval(movie['missing_subtitles']): + # confirm if language is still missing or if cutoff have been reached + confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \ + .where(TableMovies.radarrId == movie['radarrId']) \ + .dicts() \ + .get() + if language not in ast.literal_eval(confirmed_missing_subs['missing_subtitles']): + continue - TableMovies.update({TableMovies.failedAttempts: str(attempt)})\ - .where(TableMovies.radarrId == movie['radarrId'])\ - .execute() + if is_search_active(desired_language=language, attempt_string=movie['failedAttempts']): + TableMovies.update({TableMovies.failedAttempts: + updateFailedAttempts(desired_language=language, + attempt_string=movie['failedAttempts'])}) \ + .where(TableMovies.radarrId == movie['radarrId']) \ + .execute() - for i in range(len(attempt)): - if attempt[i][0] == language: - if search_active(attempt[i][1]) is True: - audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId']) - if len(audio_language_list) > 0: - audio_language = 
audio_language_list[0]['name'] - else: - audio_language = 'None' - - result = download_subtitle(path_mappings.path_replace_movie(movie['path']), - language.split(':')[0], - audio_language, - "True" if language.endswith(':hi') else "False", - "True" if language.endswith(':forced') else "False", - providers_list, - providers_auth, - str(movie['sceneName']), - movie['title'], - 'movie') - if result is not None: - message = result[0] - path = result[1] - forced = result[5] - if result[8]: - language_code = result[2] + ":hi" - elif forced: - language_code = result[2] + ":forced" - else: - language_code = result[2] - provider = result[3] - score = result[4] - subs_id = result[6] - subs_path = result[7] - store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path'])) - history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score, - subs_id, subs_path) - event_stream(type='movie-wanted', action='delete', payload=movie['radarrId']) - send_notifications_movie(movie['radarrId'], message) + audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId']) + if len(audio_language_list) > 0: + audio_language = audio_language_list[0]['name'] else: - logging.info( - 'BAZARR Search is not active for this Movie ' + movie['path'] + ' Language: ' + attempt[i][ - 0]) + audio_language = 'None' + + result = download_subtitle(path_mappings.path_replace_movie(movie['path']), + language.split(':')[0], + audio_language, + "True" if language.endswith(':hi') else "False", + "True" if language.endswith(':forced') else "False", + providers_list, + providers_auth, + str(movie['sceneName']), + movie['title'], + 'movie') + if result is not None: + message = result[0] + path = result[1] + forced = result[5] + if result[8]: + language_code = result[2] + ":hi" + elif forced: + language_code = result[2] + ":forced" + else: + language_code = result[2] + provider = result[3] + score = result[4] + subs_id = result[6] + subs_path = result[7] + 
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path'])) + history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score, + subs_id, subs_path) + event_stream(type='movie-wanted', action='delete', payload=movie['radarrId']) + send_notifications_movie(movie['radarrId'], message) + else: + logging.info(f"BAZARR Search is throttled by adaptive search for this movie {movie['path']} and " + f"language: {language}") + else: + logging.info("BAZARR All providers are throttled") + break def wanted_search_missing_subtitles_series(): @@ -1236,25 +1236,6 @@ def wanted_search_missing_subtitles_movies(): logging.info('BAZARR Finished searching for missing Movies Subtitles. Check History for more information.') -def search_active(timestamp): - if settings.general.getboolean('adaptive_searching'): - search_deadline = timedelta(weeks=3) - search_delta = timedelta(weeks=1) - aa = datetime.fromtimestamp(float(timestamp)) - attempt_datetime = datetime.strptime(str(aa).split(".")[0], '%Y-%m-%d %H:%M:%S') - attempt_search_deadline = attempt_datetime + search_deadline - today = datetime.today() - attempt_age_in_days = (today.date() - attempt_search_deadline.date()).days - if today.date() <= attempt_search_deadline.date(): - return True - elif attempt_age_in_days % search_delta.days == 0: - return True - else: - return False - else: - return True - - def convert_to_guessit(guessit_key, attr_from_db): try: return guessit(attr_from_db)[guessit_key] @@ -1490,11 +1471,12 @@ def upgrade_subtitles(): count_movie_to_upgrade = len(movies_to_upgrade) - providers_list = get_providers() providers_auth = get_providers_auth() if settings.general.getboolean('use_sonarr'): for i, episode in enumerate(episodes_to_upgrade): + providers_list = get_providers() + show_progress(id='upgrade_episodes_progress', header='Upgrading episodes subtitles...', name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['seriesTitle'], @@ -1504,8 +1486,7 @@ def 
upgrade_subtitles(): value=i, count=count_episode_to_upgrade) - providers = get_providers() - if not providers: + if not providers_list: logging.info("BAZARR All providers are throttled") return if episode['language'].endswith('forced'): @@ -1562,17 +1543,15 @@ def upgrade_subtitles(): if settings.general.getboolean('use_radarr'): for i, movie in enumerate(movies_to_upgrade): + providers_list = get_providers() + show_progress(id='upgrade_movies_progress', header='Upgrading movies subtitles...', name=movie['title'], value=i, count=count_movie_to_upgrade) - providers = get_providers() - if not providers: - logging.info("BAZARR All providers are throttled") - return - if not providers: + if not providers_list: logging.info("BAZARR All providers are throttled") return if movie['language'].endswith('forced'): @@ -1660,9 +1639,15 @@ def postprocessing(command, path): logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out) -def sync_subtitles(video_path, srt_path, srt_lang, media_type, percent_score, sonarr_series_id=None, +def sync_subtitles(video_path, srt_path, srt_lang, forced, media_type, percent_score, sonarr_series_id=None, sonarr_episode_id=None, radarr_id=None): - if settings.subsync.getboolean('use_subsync'): + if forced: + logging.debug('BAZARR cannot sync forced subtitles. Skipping sync routine.') + elif not settings.subsync.getboolean('use_subsync'): + logging.debug('BAZARR automatic syncing is disabled in settings. Skipping sync routine.') + else: + logging.debug(f'BAZARR automatic syncing is enabled in settings. 
We\'ll try to sync this ' + f'subtitles: {srt_path}.') if media_type == 'series': use_subsync_threshold = settings.subsync.getboolean('use_subsync_threshold') subsync_threshold = settings.subsync.subsync_threshold @@ -1694,6 +1679,7 @@ def _get_lang_obj(alpha3): return sub.subzero_language() + def _get_scores(media_type, min_movie=None, min_ep=None): series = "series" == media_type handler = series_score if series else movie_score @@ -1701,3 +1687,154 @@ def _get_scores(media_type, min_movie=None, min_ep=None): min_ep = min_ep or (240 * 100 / handler.max_score) min_score_ = int(min_ep if series else min_movie) return handler.get_scores(min_score_) + + +def get_ban_list(profile_id): + if profile_id: + profile = get_profiles_list(profile_id) + if profile: + return {'must_contain': profile['mustContain'] or [], + 'must_not_contain': profile['mustNotContain'] or []} + return None + + +def is_search_active(desired_language, attempt_string): + """ + Function to test if it's time to search again after a previous attempt matching the desired language. For 3 weeks, + we search on a scheduled basis but after 3 weeks we start searching only once a week. + + @param desired_language: 2 letters language to search for in attempts + @type desired_language: str + @param attempt_string: string representation of a list of lists from database column failedAttempts + @type attempt_string: str + + @return: return True if it's time to search again and False if not + @rtype: bool + """ + + if settings.general.getboolean('adaptive_searching'): + logging.debug("Adaptive searching is enable, we'll see if it's time to search again...") + try: + # let's try to get a list of lists from the string representation in database + attempts = ast.literal_eval(attempt_string) + if type(attempts) is not list: + # attempts should be a list if not, it's malformed or None + raise ValueError + except ValueError: + logging.debug("Adaptive searching: attempts is malformed. 
As a failsafe, search will run.") + return True + + if not len(attempts): + logging.debug("Adaptive searching: attempts list is empty, search will run.") + return True + + # get attempts matching the desired language and sort them by timestamp ascending + matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1]) + + if not len(matching_attempts): + logging.debug("Adaptive searching: there's no attempts matching desired language, search will run.") + return True + else: + logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}") + + # try to get the initial and latest search timestamp from matching attempts + initial_search_attempt = matching_attempts[0] + latest_search_attempt = matching_attempts[-1] + + # try to parse the timestamps for those attempts + try: + initial_search_timestamp = datetime.fromtimestamp(initial_search_attempt[1]) + latest_search_timestamp = datetime.fromtimestamp(latest_search_attempt[1]) + except (OverflowError, ValueError, OSError): + logging.debug("Adaptive searching: unable to parse initial and latest search timestamps, search will run.") + return True + else: + logging.debug(f"Adaptive searching: initial search date for {desired_language} is " + f"{initial_search_timestamp}") + logging.debug(f"Adaptive searching: latest search date for {desired_language} is {latest_search_timestamp}") + + # defining basic calculation variables + now = datetime.now() + if settings.general.adaptive_searching_delay.endswith('d'): + extended_search_delay = timedelta(days=int(settings.general.adaptive_searching_delay[:1])) + elif settings.general.adaptive_searching_delay.endswith('w'): + extended_search_delay = timedelta(weeks=int(settings.general.adaptive_searching_delay[:1])) + else: + logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delay from config file: " + f"{settings.general.adaptive_searching_delay}") + return True + logging.debug(f"Adaptive 
searching: delay after initial search value: {extended_search_delay}") + + if settings.general.adaptive_searching_delta.endswith('d'): + extended_search_delta = timedelta(days=int(settings.general.adaptive_searching_delta[:1])) + elif settings.general.adaptive_searching_delta.endswith('w'): + extended_search_delta = timedelta(weeks=int(settings.general.adaptive_searching_delta[:1])) + else: + logging.debug(f"Adaptive searching: cannot parse adaptive_searching_delta from config file: " + f"{settings.general.adaptive_searching_delta}") + return True + logging.debug(f"Adaptive searching: delta between latest search and now value: {extended_search_delta}") + + if initial_search_timestamp + extended_search_delay > now: + logging.debug(f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delay} since " + f"initial search, search will run.") + return True + else: + logging.debug(f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delay} since " + f"initial search, let's check if it's time to search again.") + if latest_search_timestamp + extended_search_delta <= now: + logging.debug( + f"Adaptive searching: it's been more than {settings.general.adaptive_searching_delta} since " + f"latest search, search will run.") + return True + else: + logging.debug( + f"Adaptive searching: it's been less than {settings.general.adaptive_searching_delta} since " + f"latest search, we're not ready to search yet.") + return False + + logging.debug("adaptive searching is disabled, search will run.") + return True + + +def updateFailedAttempts(desired_language, attempt_string): + """ + Function to parse attempts and make sure we only keep initial and latest search timestamp for each language. 
+ + @param desired_language: 2 letters language to search for in attempts + @type desired_language: str + @param attempt_string: string representation of a list of lists from database column failedAttempts + @type attempt_string: str + + @return: return a string representation of a list of lists like [str(language_code), str(attempts)] + @rtype: str + """ + + try: + # let's try to get a list of lists from the string representation in database + attempts = ast.literal_eval(attempt_string) + logging.debug(f"Adaptive searching: current attempts value is {attempts}") + if type(attempts) is not list: + # attempts should be a list if not, it's malformed or None + raise ValueError + except ValueError: + logging.debug("Adaptive searching: failed to parse attempts value, we'll use an empty list.") + attempts = [] + + matching_attempts = sorted([x for x in attempts if x[0] == desired_language], key=lambda x: x[1]) + logging.debug(f"Adaptive searching: attempts matching language {desired_language}: {matching_attempts}") + + filtered_attempts = sorted([x for x in attempts if x[0] != desired_language], key=lambda x: x[1]) + logging.debug(f"Adaptive searching: attempts not matching language {desired_language}: {filtered_attempts}") + + # get the initial search from attempts if there's one + if len(matching_attempts): + filtered_attempts.append(matching_attempts[0]) + + # append current attempt with language and timestamp to attempts + filtered_attempts.append([desired_language, datetime.timestamp(datetime.now())]) + + updated_attempts = sorted(filtered_attempts, key=lambda x: x[0]) + logging.debug(f"Adaptive searching: updated attempts that will be saved to database is {updated_attempts}") + + return str(updated_attempts) diff --git a/bazarr/notifier.py b/bazarr/notifier.py index c409eef45..7a24303ee 100644 --- a/bazarr/notifier.py +++ b/bazarr/notifier.py @@ -23,11 +23,11 @@ def update_notifier(): notifiers_current.append([notifier['name']]) for x in results['schemas']: - if 
[x['service_name']] not in notifiers_current: - notifiers_new.append({'name': x['service_name'], 'enabled': 0}) - logging.debug('Adding new notifier agent: ' + x['service_name']) + if [str(x['service_name'])] not in notifiers_current: + notifiers_new.append({'name': str(x['service_name']), 'enabled': 0}) + logging.debug('Adding new notifier agent: ' + str(x['service_name'])) else: - notifiers_old.append([x['service_name']]) + notifiers_old.append([str(x['service_name'])]) notifiers_to_delete = [item for item in notifiers_current if item not in notifiers_old] diff --git a/bazarr/score.py b/bazarr/score.py index 09b568a30..612e63df7 100644 --- a/bazarr/score.py +++ b/bazarr/score.py @@ -146,6 +146,7 @@ class Score: def __init__(self, load_profiles=False, **kwargs): self.data = self.defaults.copy() self.data.update(**kwargs) + self.data["hash"] = self._hash_score() self._profiles = [] self._profiles_loaded = False @@ -205,9 +206,16 @@ class Score: @property def max_score(self): return ( - sum(val for val in self.scores.values() if val > 0) - + sum(item.score for item in self._profiles if item.score > 0) - - self.data["hash"] + self.data["hash"] + + self.data["hearing_impaired"] + + sum(item.score for item in self._profiles if item.score) + ) + + def _hash_score(self): + return sum( + val + for key, val in self.data.items() + if key not in ("hash", "hearing_impaired") ) def __str__(self): diff --git a/bazarr/server.py b/bazarr/server.py index b414d8ee6..0b024d1ec 100644 --- a/bazarr/server.py +++ b/bazarr/server.py @@ -13,8 +13,9 @@ from database import database from app import create_app app = create_app() -from api import api_bp -app.register_blueprint(api_bp) +from api import api_bp_list +for item in api_bp_list: + app.register_blueprint(item, url_prefix=base_url.rstrip('/') + '/api') class Server: diff --git a/bazarr/signalr_client.py b/bazarr/signalr_client.py index f8c3c7e8f..9eb57dafb 100644 --- a/bazarr/signalr_client.py +++ b/bazarr/signalr_client.py @@ -22,9 
+22,9 @@ from get_args import args headers = {"User-Agent": os.environ["SZ_USER_AGENT"]} -class SonarrSignalrClient: +class SonarrSignalrClientLegacy: def __init__(self): - super(SonarrSignalrClient, self).__init__() + super(SonarrSignalrClientLegacy, self).__init__() self.apikey_sonarr = None self.session = Session() self.session.timeout = 60 @@ -92,6 +92,65 @@ class SonarrSignalrClient: self.connection.exception += self.exception_handler +class SonarrSignalrClient: + def __init__(self): + super(SonarrSignalrClient, self).__init__() + self.apikey_sonarr = None + self.connection = None + + def start(self): + self.configure() + logging.info('BAZARR trying to connect to Sonarr SignalR feed...') + while self.connection.transport.state.value not in [0, 1, 2]: + try: + self.connection.start() + except ConnectionError: + time.sleep(5) + + def stop(self): + logging.info('BAZARR SignalR client for Sonarr is now disconnected.') + self.connection.stop() + + def restart(self): + if self.connection: + if self.connection.transport.state.value in [0, 1, 2]: + self.stop() + if settings.general.getboolean('use_sonarr'): + self.start() + + def exception_handler(self): + logging.error("BAZARR connection to Sonarr SignalR feed has failed. 
We'll try to reconnect.") + self.restart() + + @staticmethod + def on_connect_handler(): + logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.') + if not args.dev: + scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1) + scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1) + + def configure(self): + self.apikey_sonarr = settings.sonarr.apikey + self.connection = HubConnectionBuilder() \ + .with_url(url_sonarr() + "/signalr/messages?access_token={}".format(self.apikey_sonarr), + options={ + "verify_ssl": False, + "headers": headers + }) \ + .with_automatic_reconnect({ + "type": "raw", + "keep_alive_interval": 5, + "reconnect_interval": 180, + "max_attempts": None + }).build() + self.connection.on_open(self.on_connect_handler) + self.connection.on_reconnect(lambda: logging.error('BAZARR SignalR client for Sonarr connection as been lost. ' + 'Trying to reconnect...')) + self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Sonarr is disconnected.')) + self.connection.on_error(self.exception_handler) + self.connection.on("receiveMessage", dispatcher) + + class RadarrSignalrClient: def __init__(self): super(RadarrSignalrClient, self).__init__() @@ -186,5 +245,6 @@ def dispatcher(data): return -sonarr_signalr_client = SonarrSignalrClient() +sonarr_signalr_client = SonarrSignalrClientLegacy() if get_sonarr_info.version().startswith(('0.', '2.', '3.')) else \ + SonarrSignalrClient() radarr_signalr_client = RadarrSignalrClient() diff --git a/bazarr/utils.py b/bazarr/utils.py index dfb1c7799..fa88927bc 100644 --- a/bazarr/utils.py +++ b/bazarr/utils.py @@ -256,8 +256,10 @@ class GetSonarrInfo: if 'version' in sonarr_json: sonarr_version = sonarr_json['version'] else: - sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey - sonarr_version = requests.get(sv, timeout=60, verify=False, headers=headers).json()['version'] + raise 
json.decoder.JSONDecodeError + except json.decoder.JSONDecodeError: + sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey + sonarr_version = requests.get(sv, timeout=60, verify=False, headers=headers).json()['version'] except Exception: logging.debug('BAZARR cannot get Sonarr version') sonarr_version = 'unknown' @@ -434,10 +436,14 @@ def subtitles_apply_mods(language, subtitle_path, mods): def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi): language_code_convert_dict = { 'he': 'iw', + 'zt': 'zh-cn', + 'zh': 'zh-tw', } to_lang = alpha3_from_alpha2(to_lang) - lang_obj = Language(to_lang) + lang_obj = CustomLanguage.from_value(to_lang, "alpha3") + if not lang_obj: + lang_obj = Language(to_lang) if forced: lang_obj = Language.rebuild(lang_obj, forced=True) if hi: @@ -447,7 +453,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi): max_characters = 5000 - dest_srt_file = get_subtitle_path(video_path, language=lang_obj, extension='.srt', forced_tag=forced, hi_tag=hi) + dest_srt_file = get_subtitle_path(video_path, language=lang_obj if isinstance(lang_obj, Language) else lang_obj.subzero_language(), + extension='.srt', forced_tag=forced, hi_tag=hi) subs = pysubs2.load(source_srt_file, encoding='utf-8') lines_list = [x.plaintext for x in subs] @@ -471,8 +478,8 @@ def translate_subtitles_file(video_path, source_srt_file, to_lang, forced, hi): for block_str in lines_block_list: try: translated_partial_srt_text = GoogleTranslator(source='auto', - target=language_code_convert_dict.get(lang_obj.basename, - lang_obj.basename) + target=language_code_convert_dict.get(lang_obj.alpha2, + lang_obj.alpha2) ).translate(text=block_str) except: return False diff --git a/dev-requirements.txt b/dev-requirements.txt index e812f80b9..9ba4733a1 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -4,3 +4,4 @@ pytest pytest-pep8 pytest-flakes pytest-cov +pytest-vcr diff --git 
a/frontend/package-lock.json b/frontend/package-lock.json index 879175837..45fe602f6 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -20,7 +20,6 @@ "bootstrap": "^4", "lodash": "^4", "moment": "^2.29.1", - "package.json": "^2.0.1", "rc-slider": "^9.7", "react": "^17", "react-bootstrap": "^1", @@ -3156,6 +3155,11 @@ "@sinonjs/commons": "^1.7.0" } }, + "node_modules/@socket.io/component-emitter": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.0.0.tgz", + "integrity": "sha512-2pTGuibAXJswAPJjaKisthqS/NOK5ypG4LYT6tEAV0S/mxW0zOIvYvGK0V8w8+SHxAm6vRMSjqSalFXeBAqs+Q==" + }, "node_modules/@surma/rollup-plugin-off-main-thread": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-1.4.2.tgz", @@ -3427,11 +3431,6 @@ "@types/jquery": "*" } }, - "node_modules/@types/component-emitter": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.10.tgz", - "integrity": "sha512-bsjleuRKWmGqajMerkzox19aGbscQX5rmmvvXl3wlIp5gMG1HgkiwPxsN5p070fBDKTNSPgojVbuY1+HWMbFhg==" - }, "node_modules/@types/d3-path": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-2.0.1.tgz", @@ -3620,9 +3619,9 @@ } }, "node_modules/@types/react-dom": { - "version": "17.0.9", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.9.tgz", - "integrity": "sha512-wIvGxLfgpVDSAMH5utdL9Ngm5Owu0VsGmldro3ORLXV8CShrL8awVj06NuEXFQ5xyaYfdca7Sgbk/50Ri1GdPg==", + "version": "17.0.11", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.11.tgz", + "integrity": "sha512-f96K3k+24RaLGVu/Y2Ng3e1EbZ8/cVJvypZWd7cy0ofCBaf2lcM46xNhycMZ2xGwbBjRql7hOlZ+e2WlJ5MH3Q==", "dev": true, "dependencies": { "@types/react": "*" @@ -4220,14 +4219,6 @@ "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", "integrity": 
"sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==" }, - "node_modules/abs": { - "version": "1.3.14", - "resolved": "https://registry.npmjs.org/abs/-/abs-1.3.14.tgz", - "integrity": "sha512-PrS26IzwKLWwuURpiKl8wRmJ2KdR/azaVrLEBWG/TALwT20Y7qjtYp1qcMLHA4206hBHY5phv3w4pjf9NPv4Vw==", - "dependencies": { - "ul": "^5.0.0" - } - }, "node_modules/accepts": { "version": "1.3.7", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", @@ -5353,9 +5344,9 @@ } }, "node_modules/base64-arraybuffer": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.4.tgz", - "integrity": "sha1-mBjHngWbE1X5fgQooBfIOOkLqBI=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.1.tgz", + "integrity": "sha512-vFIUq7FdLtjZMhATwDul5RZWv2jpXQ09Pd6jcVEOvIsqCWTRFD/ONHNfyOS8dA/Ippi5dsIgpyKWKZaAKZltbA==", "engines": { "node": ">= 0.6.0" } @@ -5891,14 +5882,6 @@ "node": "6.* || 8.* || >= 10.*" } }, - "node_modules/capture-stack-trace": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz", - "integrity": "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/case-sensitive-paths-webpack-plugin": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz", @@ -6580,17 +6563,6 @@ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, - "node_modules/create-error-class": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", - "integrity": "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y=", - "dependencies": { - 
"capture-stack-trace": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/create-hash": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", @@ -7216,14 +7188,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", @@ -7357,14 +7321,6 @@ "which": "bin/which" } }, - "node_modules/deffy": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/deffy/-/deffy-2.2.4.tgz", - "integrity": "sha512-pLc9lsbsWjr6RxmJ2OLyvm+9l4j1yK69h+TML/gUit/t3vTijpkNGh8LioaJYTGO7F25m6HZndADcUOo2PsiUg==", - "dependencies": { - "typpy": "^2.0.0" - } - }, "node_modules/define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", @@ -7739,14 +7695,6 @@ "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, - "node_modules/duplexer2": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", - "integrity": "sha1-ixLauHjA1p4+eJEFFmKjL8a93ME=", - "dependencies": { - "readable-stream": "^2.0.2" - } - }, "node_modules/duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", @@ -7837,18 +7785,18 @@ } }, "node_modules/engine.io-client": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-5.1.2.tgz", - "integrity": 
"sha512-blRrgXIE0A/eurWXRzvfCLG7uUFJqfTGFsyJzXSK71srMMGJ2VraBLg8Mdw28uUxSpVicepBN9X7asqpD1mZcQ==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.1.1.tgz", + "integrity": "sha512-V05mmDo4gjimYW+FGujoGmmmxRaDsrVr7AXA3ZIfa04MWM1jOfZfUwou0oNqhNwy/votUDvGDt4JA4QF4e0b4g==", "dependencies": { - "base64-arraybuffer": "0.1.4", - "component-emitter": "~1.3.0", + "@socket.io/component-emitter": "~3.0.0", "debug": "~4.3.1", - "engine.io-parser": "~4.0.1", + "engine.io-parser": "~5.0.0", "has-cors": "1.1.0", "parseqs": "0.0.6", "parseuri": "0.0.6", - "ws": "~7.4.2", + "ws": "~8.2.3", + "xmlhttprequest-ssl": "~2.0.0", "yeast": "0.1.2" } }, @@ -7873,15 +7821,35 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, + "node_modules/engine.io-client/node_modules/ws": { + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/engine.io-parser": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-4.0.2.tgz", - "integrity": "sha512-sHfEQv6nmtJrq6TKuIz5kyEKH/qSdK56H/A+7DnAuUPWosnIZAS2NHNcPLmyjtY3cGS/MqJdZbUjW97JU72iYg==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.2.tgz", + "integrity": "sha512-wuiO7qO/OEkPJSFueuATIXtrxF7/6GTbAO9QLv7nnbjwZ5tYhLm9zxvLwxstRs0dcT0KUlWTjtIOs1T86jt12g==", "dependencies": { - "base64-arraybuffer": "0.1.4" + "base64-arraybuffer": "~1.0.1" }, "engines": { - "node": ">=8.0.0" + 
"node": ">=10.0.0" } }, "node_modules/enhanced-resolve": { @@ -7928,14 +7896,6 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/err": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/err/-/err-1.1.1.tgz", - "integrity": "sha1-65KOLhGjFmSPeCgz0PlyWLpDwvg=", - "dependencies": { - "typpy": "^2.2.0" - } - }, "node_modules/errno": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", @@ -9078,15 +9038,6 @@ "safe-buffer": "^5.1.1" } }, - "node_modules/exec-limiter": { - "version": "3.2.13", - "resolved": "https://registry.npmjs.org/exec-limiter/-/exec-limiter-3.2.13.tgz", - "integrity": "sha512-86Ri699bwiHZVBzTzNj8gspqAhCPchg70zPVWIh3qzUOA1pUMcb272Em3LPk8AE0mS95B9yMJhtqF8vFJAn0dA==", - "dependencies": { - "limit-it": "^3.0.0", - "typpy": "^2.1.0" - } - }, "node_modules/exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", @@ -9940,14 +9891,6 @@ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, - "node_modules/function.name": { - "version": "1.0.13", - "resolved": "https://registry.npmjs.org/function.name/-/function.name-1.0.13.tgz", - "integrity": "sha512-mVrqdoy5npWZyoXl4DxCeuVF6delDcQjVS9aPdvLYlBxtMTZDR2B5GVEQEoM1jJyspCqg3C0v4ABkLE7tp9xFA==", - "dependencies": { - "noop6": "^1.0.1" - } - }, "node_modules/functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", @@ -10017,47 +9960,6 @@ "node": ">=0.10.0" } }, - "node_modules/git-package-json": { - "version": "1.4.10", - "resolved": "https://registry.npmjs.org/git-package-json/-/git-package-json-1.4.10.tgz", - "integrity": "sha512-DRAcvbzd2SxGK7w8OgYfvKqhFliT5keX0lmSmVdgScgf1kkl5tbbo7Pam6uYoCa1liOiipKxQZG8quCtGWl/fA==", - "dependencies": { - "deffy": 
"^2.2.1", - "err": "^1.1.1", - "gry": "^5.0.0", - "normalize-package-data": "^2.3.5", - "oargv": "^3.4.1", - "one-by-one": "^3.1.0", - "r-json": "^1.2.1", - "r-package-json": "^1.0.0", - "tmp": "0.0.28" - } - }, - "node_modules/git-source": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/git-source/-/git-source-1.1.10.tgz", - "integrity": "sha512-XZZ7ZgnLL35oLgM/xjnLYgtlKlxJG0FohC1kWDvGkU7s1VKGXK0pFF/g1itQEwQ3D+uTQzBnzPi8XbqOv7Wc1Q==", - "dependencies": { - "git-url-parse": "^5.0.1" - } - }, - "node_modules/git-up": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/git-up/-/git-up-1.2.1.tgz", - "integrity": "sha1-JkSAoAax2EJhrB/gmjpRacV+oZ0=", - "dependencies": { - "is-ssh": "^1.0.0", - "parse-url": "^1.0.0" - } - }, - "node_modules/git-url-parse": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/git-url-parse/-/git-url-parse-5.0.1.tgz", - "integrity": "sha1-/j15xnRq4FBIz6UIyB553du6OEM=", - "dependencies": { - "git-up": "^1.0.0" - } - }, "node_modules/glob": { "version": "7.1.7", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", @@ -10150,51 +10052,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/got": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/got/-/got-5.6.0.tgz", - "integrity": "sha1-ux1+4WO3gIK7yOuDbz85UATqb78=", - "dependencies": { - "create-error-class": "^3.0.1", - "duplexer2": "^0.1.4", - "is-plain-obj": "^1.0.0", - "is-redirect": "^1.0.0", - "is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "node-status-codes": "^1.0.0", - "object-assign": "^4.0.1", - "parse-json": "^2.1.0", - "pinkie-promise": "^2.0.0", - "read-all-stream": "^3.0.0", - "readable-stream": "^2.0.5", - "timed-out": "^2.0.0", - "unzip-response": "^1.0.0", - "url-parse-lax": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/got/node_modules/is-stream": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/got/node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "dependencies": { - "error-ex": "^1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/graceful-fs": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", @@ -10206,17 +10063,6 @@ "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=", "optional": true }, - "node_modules/gry": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/gry/-/gry-5.0.8.tgz", - "integrity": "sha512-meq9ZjYVpLzZh3ojhTg7IMad9grGsx6rUUKHLqPnhLXzJkRQvEL2U3tQpS5/WentYTtHtxkT3Ew/mb10D6F6/g==", - "dependencies": { - "abs": "^1.2.1", - "exec-limiter": "^3.0.0", - "one-by-one": "^3.0.0", - "ul": "^5.0.0" - } - }, "node_modules/gzip-size": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.1.1.tgz", @@ -11434,14 +11280,6 @@ "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==" }, - "node_modules/is-redirect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz", - "integrity": "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -11470,14 +11308,6 @@ "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" }, - 
"node_modules/is-retry-allowed": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", - "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-root": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", @@ -11486,14 +11316,6 @@ "node": ">=6" } }, - "node_modules/is-ssh": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/is-ssh/-/is-ssh-1.3.3.tgz", - "integrity": "sha512-NKzJmQzJfEEma3w5cJNcUMxoXfDjz0Zj0eyCalHn2E6VOwlzjZo0yuO2fcBSf8zhFuVCL/82/r5gRcoi6aEPVQ==", - "dependencies": { - "protocols": "^1.1.0" - } - }, "node_modules/is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -11694,11 +11516,6 @@ "node": ">=8" } }, - "node_modules/iterate-object": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/iterate-object/-/iterate-object-1.3.4.tgz", - "integrity": "sha512-4dG1D1x/7g8PwHS9aK6QV5V94+ZvyP4+d19qDv43EzImmrndysIl4prmJ1hWWIGCqrZHyaHBm6BSEWHOLnpoNw==" - }, "node_modules/jest": { "version": "26.6.0", "resolved": "https://registry.npmjs.org/jest/-/jest-26.6.0.tgz", @@ -13413,14 +13230,6 @@ "node": ">= 0.8.0" } }, - "node_modules/limit-it": { - "version": "3.2.10", - "resolved": "https://registry.npmjs.org/limit-it/-/limit-it-3.2.10.tgz", - "integrity": "sha512-T0NK99pHnkimldr1WUqvbGV1oWDku/xC9J/OqzJFsV1jeOS6Bwl8W7vkeQIBqwiON9dTALws+rX/XPMQqWerDQ==", - "dependencies": { - "typpy": "^2.0.0" - } - }, "node_modules/lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", @@ -13582,14 +13391,6 @@ "tslib": "^2.0.3" } }, - "node_modules/lowercase-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", - "integrity": 
"sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -14235,19 +14036,6 @@ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.73.tgz", "integrity": "sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg==" }, - "node_modules/node-status-codes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-status-codes/-/node-status-codes-1.0.0.tgz", - "integrity": "sha1-WuVUHQJGRdMqWPzdyc7s6nrjrC8=", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/noop6": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/noop6/-/noop6-1.0.9.tgz", - "integrity": "sha512-DB3Hwyd89dPr5HqEPg3YHjzvwh/mCqizC1zZ8vyofqc+TQRyPDnT4wgXXbLGF4z9YAzwwTLi8pNLhGqcbSjgkA==" - }, "node_modules/normalize-package-data": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -14329,23 +14117,6 @@ "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==" }, - "node_modules/oargv": { - "version": "3.4.10", - "resolved": "https://registry.npmjs.org/oargv/-/oargv-3.4.10.tgz", - "integrity": "sha512-SXaMANv9sr7S/dP0vj0+Ybipa47UE1ntTWQ2rpPRhC6Bsvfl+Jg03Xif7jfL0sWKOYWK8oPjcZ5eJ82t8AP/8g==", - "dependencies": { - "iterate-object": "^1.1.0", - "ul": "^5.0.0" - } - }, - "node_modules/obj-def": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/obj-def/-/obj-def-1.0.9.tgz", - "integrity": "sha512-bQ4ya3VYD6FAA1+s6mEhaURRHSmw4+sKaXE6UyXZ1XDYc5D+c7look25dFdydmLd18epUegh398gdDkMUZI9xg==", - "dependencies": { - "deffy": "^2.2.2" - } - }, "node_modules/object-assign": { "version": "4.1.1", "resolved": 
"https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -14596,15 +14367,6 @@ "wrappy": "1" } }, - "node_modules/one-by-one": { - "version": "3.2.8", - "resolved": "https://registry.npmjs.org/one-by-one/-/one-by-one-3.2.8.tgz", - "integrity": "sha512-HR/pSzZdm46Xqj58K+Bu64kMbSTw8/u77AwWvV+rprO/OsuR++pPlkUJn+SmwqBGRgHKwSKQ974V3uls7crIeQ==", - "dependencies": { - "obj-def": "^1.0.0", - "sliced": "^1.0.1" - } - }, "node_modules/onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -14694,14 +14456,6 @@ "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=" }, - "node_modules/os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/p-each-series": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.2.0.tgz", @@ -14768,47 +14522,6 @@ "node": ">=6" } }, - "node_modules/package-json": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-2.4.0.tgz", - "integrity": "sha1-DRW9Z9HLvduyyiIv8u24a8sxqLs=", - "dependencies": { - "got": "^5.0.0", - "registry-auth-token": "^3.0.1", - "registry-url": "^3.0.3", - "semver": "^5.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/package-json-path": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/package-json-path/-/package-json-path-1.0.9.tgz", - "integrity": "sha512-uNu7f6Ef7tQHZRnkyVnCtzdSYVN9uBtge/sG7wzcUaawFWkPYUq67iXxRGrQSg/q0tzxIB8jSyIYUKjG2Jn//A==", - "dependencies": { - "abs": "^1.2.1" - } - }, - "node_modules/package-json/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/package.json": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/package.json/-/package.json-2.0.1.tgz", - "integrity": "sha1-+IYFnSpJ7QduZIg2ldc7K0bSHW0=", - "deprecated": "Use pkg.json instead.", - "dependencies": { - "git-package-json": "^1.4.0", - "git-source": "^1.1.0", - "package-json": "^2.3.1" - } - }, "node_modules/pako": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", @@ -14873,15 +14586,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/parse-url": { - "version": "1.3.11", - "resolved": "https://registry.npmjs.org/parse-url/-/parse-url-1.3.11.tgz", - "integrity": "sha1-V8FUKKuKiSsfQ4aWRccR0OFEtVQ=", - "dependencies": { - "is-ssh": "^1.3.0", - "protocols": "^1.4.0" - } - }, "node_modules/parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", @@ -16522,9 +16226,9 @@ "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==" }, "node_modules/pretty-quick": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/pretty-quick/-/pretty-quick-3.1.1.tgz", - "integrity": "sha512-ZYLGiMoV2jcaas3vTJrLvKAYsxDoXQBUn8OSTxkl67Fyov9lyXivJTl0+2WVh+y6EovGcw7Lm5ThYpH+Sh3XxQ==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pretty-quick/-/pretty-quick-3.1.2.tgz", + "integrity": "sha512-T+fpTJrDjTzewql4p3lKrRA7z3MrNyjBK1MKeaBm5PpKwATgVm885TpY7TgY8KFt5Q1Qn3QDseRQcyX9AKTKkA==", "dev": true, "dependencies": { "chalk": "^3.0.0", @@ -16677,11 +16381,6 @@ "react": ">=0.14.0" } }, - "node_modules/protocols": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/protocols/-/protocols-1.4.8.tgz", - "integrity": "sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg==" - }, 
"node_modules/proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -16828,20 +16527,6 @@ } ] }, - "node_modules/r-json": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/r-json/-/r-json-1.2.10.tgz", - "integrity": "sha512-hu9vyLjSlHXT62NAS7DjI9WazDlvjN0lgp3n431dCVnirVcLkZIpzSwA3orhZEKzdDD2jqNYI+w0yG0aFf4kpA==" - }, - "node_modules/r-package-json": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/r-package-json/-/r-package-json-1.0.9.tgz", - "integrity": "sha512-G4Vpf1KImWmmPFGdtWQTU0L9zk0SjqEC4qs/jE7AQ+Ylmr5kizMzGeC4wnHp5+ijPqNN+2ZPpvyjVNdN1CDVcg==", - "dependencies": { - "package-json-path": "^1.0.0", - "r-json": "^1.2.1" - } - }, "node_modules/raf": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", @@ -16897,20 +16582,6 @@ "node": ">= 0.8" } }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, "node_modules/rc-align": { "version": "4.0.9", "resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.9.tgz", @@ -17006,14 +16677,6 @@ "react-dom": ">=16.9.0" } }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/react": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz", @@ -17717,18 +17380,6 @@ "react-dom": ">=16.6.0" } }, - "node_modules/read-all-stream": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/read-all-stream/-/read-all-stream-3.1.0.tgz", - "integrity": "sha1-NcPhd/IHjveJ7kv6+kNzB06u9Po=", - "dependencies": { - "pinkie-promise": "^2.0.0", - "readable-stream": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/read-pkg": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", @@ -18035,26 +17686,6 @@ "node": ">=4" } }, - "node_modules/registry-auth-token": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.4.0.tgz", - "integrity": "sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A==", - "dependencies": { - "rc": "^1.1.6", - "safe-buffer": "^5.0.1" - } - }, - "node_modules/registry-url": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz", - "integrity": "sha1-PU74cPc93h138M+aOBQyRE4XSUI=", - "dependencies": { - "rc": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/regjsgen": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz", @@ -19079,11 +18710,6 @@ "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, - "node_modules/sliced": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", - "integrity": "sha1-CzpmK10Ewxd7GSa+qCsD+Dei70E=" - }, "node_modules/snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -19244,17 +18870,16 @@ } }, "node_modules/socket.io-client": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.1.3.tgz", - "integrity": "sha512-hISFn6PDpgDifVUiNklLHVPTMv1LAk8poHArfIUdXa+gKgbr0MZbAlquDFqCqsF30yBqa+jg42wgos2FK50BHA==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.4.0.tgz", + "integrity": 
"sha512-g7riSEJXi7qCFImPow98oT8X++MSsHz6MMFRXkWNJ6uEROSHOa3kxdrsYWMq85dO+09CFMkcqlpjvbVXQl4z6g==", "dependencies": { - "@types/component-emitter": "^1.2.10", + "@socket.io/component-emitter": "~3.0.0", "backo2": "~1.0.2", - "component-emitter": "~1.3.0", - "debug": "~4.3.1", - "engine.io-client": "~5.1.2", + "debug": "~4.3.2", + "engine.io-client": "~6.1.1", "parseuri": "0.0.6", - "socket.io-parser": "~4.0.4" + "socket.io-parser": "~4.1.1" }, "engines": { "node": ">=10.0.0" @@ -19282,12 +18907,11 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/socket.io-parser": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.0.4.tgz", - "integrity": "sha512-t+b0SS+IxG7Rxzda2EVvyBZbvFPBCjJoyHuE0P//7OAsN23GItzDRdWa6ALxZI/8R5ygK7jAR6t028/z+7295g==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.1.1.tgz", + "integrity": "sha512-USQVLSkDWE5nbcY760ExdKaJxCE65kcsG/8k5FDGZVVxpD1pA7hABYXYkCUvxUuYYh/+uQw0N/fvBzfT8o07KA==", "dependencies": { - "@types/component-emitter": "^1.2.10", - "component-emitter": "~1.3.0", + "@socket.io/component-emitter": "~3.0.0", "debug": "~4.3.1" }, "engines": { @@ -20496,14 +20120,6 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, - "node_modules/timed-out": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-2.0.0.tgz", - "integrity": "sha1-84sK6B03R9YoAB9B2vxlKs5nHAo=", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/timers-browserify": { "version": "2.0.12", "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", @@ -20530,17 +20146,6 @@ "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", "integrity": 
"sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, - "node_modules/tmp": { - "version": "0.0.28", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.28.tgz", - "integrity": "sha1-Fyc1t/YU6nrzlmT6hM8N5OUV0SA=", - "dependencies": { - "os-tmpdir": "~1.0.1" - }, - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -20784,23 +20389,6 @@ "node": ">=4.2.0" } }, - "node_modules/typpy": { - "version": "2.3.13", - "resolved": "https://registry.npmjs.org/typpy/-/typpy-2.3.13.tgz", - "integrity": "sha512-vOxIcQz9sxHi+rT09SJ5aDgVgrPppQjwnnayTrMye1ODaU8gIZTDM19t9TxmEElbMihx2Nq/0/b/MtyKfayRqA==", - "dependencies": { - "function.name": "^1.0.3" - } - }, - "node_modules/ul": { - "version": "5.2.15", - "resolved": "https://registry.npmjs.org/ul/-/ul-5.2.15.tgz", - "integrity": "sha512-svLEUy8xSCip5IWnsRa0UOg+2zP0Wsj4qlbjTmX6GJSmvKMHADBuHOm1dpNkWqWPIGuVSqzUkV3Cris5JrlTRQ==", - "dependencies": { - "deffy": "^2.2.2", - "typpy": "^2.3.4" - } - }, "node_modules/unbox-primitive": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", @@ -20994,14 +20582,6 @@ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, - "node_modules/unzip-response": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-1.0.2.tgz", - "integrity": "sha1-uYTwh3/AqJwsdzzB73tbIytbBv4=", - "engines": { - "node": ">=0.10" - } - }, "node_modules/upath": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", @@ -21086,17 +20666,6 @@ "requires-port": "^1.0.0" } }, - "node_modules/url-parse-lax": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", - "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", - "dependencies": { - 
"prepend-http": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/url/node_modules/punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", @@ -22680,6 +22249,14 @@ "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==" }, + "node_modules/xmlhttprequest-ssl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz", + "integrity": "sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", @@ -24990,6 +24567,11 @@ "@sinonjs/commons": "^1.7.0" } }, + "@socket.io/component-emitter": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.0.0.tgz", + "integrity": "sha512-2pTGuibAXJswAPJjaKisthqS/NOK5ypG4LYT6tEAV0S/mxW0zOIvYvGK0V8w8+SHxAm6vRMSjqSalFXeBAqs+Q==" + }, "@surma/rollup-plugin-off-main-thread": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/@surma/rollup-plugin-off-main-thread/-/rollup-plugin-off-main-thread-1.4.2.tgz", @@ -25160,11 +24742,6 @@ "@types/jquery": "*" } }, - "@types/component-emitter": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/@types/component-emitter/-/component-emitter-1.2.10.tgz", - "integrity": "sha512-bsjleuRKWmGqajMerkzox19aGbscQX5rmmvvXl3wlIp5gMG1HgkiwPxsN5p070fBDKTNSPgojVbuY1+HWMbFhg==" - }, "@types/d3-path": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-2.0.1.tgz", @@ -25353,9 +24930,9 @@ } }, "@types/react-dom": { - "version": "17.0.9", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.9.tgz", - "integrity": 
"sha512-wIvGxLfgpVDSAMH5utdL9Ngm5Owu0VsGmldro3ORLXV8CShrL8awVj06NuEXFQ5xyaYfdca7Sgbk/50Ri1GdPg==", + "version": "17.0.11", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.11.tgz", + "integrity": "sha512-f96K3k+24RaLGVu/Y2Ng3e1EbZ8/cVJvypZWd7cy0ofCBaf2lcM46xNhycMZ2xGwbBjRql7hOlZ+e2WlJ5MH3Q==", "dev": true, "requires": { "@types/react": "*" @@ -25845,14 +25422,6 @@ "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", "integrity": "sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q==" }, - "abs": { - "version": "1.3.14", - "resolved": "https://registry.npmjs.org/abs/-/abs-1.3.14.tgz", - "integrity": "sha512-PrS26IzwKLWwuURpiKl8wRmJ2KdR/azaVrLEBWG/TALwT20Y7qjtYp1qcMLHA4206hBHY5phv3w4pjf9NPv4Vw==", - "requires": { - "ul": "^5.0.0" - } - }, "accepts": { "version": "1.3.7", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", @@ -26735,9 +26304,9 @@ } }, "base64-arraybuffer": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-0.1.4.tgz", - "integrity": "sha1-mBjHngWbE1X5fgQooBfIOOkLqBI=" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.1.tgz", + "integrity": "sha512-vFIUq7FdLtjZMhATwDul5RZWv2jpXQ09Pd6jcVEOvIsqCWTRFD/ONHNfyOS8dA/Ippi5dsIgpyKWKZaAKZltbA==" }, "base64-js": { "version": "1.5.1", @@ -27161,11 +26730,6 @@ "rsvp": "^4.8.4" } }, - "capture-stack-trace": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz", - "integrity": "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==" - }, "case-sensitive-paths-webpack-plugin": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz", @@ -27715,14 +27279,6 @@ } } }, - "create-error-class": { - "version": "3.0.2", - 
"resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", - "integrity": "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y=", - "requires": { - "capture-stack-trace": "^1.0.0" - } - }, "create-hash": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", @@ -28230,11 +27786,6 @@ "regexp.prototype.flags": "^1.2.0" } }, - "deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" - }, "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", @@ -28334,14 +27885,6 @@ } } }, - "deffy": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/deffy/-/deffy-2.2.4.tgz", - "integrity": "sha512-pLc9lsbsWjr6RxmJ2OLyvm+9l4j1yK69h+TML/gUit/t3vTijpkNGh8LioaJYTGO7F25m6HZndADcUOo2PsiUg==", - "requires": { - "typpy": "^2.0.0" - } - }, "define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", @@ -28637,14 +28180,6 @@ "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" }, - "duplexer2": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", - "integrity": "sha1-ixLauHjA1p4+eJEFFmKjL8a93ME=", - "requires": { - "readable-stream": "^2.0.2" - } - }, "duplexify": { "version": "3.7.1", "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", @@ -28721,18 +28256,18 @@ } }, "engine.io-client": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-5.1.2.tgz", - "integrity": "sha512-blRrgXIE0A/eurWXRzvfCLG7uUFJqfTGFsyJzXSK71srMMGJ2VraBLg8Mdw28uUxSpVicepBN9X7asqpD1mZcQ==", + "version": "6.1.1", + 
"resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.1.1.tgz", + "integrity": "sha512-V05mmDo4gjimYW+FGujoGmmmxRaDsrVr7AXA3ZIfa04MWM1jOfZfUwou0oNqhNwy/votUDvGDt4JA4QF4e0b4g==", "requires": { - "base64-arraybuffer": "0.1.4", - "component-emitter": "~1.3.0", + "@socket.io/component-emitter": "~3.0.0", "debug": "~4.3.1", - "engine.io-parser": "~4.0.1", + "engine.io-parser": "~5.0.0", "has-cors": "1.1.0", "parseqs": "0.0.6", "parseuri": "0.0.6", - "ws": "~7.4.2", + "ws": "~8.2.3", + "xmlhttprequest-ssl": "~2.0.0", "yeast": "0.1.2" }, "dependencies": { @@ -28748,15 +28283,21 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "ws": { + "version": "8.2.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.2.3.tgz", + "integrity": "sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA==", + "requires": {} } } }, "engine.io-parser": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-4.0.2.tgz", - "integrity": "sha512-sHfEQv6nmtJrq6TKuIz5kyEKH/qSdK56H/A+7DnAuUPWosnIZAS2NHNcPLmyjtY3cGS/MqJdZbUjW97JU72iYg==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.0.2.tgz", + "integrity": "sha512-wuiO7qO/OEkPJSFueuATIXtrxF7/6GTbAO9QLv7nnbjwZ5tYhLm9zxvLwxstRs0dcT0KUlWTjtIOs1T86jt12g==", "requires": { - "base64-arraybuffer": "0.1.4" + "base64-arraybuffer": "~1.0.1" } }, "enhanced-resolve": { @@ -28793,14 +28334,6 @@ "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==" }, - "err": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/err/-/err-1.1.1.tgz", - "integrity": "sha1-65KOLhGjFmSPeCgz0PlyWLpDwvg=", - "requires": { - "typpy": 
"^2.2.0" - } - }, "errno": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", @@ -29605,15 +29138,6 @@ "safe-buffer": "^5.1.1" } }, - "exec-limiter": { - "version": "3.2.13", - "resolved": "https://registry.npmjs.org/exec-limiter/-/exec-limiter-3.2.13.tgz", - "integrity": "sha512-86Ri699bwiHZVBzTzNj8gspqAhCPchg70zPVWIh3qzUOA1pUMcb272Em3LPk8AE0mS95B9yMJhtqF8vFJAn0dA==", - "requires": { - "limit-it": "^3.0.0", - "typpy": "^2.1.0" - } - }, "exec-sh": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.6.tgz", @@ -30282,14 +29806,6 @@ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, - "function.name": { - "version": "1.0.13", - "resolved": "https://registry.npmjs.org/function.name/-/function.name-1.0.13.tgz", - "integrity": "sha512-mVrqdoy5npWZyoXl4DxCeuVF6delDcQjVS9aPdvLYlBxtMTZDR2B5GVEQEoM1jJyspCqg3C0v4ABkLE7tp9xFA==", - "requires": { - "noop6": "^1.0.1" - } - }, "functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", @@ -30338,47 +29854,6 @@ "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=" }, - "git-package-json": { - "version": "1.4.10", - "resolved": "https://registry.npmjs.org/git-package-json/-/git-package-json-1.4.10.tgz", - "integrity": "sha512-DRAcvbzd2SxGK7w8OgYfvKqhFliT5keX0lmSmVdgScgf1kkl5tbbo7Pam6uYoCa1liOiipKxQZG8quCtGWl/fA==", - "requires": { - "deffy": "^2.2.1", - "err": "^1.1.1", - "gry": "^5.0.0", - "normalize-package-data": "^2.3.5", - "oargv": "^3.4.1", - "one-by-one": "^3.1.0", - "r-json": "^1.2.1", - "r-package-json": "^1.0.0", - "tmp": "0.0.28" - } - }, - "git-source": { - "version": "1.1.10", - "resolved": 
"https://registry.npmjs.org/git-source/-/git-source-1.1.10.tgz", - "integrity": "sha512-XZZ7ZgnLL35oLgM/xjnLYgtlKlxJG0FohC1kWDvGkU7s1VKGXK0pFF/g1itQEwQ3D+uTQzBnzPi8XbqOv7Wc1Q==", - "requires": { - "git-url-parse": "^5.0.1" - } - }, - "git-up": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/git-up/-/git-up-1.2.1.tgz", - "integrity": "sha1-JkSAoAax2EJhrB/gmjpRacV+oZ0=", - "requires": { - "is-ssh": "^1.0.0", - "parse-url": "^1.0.0" - } - }, - "git-url-parse": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/git-url-parse/-/git-url-parse-5.0.1.tgz", - "integrity": "sha1-/j15xnRq4FBIz6UIyB553du6OEM=", - "requires": { - "git-up": "^1.0.0" - } - }, "glob": { "version": "7.1.7", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", @@ -30446,44 +29921,6 @@ "slash": "^3.0.0" } }, - "got": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/got/-/got-5.6.0.tgz", - "integrity": "sha1-ux1+4WO3gIK7yOuDbz85UATqb78=", - "requires": { - "create-error-class": "^3.0.1", - "duplexer2": "^0.1.4", - "is-plain-obj": "^1.0.0", - "is-redirect": "^1.0.0", - "is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "node-status-codes": "^1.0.0", - "object-assign": "^4.0.1", - "parse-json": "^2.1.0", - "pinkie-promise": "^2.0.0", - "read-all-stream": "^3.0.0", - "readable-stream": "^2.0.5", - "timed-out": "^2.0.0", - "unzip-response": "^1.0.0", - "url-parse-lax": "^1.0.0" - }, - "dependencies": { - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=" - }, - "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", - "requires": { - "error-ex": "^1.2.0" - } - } - } - }, "graceful-fs": { "version": "4.2.8", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", @@ 
-30495,17 +29932,6 @@ "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=", "optional": true }, - "gry": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/gry/-/gry-5.0.8.tgz", - "integrity": "sha512-meq9ZjYVpLzZh3ojhTg7IMad9grGsx6rUUKHLqPnhLXzJkRQvEL2U3tQpS5/WentYTtHtxkT3Ew/mb10D6F6/g==", - "requires": { - "abs": "^1.2.1", - "exec-limiter": "^3.0.0", - "one-by-one": "^3.0.0", - "ul": "^5.0.0" - } - }, "gzip-size": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.1.1.tgz", @@ -31394,11 +30820,6 @@ "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==" }, - "is-redirect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz", - "integrity": "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ=" - }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -31418,24 +30839,11 @@ "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==" }, - "is-retry-allowed": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", - "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==" - }, "is-root": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==" }, - "is-ssh": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/is-ssh/-/is-ssh-1.3.3.tgz", - "integrity": "sha512-NKzJmQzJfEEma3w5cJNcUMxoXfDjz0Zj0eyCalHn2E6VOwlzjZo0yuO2fcBSf8zhFuVCL/82/r5gRcoi6aEPVQ==", - "requires": { - 
"protocols": "^1.1.0" - } - }, "is-stream": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", @@ -31577,11 +30985,6 @@ "istanbul-lib-report": "^3.0.0" } }, - "iterate-object": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/iterate-object/-/iterate-object-1.3.4.tgz", - "integrity": "sha512-4dG1D1x/7g8PwHS9aK6QV5V94+ZvyP4+d19qDv43EzImmrndysIl4prmJ1hWWIGCqrZHyaHBm6BSEWHOLnpoNw==" - }, "jest": { "version": "26.6.0", "resolved": "https://registry.npmjs.org/jest/-/jest-26.6.0.tgz", @@ -32879,14 +32282,6 @@ "type-check": "~0.4.0" } }, - "limit-it": { - "version": "3.2.10", - "resolved": "https://registry.npmjs.org/limit-it/-/limit-it-3.2.10.tgz", - "integrity": "sha512-T0NK99pHnkimldr1WUqvbGV1oWDku/xC9J/OqzJFsV1jeOS6Bwl8W7vkeQIBqwiON9dTALws+rX/XPMQqWerDQ==", - "requires": { - "typpy": "^2.0.0" - } - }, "lines-and-columns": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", @@ -33022,11 +32417,6 @@ "tslib": "^2.0.3" } }, - "lowercase-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", - "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -33554,16 +32944,6 @@ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.73.tgz", "integrity": "sha512-uW7fodD6pyW2FZNZnp/Z3hvWKeEW1Y8R1+1CnErE8cXFXzl5blBOoVB41CvMer6P6Q0S5FXDwcHgFd1Wj0U9zg==" }, - "node-status-codes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-status-codes/-/node-status-codes-1.0.0.tgz", - "integrity": "sha1-WuVUHQJGRdMqWPzdyc7s6nrjrC8=" - }, - "noop6": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/noop6/-/noop6-1.0.9.tgz", - "integrity": 
"sha512-DB3Hwyd89dPr5HqEPg3YHjzvwh/mCqizC1zZ8vyofqc+TQRyPDnT4wgXXbLGF4z9YAzwwTLi8pNLhGqcbSjgkA==" - }, "normalize-package-data": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -33629,23 +33009,6 @@ "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==" }, - "oargv": { - "version": "3.4.10", - "resolved": "https://registry.npmjs.org/oargv/-/oargv-3.4.10.tgz", - "integrity": "sha512-SXaMANv9sr7S/dP0vj0+Ybipa47UE1ntTWQ2rpPRhC6Bsvfl+Jg03Xif7jfL0sWKOYWK8oPjcZ5eJ82t8AP/8g==", - "requires": { - "iterate-object": "^1.1.0", - "ul": "^5.0.0" - } - }, - "obj-def": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/obj-def/-/obj-def-1.0.9.tgz", - "integrity": "sha512-bQ4ya3VYD6FAA1+s6mEhaURRHSmw4+sKaXE6UyXZ1XDYc5D+c7look25dFdydmLd18epUegh398gdDkMUZI9xg==", - "requires": { - "deffy": "^2.2.2" - } - }, "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -33825,15 +33188,6 @@ "wrappy": "1" } }, - "one-by-one": { - "version": "3.2.8", - "resolved": "https://registry.npmjs.org/one-by-one/-/one-by-one-3.2.8.tgz", - "integrity": "sha512-HR/pSzZdm46Xqj58K+Bu64kMbSTw8/u77AwWvV+rprO/OsuR++pPlkUJn+SmwqBGRgHKwSKQ974V3uls7crIeQ==", - "requires": { - "obj-def": "^1.0.0", - "sliced": "^1.0.1" - } - }, "onetime": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", @@ -33901,11 +33255,6 @@ "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=" }, - "os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" - }, "p-each-series": { "version": "2.2.0", "resolved": 
"https://registry.npmjs.org/p-each-series/-/p-each-series-2.2.0.tgz", @@ -33945,42 +33294,6 @@ "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" }, - "package-json": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-2.4.0.tgz", - "integrity": "sha1-DRW9Z9HLvduyyiIv8u24a8sxqLs=", - "requires": { - "got": "^5.0.0", - "registry-auth-token": "^3.0.1", - "registry-url": "^3.0.3", - "semver": "^5.1.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } - } - }, - "package-json-path": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/package-json-path/-/package-json-path-1.0.9.tgz", - "integrity": "sha512-uNu7f6Ef7tQHZRnkyVnCtzdSYVN9uBtge/sG7wzcUaawFWkPYUq67iXxRGrQSg/q0tzxIB8jSyIYUKjG2Jn//A==", - "requires": { - "abs": "^1.2.1" - } - }, - "package.json": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/package.json/-/package.json-2.0.1.tgz", - "integrity": "sha1-+IYFnSpJ7QduZIg2ldc7K0bSHW0=", - "requires": { - "git-package-json": "^1.4.0", - "git-source": "^1.1.0", - "package-json": "^2.3.1" - } - }, "pako": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", @@ -34036,15 +33349,6 @@ "lines-and-columns": "^1.1.6" } }, - "parse-url": { - "version": "1.3.11", - "resolved": "https://registry.npmjs.org/parse-url/-/parse-url-1.3.11.tgz", - "integrity": "sha1-V8FUKKuKiSsfQ4aWRccR0OFEtVQ=", - "requires": { - "is-ssh": "^1.3.0", - "protocols": "^1.4.0" - } - }, "parse5": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", @@ -35367,9 +34671,9 @@ } }, "pretty-quick": { - "version": "3.1.1", - "resolved": 
"https://registry.npmjs.org/pretty-quick/-/pretty-quick-3.1.1.tgz", - "integrity": "sha512-ZYLGiMoV2jcaas3vTJrLvKAYsxDoXQBUn8OSTxkl67Fyov9lyXivJTl0+2WVh+y6EovGcw7Lm5ThYpH+Sh3XxQ==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/pretty-quick/-/pretty-quick-3.1.2.tgz", + "integrity": "sha512-T+fpTJrDjTzewql4p3lKrRA7z3MrNyjBK1MKeaBm5PpKwATgVm885TpY7TgY8KFt5Q1Qn3QDseRQcyX9AKTKkA==", "dev": true, "requires": { "chalk": "^3.0.0", @@ -35485,11 +34789,6 @@ "warning": "^4.0.0" } }, - "protocols": { - "version": "1.4.8", - "resolved": "https://registry.npmjs.org/protocols/-/protocols-1.4.8.tgz", - "integrity": "sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg==" - }, "proxy-addr": { "version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -35603,20 +34902,6 @@ "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" }, - "r-json": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/r-json/-/r-json-1.2.10.tgz", - "integrity": "sha512-hu9vyLjSlHXT62NAS7DjI9WazDlvjN0lgp3n431dCVnirVcLkZIpzSwA3orhZEKzdDD2jqNYI+w0yG0aFf4kpA==" - }, - "r-package-json": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/r-package-json/-/r-package-json-1.0.9.tgz", - "integrity": "sha512-G4Vpf1KImWmmPFGdtWQTU0L9zk0SjqEC4qs/jE7AQ+Ylmr5kizMzGeC4wnHp5+ijPqNN+2ZPpvyjVNdN1CDVcg==", - "requires": { - "package-json-path": "^1.0.0", - "r-json": "^1.2.1" - } - }, "raf": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", @@ -35665,24 +34950,6 @@ } } }, - "rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "requires": { - "deep-extend": "^0.6.0", - "ini": 
"~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "dependencies": { - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" - } - } - }, "rc-align": { "version": "4.0.9", "resolved": "https://registry.npmjs.org/rc-align/-/rc-align-4.0.9.tgz", @@ -36288,15 +35555,6 @@ "prop-types": "^15.6.2" } }, - "read-all-stream": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/read-all-stream/-/read-all-stream-3.1.0.tgz", - "integrity": "sha1-NcPhd/IHjveJ7kv6+kNzB06u9Po=", - "requires": { - "pinkie-promise": "^2.0.0", - "readable-stream": "^2.0.0" - } - }, "read-pkg": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", @@ -36552,23 +35810,6 @@ "unicode-match-property-value-ecmascript": "^1.2.0" } }, - "registry-auth-token": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.4.0.tgz", - "integrity": "sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A==", - "requires": { - "rc": "^1.1.6", - "safe-buffer": "^5.0.1" - } - }, - "registry-url": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz", - "integrity": "sha1-PU74cPc93h138M+aOBQyRE4XSUI=", - "requires": { - "rc": "^1.0.1" - } - }, "regjsgen": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz", @@ -37366,11 +36607,6 @@ "is-fullwidth-code-point": "^3.0.0" } }, - "sliced": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", - "integrity": "sha1-CzpmK10Ewxd7GSa+qCsD+Dei70E=" - }, "snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -37499,17 +36735,16 @@ } }, "socket.io-client": { - "version": "4.1.3", - 
"resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.1.3.tgz", - "integrity": "sha512-hISFn6PDpgDifVUiNklLHVPTMv1LAk8poHArfIUdXa+gKgbr0MZbAlquDFqCqsF30yBqa+jg42wgos2FK50BHA==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.4.0.tgz", + "integrity": "sha512-g7riSEJXi7qCFImPow98oT8X++MSsHz6MMFRXkWNJ6uEROSHOa3kxdrsYWMq85dO+09CFMkcqlpjvbVXQl4z6g==", "requires": { - "@types/component-emitter": "^1.2.10", + "@socket.io/component-emitter": "~3.0.0", "backo2": "~1.0.2", - "component-emitter": "~1.3.0", - "debug": "~4.3.1", - "engine.io-client": "~5.1.2", + "debug": "~4.3.2", + "engine.io-client": "~6.1.1", "parseuri": "0.0.6", - "socket.io-parser": "~4.0.4" + "socket.io-parser": "~4.1.1" }, "dependencies": { "debug": { @@ -37528,12 +36763,11 @@ } }, "socket.io-parser": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.0.4.tgz", - "integrity": "sha512-t+b0SS+IxG7Rxzda2EVvyBZbvFPBCjJoyHuE0P//7OAsN23GItzDRdWa6ALxZI/8R5ygK7jAR6t028/z+7295g==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.1.1.tgz", + "integrity": "sha512-USQVLSkDWE5nbcY760ExdKaJxCE65kcsG/8k5FDGZVVxpD1pA7hABYXYkCUvxUuYYh/+uQw0N/fvBzfT8o07KA==", "requires": { - "@types/component-emitter": "^1.2.10", - "component-emitter": "~1.3.0", + "@socket.io/component-emitter": "~3.0.0", "debug": "~4.3.1" }, "dependencies": { @@ -38485,11 +37719,6 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, - "timed-out": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-2.0.0.tgz", - "integrity": "sha1-84sK6B03R9YoAB9B2vxlKs5nHAo=" - }, "timers-browserify": { "version": "2.0.12", "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", @@ 
-38513,14 +37742,6 @@ "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" }, - "tmp": { - "version": "0.0.28", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.28.tgz", - "integrity": "sha1-Fyc1t/YU6nrzlmT6hM8N5OUV0SA=", - "requires": { - "os-tmpdir": "~1.0.1" - } - }, "tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -38706,23 +37927,6 @@ "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.3.tgz", "integrity": "sha512-4xfscpisVgqqDfPaJo5vkd+Qd/ItkoagnHpufr+i2QCHBsNYp+G7UAoyFl8aPtx879u38wPV65rZ8qbGZijalA==" }, - "typpy": { - "version": "2.3.13", - "resolved": "https://registry.npmjs.org/typpy/-/typpy-2.3.13.tgz", - "integrity": "sha512-vOxIcQz9sxHi+rT09SJ5aDgVgrPppQjwnnayTrMye1ODaU8gIZTDM19t9TxmEElbMihx2Nq/0/b/MtyKfayRqA==", - "requires": { - "function.name": "^1.0.3" - } - }, - "ul": { - "version": "5.2.15", - "resolved": "https://registry.npmjs.org/ul/-/ul-5.2.15.tgz", - "integrity": "sha512-svLEUy8xSCip5IWnsRa0UOg+2zP0Wsj4qlbjTmX6GJSmvKMHADBuHOm1dpNkWqWPIGuVSqzUkV3Cris5JrlTRQ==", - "requires": { - "deffy": "^2.2.2", - "typpy": "^2.3.4" - } - }, "unbox-primitive": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.1.tgz", @@ -38877,11 +38081,6 @@ } } }, - "unzip-response": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-1.0.2.tgz", - "integrity": "sha1-uYTwh3/AqJwsdzzB73tbIytbBv4=" - }, "upath": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", @@ -38952,14 +38151,6 @@ "requires-port": "^1.0.0" } }, - "url-parse-lax": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", - "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", - "requires": { - "prepend-http": "^1.0.1" - } - 
}, "use": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", @@ -40225,6 +39416,11 @@ "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==" }, + "xmlhttprequest-ssl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz", + "integrity": "sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==" + }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", diff --git a/frontend/package.json b/frontend/package.json index 0ebd83d4a..4ec88d834 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -25,7 +25,6 @@ "bootstrap": "^4", "lodash": "^4", "moment": "^2.29.1", - "package.json": "^2.0.1", "rc-slider": "^9.7", "react": "^17", "react-bootstrap": "^1", diff --git a/frontend/src/@types/api.d.ts b/frontend/src/@types/api.d.ts index 043753879..2d820460a 100644 --- a/frontend/src/@types/api.d.ts +++ b/frontend/src/@types/api.d.ts @@ -33,6 +33,8 @@ declare namespace Language { profileId: number; cutoff: number | null; items: ProfileItem[]; + mustContain: string[]; + mustNotContain: string[]; } } diff --git a/frontend/src/@types/settings.d.ts b/frontend/src/@types/settings.d.ts index 8de53db86..16879f831 100644 --- a/frontend/src/@types/settings.d.ts +++ b/frontend/src/@types/settings.d.ts @@ -28,6 +28,8 @@ interface Settings { declare namespace Settings { interface General { adaptive_searching: boolean; + adaptive_searching_delay: string; + adaptive_searching_delta: string; anti_captcha_provider?: string; auto_update: boolean; base_url?: string; diff --git a/frontend/src/App/Header.tsx b/frontend/src/App/Header.tsx index 9ca33a574..0f91cadae 100644 --- a/frontend/src/App/Header.tsx +++ b/frontend/src/App/Header.tsx @@ -5,6 +5,7 @@ import { faUser, } from 
"@fortawesome/free-solid-svg-icons"; import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; +import { uniqueId } from "lodash"; import React, { FunctionComponent, useMemo } from "react"; import { Button, @@ -35,16 +36,22 @@ async function SearchItem(text: string) { return results.map((v) => { let link: string; + let id: string; if (v.sonarrSeriesId) { link = `/series/${v.sonarrSeriesId}`; + id = `series-${v.sonarrSeriesId}`; } else if (v.radarrId) { link = `/movies/${v.radarrId}`; + id = `movie-${v.radarrId}`; } else { link = ""; + id = uniqueId("unknown"); } + return { name: `${v.title} (${v.year})`, link, + id, }; }); } diff --git a/frontend/src/Settings/Languages/modal.tsx b/frontend/src/Settings/Languages/modal.tsx index 20d1943a9..7371267dc 100644 --- a/frontend/src/Settings/Languages/modal.tsx +++ b/frontend/src/Settings/Languages/modal.tsx @@ -13,6 +13,7 @@ import { ActionButton, BaseModal, BaseModalProps, + Chips, LanguageSelector, Selector, SimpleTable, @@ -31,6 +32,8 @@ function createDefaultProfile(): Language.Profile { name: "", items: [], cutoff: null, + mustContain: [], + mustNotContain: [], }; } @@ -260,6 +263,28 @@ const LanguagesProfileModal: FunctionComponent = ( > Ignore others if existing + + updateProfile("mustContain", mc)} + > + + Subtitles release info must include one of those words or they will be + excluded from search results (regex supported). + + + + { + updateProfile("mustNotContain", mnc); + }} + > + + Subtitles release info including one of those words (case insensitive) + will be excluded from search results (regex supported). 
+ + ); }; diff --git a/frontend/src/Settings/Languages/table.tsx b/frontend/src/Settings/Languages/table.tsx index 4547e3198..10b71ca60 100644 --- a/frontend/src/Settings/Languages/table.tsx +++ b/frontend/src/Settings/Languages/table.tsx @@ -94,6 +94,40 @@ const Table: FunctionComponent = () => { }); }, }, + { + Header: "Must contain", + accessor: "mustContain", + Cell: (row) => { + const items = row.value; + if (!items) { + return false; + } + return items.map((v) => { + return ( + + {v} + + ); + }); + }, + }, + { + Header: "Must not contain", + accessor: "mustNotContain", + Cell: (row) => { + const items = row.value; + if (!items) { + return false; + } + return items.map((v) => { + return ( + + {v} + + ); + }); + }, + }, { accessor: "profileId", Cell: ({ row, update }) => { @@ -138,6 +172,8 @@ const Table: FunctionComponent = () => { name: "", items: [], cutoff: null, + mustContain: [], + mustNotContain: [], }; showModal("profile", profile); }} diff --git a/frontend/src/Settings/Providers/list.ts b/frontend/src/Settings/Providers/list.ts index dc076eb1c..e1e24a96c 100644 --- a/frontend/src/Settings/Providers/list.ts +++ b/frontend/src/Settings/Providers/list.ts @@ -21,6 +21,10 @@ export const ProviderList: Readonly = [ defaultKey: { username: "", password: "", + vip: false, + }, + keyNameOverride: { + vip: "VIP", }, }, { key: "argenteam", description: "LATAM Spanish Subtitles Provider" }, @@ -46,6 +50,21 @@ export const ProviderList: Readonly = [ key: "bsplayer", name: "BSplayer", }, + { + key: "embeddedsubtitles", + name: "Embedded Subtitles", + description: "Embedded Subtitles from your Media Files", + defaultKey: { + include_srt: true, + include_ass: true, + }, + message: + "Warning for cloud users: this provider needs to read the entire file in order to extract subtitles.", + keyNameOverride: { + include_srt: "Include SRT", + include_ass: "Include ASS (will be converted to SRT)", + }, + }, { key: "greeksubs", name: "GreekSubs", diff --git 
a/frontend/src/Settings/Sonarr/index.tsx b/frontend/src/Settings/Sonarr/index.tsx index 6f9140d35..75ae9b3a1 100644 --- a/frontend/src/Settings/Sonarr/index.tsx +++ b/frontend/src/Settings/Sonarr/index.tsx @@ -97,6 +97,16 @@ const SettingsSonarrView: FunctionComponent = () => { episodes in Sonarr. + + + + Episodes from season zero (extras) from automatic download of + subtitles. + + diff --git a/frontend/src/Settings/Subtitles/index.tsx b/frontend/src/Settings/Subtitles/index.tsx index 4f5f05db9..20bdc72d7 100644 --- a/frontend/src/Settings/Subtitles/index.tsx +++ b/frontend/src/Settings/Subtitles/index.tsx @@ -10,7 +10,13 @@ import { Slider, Text, } from "../components"; -import { antiCaptchaOption, colorOptions, folderOptions } from "./options"; +import { + adaptiveSearchingDelayOption, + adaptiveSearchingDeltaOption, + antiCaptchaOption, + colorOptions, + folderOptions, +} from "./options"; const subzeroOverride = (key: string) => { return (settings: Settings) => { @@ -124,16 +130,44 @@ const SettingsSubtitlesView: FunctionComponent = () => { - - - - When searching for subtitles, Bazarr will search less frequently to - limit call to providers. - - + + + + + + When searching for subtitles, Bazarr will reduce search + frequency to limit call to providers. + + + + + + (v === undefined ? "3w" : v)} + options={adaptiveSearchingDelayOption} + > + + How much weeks must Bazarr wait after initial search to reduce + search frequency. + + + + (v === undefined ? "1w" : v)} + options={adaptiveSearchingDeltaOption} + > + + How often should Bazarr search for subtitles when in adaptive + search mode. 
+ + + + [] = [ }, ]; +export const adaptiveSearchingDelayOption: SelectorOption[] = [ + { + label: "1 week", + value: "1w", + }, + { + label: "2 weeks", + value: "2w", + }, + { + label: "3 weeks", + value: "3w", + }, + { + label: "4 weeks", + value: "4w", + }, +]; + +export const adaptiveSearchingDeltaOption: SelectorOption[] = [ + { + label: "3 days", + value: "3d", + }, + { + label: "1 week", + value: "1w", + }, + { + label: "2 weeks", + value: "2w", + }, + { + label: "3 weeks", + value: "3w", + }, + { + label: "4 weeks", + value: "4w", + }, +]; + function buildColor(name: string) { return `color(name=${name})`; } diff --git a/frontend/src/Settings/components/forms.tsx b/frontend/src/Settings/components/forms.tsx index 844fe26ab..6ffe92f30 100644 --- a/frontend/src/Settings/components/forms.tsx +++ b/frontend/src/Settings/components/forms.tsx @@ -176,11 +176,11 @@ export const Chips: FunctionComponent = (props) => { const update = useSingleUpdate(); - const defaultValue = useLatest(settingKey, isArray, override); + const value = useLatest(settingKey, isArray, override); return ( { update(v, settingKey); }} diff --git a/frontend/src/components/SearchBar.tsx b/frontend/src/components/SearchBar.tsx index 66c5db321..86ad517a8 100644 --- a/frontend/src/components/SearchBar.tsx +++ b/frontend/src/components/SearchBar.tsx @@ -10,6 +10,7 @@ import { useHistory } from "react-router"; import { useThrottle } from "rooks"; export interface SearchResult { + id: string; name: string; link?: string; } @@ -58,7 +59,7 @@ export const SearchBar: FunctionComponent = ({ const items = useMemo(() => { const its = results.map((v) => ( diff --git a/frontend/src/components/inputs/Chips.tsx b/frontend/src/components/inputs/Chips.tsx index ce136731a..1be0050a0 100644 --- a/frontend/src/components/inputs/Chips.tsx +++ b/frontend/src/components/inputs/Chips.tsx @@ -3,6 +3,7 @@ import React, { FunctionComponent, KeyboardEvent, useCallback, + useEffect, useMemo, useRef, useState, @@ -14,15 
+15,31 @@ const SplitKeys = ["Tab", "Enter", " ", ",", ";"]; export interface ChipsProps { disabled?: boolean; defaultValue?: readonly string[]; + value?: readonly string[]; onChange?: (v: string[]) => void; } export const Chips: FunctionComponent = ({ defaultValue, + value, disabled, onChange, }) => { - const [chips, setChips] = useState(defaultValue ?? []); + const [chips, setChips] = useState>(() => { + if (value) { + return value; + } + if (defaultValue) { + return defaultValue; + } + return []; + }); + + useEffect(() => { + if (value) { + setChips(value); + } + }, [value]); const input = useRef(null); diff --git a/frontend/src/components/modals/toolOptions.ts b/frontend/src/components/modals/toolOptions.ts index 27bfcbb07..5639cd4d0 100644 --- a/frontend/src/components/modals/toolOptions.ts +++ b/frontend/src/components/modals/toolOptions.ts @@ -13,8 +13,8 @@ export const availableTranslation = { ca: "catalan", ceb: "cebuano", ny: "chichewa", - "zh-cn": "chinese (simplified)", - "zh-tw": "chinese (traditional)", + zh: "chinese (simplified)", + zt: "chinese (traditional)", co: "corsican", hr: "croatian", cs: "czech", diff --git a/libs/apprise/Apprise.py b/libs/apprise/Apprise.py index b95da22a7..8930b2a77 100644 --- a/libs/apprise/Apprise.py +++ b/libs/apprise/Apprise.py @@ -34,6 +34,7 @@ from .common import MATCH_ALL_TAG from .utils import is_exclusive_match from .utils import parse_list from .utils import parse_urls +from .utils import cwe312_url from .logger import logger from .AppriseAsset import AppriseAsset @@ -58,13 +59,15 @@ class Apprise(object): """ - def __init__(self, servers=None, asset=None, debug=False): + def __init__(self, servers=None, asset=None, location=None, debug=False): """ Loads a set of server urls while applying the Asset() module to each if specified. If no asset is provided, then the default asset is used. + Optionally specify a global ContentLocation for a more strict means + of handling Attachments. 
""" # Initialize a server list of URLs @@ -87,6 +90,11 @@ class Apprise(object): # Set our debug flag self.debug = debug + # Store our hosting location for optional strict rule handling + # of Attachments. Setting this to None removes any attachment + # restrictions. + self.location = location + @staticmethod def instantiate(url, asset=None, tag=None, suppress_exceptions=True): """ @@ -116,9 +124,14 @@ class Apprise(object): # Initialize our result set results = None + # Prepare our Asset Object + asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() + if isinstance(url, six.string_types): # Acquire our url tokens - results = plugins.url_to_dict(url) + results = plugins.url_to_dict( + url, secure_logging=asset.secure_logging) + if results is None: # Failed to parse the server URL; detailed logging handled # inside url_to_dict - nothing to report here. @@ -132,25 +145,40 @@ class Apprise(object): # schema is a mandatory dictionary item as it is the only way # we can index into our loaded plugins logger.error('Dictionary does not include a "schema" entry.') - logger.trace('Invalid dictionary unpacked as:{}{}'.format( - os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) for k, v in results.items()]))) + logger.trace( + 'Invalid dictionary unpacked as:{}{}'.format( + os.linesep, os.linesep.join( + ['{}="{}"'.format(k, v) + for k, v in results.items()]))) return None - logger.trace('Dictionary unpacked as:{}{}'.format( - os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) for k, v in results.items()]))) + logger.trace( + 'Dictionary unpacked as:{}{}'.format( + os.linesep, os.linesep.join( + ['{}="{}"'.format(k, v) for k, v in results.items()]))) + # Otherwise we handle the invalid input specified else: - logger.error('Invalid URL specified: {}'.format(url)) + logger.error( + 'An invalid URL type (%s) was specified for instantiation', + type(url)) + return None + + if not plugins.SCHEMA_MAP[results['schema']].enabled: + # + # First Plugin Enable 
Check (Pre Initialization) + # + + # Plugin has been disabled at a global level + logger.error( + '%s:// is disabled on this system.', results['schema']) return None # Build a list of tags to associate with the newly added notifications results['tag'] = set(parse_list(tag)) - # Prepare our Asset Object - results['asset'] = \ - asset if isinstance(asset, AppriseAsset) else AppriseAsset() + # Set our Asset Object + results['asset'] = asset if suppress_exceptions: try: @@ -159,14 +187,21 @@ class Apprise(object): plugin = plugins.SCHEMA_MAP[results['schema']](**results) # Create log entry of loaded URL - logger.debug('Loaded {} URL: {}'.format( - plugins.SCHEMA_MAP[results['schema']].service_name, - plugin.url())) + logger.debug( + 'Loaded {} URL: {}'.format( + plugins.SCHEMA_MAP[results['schema']].service_name, + plugin.url(privacy=asset.secure_logging))) except Exception: + # CWE-312 (Secure Logging) Handling + loggable_url = url if not asset.secure_logging \ + else cwe312_url(url) + # the arguments are invalid or can not be used. - logger.error('Could not load {} URL: {}'.format( - plugins.SCHEMA_MAP[results['schema']].service_name, url)) + logger.error( + 'Could not load {} URL: {}'.format( + plugins.SCHEMA_MAP[results['schema']].service_name, + loggable_url)) return None else: @@ -174,6 +209,24 @@ class Apprise(object): # URL information but don't wrap it in a try catch plugin = plugins.SCHEMA_MAP[results['schema']](**results) + if not plugin.enabled: + # + # Second Plugin Enable Check (Post Initialization) + # + + # Service/Plugin is disabled (on a more local level). This is a + # case where the plugin was initially enabled but then after the + # __init__() was called under the hood something pre-determined + # that it could no longer be used. + + # The only downside to doing it this way is services are + # initialized prior to returning the details() if 3rd party tools + # are polling what is available. 
These services that become + # disabled thereafter are shown initially that they can be used. + logger.error( + '%s:// has become disabled on this system.', results['schema']) + return None + return plugin def add(self, servers, asset=None, tag=None): @@ -286,7 +339,8 @@ class Apprise(object): return def notify(self, body, title='', notify_type=NotifyType.INFO, - body_format=None, tag=MATCH_ALL_TAG, attach=None): + body_format=None, tag=MATCH_ALL_TAG, attach=None, + interpret_escapes=None): """ Send a notification to all of the plugins previously loaded. @@ -306,47 +360,158 @@ class Apprise(object): Attach can contain a list of attachment URLs. attach can also be represented by a an AttachBase() (or list of) object(s). This identifies the products you wish to notify + + Set interpret_escapes to True if you want to pre-escape a string + such as turning a \n into an actual new line, etc. + """ + + if ASYNCIO_SUPPORT: + return py3compat.asyncio.tosync( + self.async_notify( + body, title, + notify_type=notify_type, body_format=body_format, + tag=tag, attach=attach, + interpret_escapes=interpret_escapes, + ), + debug=self.debug + ) + + else: + try: + results = list( + self._notifyall( + Apprise._notifyhandler, + body, title, + notify_type=notify_type, body_format=body_format, + tag=tag, attach=attach, + interpret_escapes=interpret_escapes, + ) + ) + + except TypeError: + # No notifications sent, and there was an internal error. + return False + + else: + if len(results) > 0: + # All notifications sent, return False if any failed. + return all(results) + + else: + # No notifications sent. + return None + + def async_notify(self, *args, **kwargs): + """ + Send a notification to all of the plugins previously loaded, for + asynchronous callers. This method is an async method that should be + awaited on, even if it is missing the async keyword in its signature. + (This is omitted to preserve syntax compatibility with Python 2.) 
+ + The arguments are identical to those of Apprise.notify(). This method + is not available in Python 2. + """ + + try: + coroutines = list( + self._notifyall( + Apprise._notifyhandlerasync, *args, **kwargs)) + + except TypeError: + # No notifications sent, and there was an internal error. + return py3compat.asyncio.toasyncwrap(False) + + else: + if len(coroutines) > 0: + # All notifications sent, return False if any failed. + return py3compat.asyncio.notify(coroutines) + + else: + # No notifications sent. + return py3compat.asyncio.toasyncwrap(None) + + @staticmethod + def _notifyhandler(server, **kwargs): + """ + The synchronous notification sender. Returns True if the notification + sent successfully. + """ + + try: + # Send notification + return server.notify(**kwargs) + + except TypeError: + # These our our internally thrown notifications + return False + + except Exception: + # A catch all so we don't have to abort early + # just because one of our plugins has a bug in it. + logger.exception("Unhandled Notification Exception") + return False + + @staticmethod + def _notifyhandlerasync(server, **kwargs): + """ + The asynchronous notification sender. Returns a coroutine that yields + True if the notification sent successfully. + """ + + if server.asset.async_mode: + return server.async_notify(**kwargs) + + else: + # Send the notification immediately, and wrap the result in a + # coroutine. + status = Apprise._notifyhandler(server, **kwargs) + return py3compat.asyncio.toasyncwrap(status) + + def _notifyall(self, handler, body, title='', notify_type=NotifyType.INFO, + body_format=None, tag=MATCH_ALL_TAG, attach=None, + interpret_escapes=None): + """ + Creates notifications for all of the plugins loaded. + + Returns a generator that calls handler for each notification. The first + and only argument supplied to handler is the server, and the keyword + arguments are exactly as they would be passed to server.notify(). 
""" if len(self) == 0: # Nothing to notify - return False - - # Initialize our return result which only turns to True if we send - # at least one valid notification - status = None + raise TypeError("No service(s) to notify") if not (title or body): - return False + raise TypeError("No message content specified to deliver") + + if six.PY2: + # Python 2.7.x Unicode Character Handling + # Ensure we're working with utf-8 + if isinstance(title, unicode): # noqa: F821 + title = title.encode('utf-8') + + if isinstance(body, unicode): # noqa: F821 + body = body.encode('utf-8') # Tracks conversions conversion_map = dict() # Prepare attachments if required if attach is not None and not isinstance(attach, AppriseAttachment): - try: - attach = AppriseAttachment(attach, asset=self.asset) - - except TypeError: - # bad attachments - return False + attach = AppriseAttachment( + attach, asset=self.asset, location=self.location) # Allow Asset default value body_format = self.asset.body_format \ if body_format is None else body_format - # for asyncio support; we track a list of our servers to notify - # sequentially - coroutines = [] + # Allow Asset default value + interpret_escapes = self.asset.interpret_escapes \ + if interpret_escapes is None else interpret_escapes # Iterate over our loaded plugins for server in self.find(tag): - if status is None: - # We have at least one server to notify; change status - # to be a default value of True from now (purely an - # initialiation at this point) - status = True - # If our code reaches here, we either did not define a tag (it # was set to None), or we did define a tag and the logic above # determined we need to notify the service it's associated with @@ -396,48 +561,59 @@ class Apprise(object): # Store entry directly conversion_map[server.notify_format] = body - if ASYNCIO_SUPPORT and server.asset.async_mode: - # Build a list of servers requiring notification - # that will be triggered asynchronously afterwards - 
coroutines.append(server.async_notify( - body=conversion_map[server.notify_format], - title=title, - notify_type=notify_type, - attach=attach)) + if interpret_escapes: + # + # Escape our content + # - # We gather at this point and notify at the end - continue + try: + # Added overhead required due to Python 3 Encoding Bug + # identified here: https://bugs.python.org/issue21331 + conversion_map[server.notify_format] = \ + conversion_map[server.notify_format]\ + .encode('ascii', 'backslashreplace')\ + .decode('unicode-escape') - try: - # Send notification - if not server.notify( - body=conversion_map[server.notify_format], - title=title, - notify_type=notify_type, - attach=attach): + except UnicodeDecodeError: # pragma: no cover + # This occurs using a very old verion of Python 2.7 such + # as the one that ships with CentOS/RedHat 7.x (v2.7.5). + conversion_map[server.notify_format] = \ + conversion_map[server.notify_format] \ + .decode('string_escape') - # Toggle our return status flag - status = False + except AttributeError: + # Must be of string type + logger.error('Failed to escape message body') + raise TypeError - except TypeError: - # These our our internally thrown notifications - status = False + if title: + try: + # Added overhead required due to Python 3 Encoding Bug + # identified here: https://bugs.python.org/issue21331 + title = title\ + .encode('ascii', 'backslashreplace')\ + .decode('unicode-escape') - except Exception: - # A catch all so we don't have to abort early - # just because one of our plugins has a bug in it. - logger.exception("Notification Exception") - status = False + except UnicodeDecodeError: # pragma: no cover + # This occurs using a very old verion of Python 2.7 + # such as the one that ships with CentOS/RedHat 7.x + # (v2.7.5). 
+ title = title.decode('string_escape') - if coroutines: - # perform our async notification(s) - if not py3compat.asyncio.notify(coroutines, debug=self.debug): - # Toggle our status only if we had a failure - status = False + except AttributeError: + # Must be of string type + logger.error('Failed to escape message title') + raise TypeError - return status + yield handler( + server, + body=conversion_map[server.notify_format], + title=title, + notify_type=notify_type, + attach=attach + ) - def details(self, lang=None): + def details(self, lang=None, show_requirements=False, show_disabled=False): """ Returns the details associated with the Apprise object @@ -453,8 +629,27 @@ class Apprise(object): 'asset': self.asset.details(), } - # to add it's mapping to our hash table for plugin in set(plugins.SCHEMA_MAP.values()): + # Iterate over our hashed plugins and dynamically build details on + # their status: + + content = { + 'service_name': getattr(plugin, 'service_name', None), + 'service_url': getattr(plugin, 'service_url', None), + 'setup_url': getattr(plugin, 'setup_url', None), + # Placeholder - populated below + 'details': None + } + + # Standard protocol(s) should be None or a tuple + enabled = getattr(plugin, 'enabled', True) + if not show_disabled and not enabled: + # Do not show inactive plugins + continue + + elif show_disabled: + # Add current state to response + content['enabled'] = enabled # Standard protocol(s) should be None or a tuple protocols = getattr(plugin, 'protocol', None) @@ -466,31 +661,35 @@ class Apprise(object): if isinstance(secure_protocols, six.string_types): secure_protocols = (secure_protocols, ) + # Add our protocol details to our content + content.update({ + 'protocols': protocols, + 'secure_protocols': secure_protocols, + }) + if not lang: # Simply return our results - details = plugins.details(plugin) + content['details'] = plugins.details(plugin) + if show_requirements: + content['requirements'] = plugins.requirements(plugin) + 
else: # Emulate the specified language when returning our results with self.locale.lang_at(lang): - details = plugins.details(plugin) + content['details'] = plugins.details(plugin) + if show_requirements: + content['requirements'] = plugins.requirements(plugin) # Build our response object - response['schemas'].append({ - 'service_name': getattr(plugin, 'service_name', None), - 'service_url': getattr(plugin, 'service_url', None), - 'setup_url': getattr(plugin, 'setup_url', None), - 'protocols': protocols, - 'secure_protocols': secure_protocols, - 'details': details, - }) + response['schemas'].append(content) return response - def urls(self): + def urls(self, privacy=False): """ Returns all of the loaded URLs defined in this apprise object. """ - return [x.url() for x in self.servers] + return [x.url(privacy=privacy) for x in self.servers] def pop(self, index): """ @@ -592,3 +791,7 @@ class Apprise(object): """ return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig)) else len(s.servers()) for s in self.servers]) + + +if six.PY2: + del Apprise.async_notify diff --git a/libs/apprise/Apprise.pyi b/libs/apprise/Apprise.pyi new file mode 100644 index 000000000..919d370db --- /dev/null +++ b/libs/apprise/Apprise.pyi @@ -0,0 +1,63 @@ +from typing import Any, Dict, List, Iterable, Iterator, Optional + +from . import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase, + NotifyBase, NotifyFormat, NotifyType) +from .common import ContentLocation + +_Server = Union[str, ConfigBase, NotifyBase, AppriseConfig] +_Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]] +# Can't define this recursively as mypy doesn't support recursive types: +# https://github.com/python/mypy/issues/731 +_Tag = Union[str, Iterable[Union[str, Iterable[str]]]] + +class Apprise: + def __init__( + self, + servers: _Servers = ..., + asset: Optional[AppriseAsset] = ..., + location: Optional[ContentLocation] = ..., + debug: bool = ... + ) -> None: ... 
+ @staticmethod + def instantiate( + url: Union[str, Dict[str, NotifyBase]], + asset: Optional[AppriseAsset] = ..., + tag: Optional[_Tag] = ..., + suppress_exceptions: bool = ... + ) -> NotifyBase: ... + def add( + self, + servers: _Servers = ..., + asset: Optional[AppriseAsset] = ..., + tag: Optional[_Tag] = ... + ) -> bool: ... + def clear(self) -> None: ... + def find(self, tag: str = ...) -> Iterator[Apprise]: ... + def notify( + self, + body: str, + title: str = ..., + notify_type: NotifyType = ..., + body_format: NotifyFormat = ..., + tag: _Tag = ..., + attach: Optional[AppriseAttachment] = ..., + interpret_escapes: Optional[bool] = ... + ) -> bool: ... + async def async_notify( + self, + body: str, + title: str = ..., + notify_type: NotifyType = ..., + body_format: NotifyFormat = ..., + tag: _Tag = ..., + attach: Optional[AppriseAttachment] = ..., + interpret_escapes: Optional[bool] = ... + ) -> bool: ... + def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ... + def urls(self, privacy: bool = ...) -> Iterable[str]: ... + def pop(self, index: int) -> ConfigBase: ... + def __getitem__(self, index: int) -> ConfigBase: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __iter__(self) -> Iterator[ConfigBase]: ... + def __len__(self) -> int: ... \ No newline at end of file diff --git a/libs/apprise/AppriseAsset.py b/libs/apprise/AppriseAsset.py index 123da7225..e2e95b4a7 100644 --- a/libs/apprise/AppriseAsset.py +++ b/libs/apprise/AppriseAsset.py @@ -24,7 +24,7 @@ # THE SOFTWARE. import re - +from uuid import uuid4 from os.path import join from os.path import dirname from os.path import isfile @@ -105,6 +105,36 @@ class AppriseAsset(object): # notifications are sent sequentially (one after another) async_mode = True + # Whether or not to interpret escapes found within the input text prior + # to passing it upstream. Such as converting \t to an actual tab and \n + # to a new line. 
+ interpret_escapes = False + + # For more detail see CWE-312 @ + # https://cwe.mitre.org/data/definitions/312.html + # + # By enabling this, the logging output has additional overhead applied to + # it preventing secure password and secret information from being + # displayed in the logging. Since there is overhead involved in performing + # this cleanup; system owners who run in a very isolated environment may + # choose to disable this for a slight performance bump. It is recommended + # that you leave this option as is otherwise. + secure_logging = True + + # All internal/system flags are prefixed with an underscore (_) + # These can only be initialized using Python libraries and are not picked + # up from (yaml) configuration files (if set) + + # An internal counter that is used by AppriseAPI + # (https://github.com/caronc/apprise-api). The idea is to allow one + # instance of AppriseAPI to call another, but to track how many times + # this occurs. It's intent is to prevent a loop where an AppriseAPI + # Server calls itself (or loops indefinitely) + _recursion = 0 + + # A unique identifer we can use to associate our calling source + _uid = str(uuid4()) + def __init__(self, **kwargs): """ Asset Initialization diff --git a/libs/apprise/AppriseAsset.pyi b/libs/apprise/AppriseAsset.pyi new file mode 100644 index 000000000..08303341b --- /dev/null +++ b/libs/apprise/AppriseAsset.pyi @@ -0,0 +1,34 @@ +from typing import Dict, Optional + +from . 
import NotifyFormat, NotifyType + +class AppriseAsset: + app_id: str + app_desc: str + app_url: str + html_notify_map: Dict[NotifyType, str] + default_html_color: str + default_extension: str + theme: Optional[str] + image_url_mask: str + image_url_logo: str + image_path_mask: Optional[str] + body_format: Optional[NotifyFormat] + async_mode: bool + interpret_escapes: bool + def __init__( + self, + app_id: str = ..., + app_desc: str = ..., + app_url: str = ..., + html_notify_map: Dict[NotifyType, str] = ..., + default_html_color: str = ..., + default_extension: str = ..., + theme: Optional[str] = ..., + image_url_mask: str = ..., + image_url_logo: str = ..., + image_path_mask: Optional[str] = ..., + body_format: Optional[NotifyFormat] = ..., + async_mode: bool = ..., + interpret_escapes: bool = ... + ) -> None: ... \ No newline at end of file diff --git a/libs/apprise/AppriseAttachment.py b/libs/apprise/AppriseAttachment.py index a8f27e179..37d2c0901 100644 --- a/libs/apprise/AppriseAttachment.py +++ b/libs/apprise/AppriseAttachment.py @@ -29,6 +29,8 @@ from . import attachment from . import URLBase from .AppriseAsset import AppriseAsset from .logger import logger +from .common import ContentLocation +from .common import CONTENT_LOCATIONS from .utils import GET_SCHEMA_RE @@ -38,7 +40,8 @@ class AppriseAttachment(object): """ - def __init__(self, paths=None, asset=None, cache=True, **kwargs): + def __init__(self, paths=None, asset=None, cache=True, location=None, + **kwargs): """ Loads all of the paths/urls specified (if any). @@ -59,6 +62,25 @@ class AppriseAttachment(object): It's also worth nothing that the cache value is only set to elements that are not already of subclass AttachBase() + + Optionally set your current ContentLocation in the location argument. + This is used to further handle attachments. The rules are as follows: + - INACCESSIBLE: You simply have disabled use of the object; no + attachments will be retrieved/handled. 
+ - HOSTED: You are hosting an attachment service for others. + In these circumstances all attachments that are LOCAL + based (such as file://) will not be allowed. + - LOCAL: The least restrictive mode as local files can be + referenced in addition to hosted. + + In both HOSTED and LOCAL modes, INACCESSIBLE attachment types will + continue to be inaccessible. However if you set this field (location) + to None (its default value) the attachment location category will not + be tested in any way (all attachment types will be allowed). + + The location field is also a global option that can be set when + initializing the Apprise object. + """ # Initialize our attachment listings @@ -71,6 +93,15 @@ class AppriseAttachment(object): self.asset = \ asset if isinstance(asset, AppriseAsset) else AppriseAsset() + if location is not None and location not in CONTENT_LOCATIONS: + msg = "An invalid Attachment location ({}) was specified." \ + .format(location) + logger.warning(msg) + raise TypeError(msg) + + # Store our location + self.location = location + # Now parse any paths specified if paths is not None: # Store our path(s) @@ -123,26 +154,45 @@ class AppriseAttachment(object): # Iterate over our attachments for _attachment in attachments: - - if isinstance(_attachment, attachment.AttachBase): - # Go ahead and just add our attachment into our list - self.attachments.append(_attachment) + if self.location == ContentLocation.INACCESSIBLE: + logger.warning( + "Attachments are disabled; ignoring {}" + .format(_attachment)) + return_status = False continue - elif not isinstance(_attachment, six.string_types): + if isinstance(_attachment, six.string_types): + logger.debug("Loading attachment: {}".format(_attachment)) + # Instantiate ourselves an object, this function throws or + # returns None if it fails + instance = AppriseAttachment.instantiate( + _attachment, asset=asset, cache=cache) + if not isinstance(instance, attachment.AttachBase): + return_status = False + 
continue + + elif not isinstance(_attachment, attachment.AttachBase): logger.warning( "An invalid attachment (type={}) was specified.".format( type(_attachment))) return_status = False continue - logger.debug("Loading attachment: {}".format(_attachment)) + else: + # our entry is of type AttachBase, so just go ahead and point + # our instance to it for some post processing below + instance = _attachment - # Instantiate ourselves an object, this function throws or - # returns None if it fails - instance = AppriseAttachment.instantiate( - _attachment, asset=asset, cache=cache) - if not isinstance(instance, attachment.AttachBase): + # Apply some simple logic if our location flag is set + if self.location and (( + self.location == ContentLocation.HOSTED + and instance.location != ContentLocation.HOSTED) + or instance.location == ContentLocation.INACCESSIBLE): + logger.warning( + "Attachment was disallowed due to accessibility " + "restrictions ({}->{}): {}".format( + self.location, instance.location, + instance.url(privacy=True))) return_status = False continue diff --git a/libs/apprise/AppriseAttachment.pyi b/libs/apprise/AppriseAttachment.pyi new file mode 100644 index 000000000..d68eccc13 --- /dev/null +++ b/libs/apprise/AppriseAttachment.pyi @@ -0,0 +1,38 @@ +from typing import Any, Iterable, Optional, Union + +from . import AppriseAsset, ContentLocation +from .attachment import AttachBase + +_Attachment = Union[str, AttachBase] +_Attachments = Iterable[_Attachment] + +class AppriseAttachment: + def __init__( + self, + paths: Optional[_Attachments] = ..., + asset: Optional[AppriseAttachment] = ..., + cache: bool = ..., + location: Optional[ContentLocation] = ..., + **kwargs: Any + ) -> None: ... + def add( + self, + attachments: _Attachments, + asset: Optional[AppriseAttachment] = ..., + cache: Optional[bool] = ... + ) -> bool: ... 
+ @staticmethod + def instantiate( + url: str, + asset: Optional[AppriseAsset] = ..., + cache: Optional[bool] = ..., + suppress_exceptions: bool = ... + ) -> NotifyBase: ... + def clear(self) -> None: ... + def size(self) -> int: ... + def pop(self, index: int = ...) -> AttachBase: ... + def __getitem__(self, index: int) -> AttachBase: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __iter__(self) -> Iterator[AttachBase]: ... + def __len__(self) -> int: ... \ No newline at end of file diff --git a/libs/apprise/AppriseConfig.pyi b/libs/apprise/AppriseConfig.pyi new file mode 100644 index 000000000..36fa9c065 --- /dev/null +++ b/libs/apprise/AppriseConfig.pyi @@ -0,0 +1,49 @@ +from typing import Any, Iterable, Iterator, List, Optional, Union + +from . import AppriseAsset, NotifyBase +from .config import ConfigBase + +_Configs = Union[ConfigBase, str, Iterable[str]] + +class AppriseConfig: + def __init__( + self, + paths: Optional[_Configs] = ..., + asset: Optional[AppriseAsset] = ..., + cache: bool = ..., + recursion: int = ..., + insecure_includes: bool = ..., + **kwargs: Any + ) -> None: ... + def add( + self, + configs: _Configs, + asset: Optional[AppriseAsset] = ..., + cache: bool = ..., + recursion: Optional[bool] = ..., + insecure_includes: Optional[bool] = ... + ) -> bool: ... + def add_config( + self, + content: str, + asset: Optional[AppriseAsset] = ..., + tag: Optional[str] = ..., + format: Optional[str] = ..., + recursion: Optional[int] = ..., + insecure_includes: Optional[bool] = ... + ) -> bool: ... + def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ... + def instantiate( + url: str, + asset: Optional[AppriseAsset] = ..., + tag: Optional[str] = ..., + cache: Optional[bool] = ... + ) -> NotifyBase: ... + def clear(self) -> None: ... + def server_pop(self, index: int) -> ConfigBase: ... + def pop(self, index: int = ...) -> ConfigBase: ... 
+ def __getitem__(self, index: int) -> ConfigBase: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ... + def __iter__(self) -> Iterator[ConfigBase]: ... + def __len__(self) -> int: ... \ No newline at end of file diff --git a/libs/apprise/URLBase.py b/libs/apprise/URLBase.py index 78109ae48..f5428dbb1 100644 --- a/libs/apprise/URLBase.py +++ b/libs/apprise/URLBase.py @@ -25,7 +25,7 @@ import re import six -import logging +from .logger import logger from time import sleep from datetime import datetime from xml.sax.saxutils import escape as sax_escape @@ -47,6 +47,7 @@ from .AppriseAsset import AppriseAsset from .utils import parse_url from .utils import parse_bool from .utils import parse_list +from .utils import parse_phone_no # Used to break a path list into parts PATHSPLIT_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+') @@ -115,8 +116,8 @@ class URLBase(object): # Secure sites should be verified against a Certificate Authority verify_certificate = True - # Logging - logger = logging.getLogger(__name__) + # Logging to our global logger + logger = logger # Define a default set of template arguments used for dynamically building # details about our individual plugins for developers. @@ -280,7 +281,7 @@ class URLBase(object): self._last_io_datetime = reference return - if self.request_rate_per_sec <= 0.0: + if self.request_rate_per_sec <= 0.0 and not wait: # We're done if there is no throttle limit set return @@ -560,6 +561,39 @@ class URLBase(object): return content + @staticmethod + def parse_phone_no(content, unquote=True): + """A wrapper to utils.parse_phone_no() with unquoting support + + Parses a specified set of data and breaks it into a list. + + Args: + content (str): The path to split up into a list. If a list is + provided, then it's individual entries are processed. + + unquote (:obj:`bool`, optional): call unquote on each element + added to the returned list. 
+ + Returns: + list: A unique list containing all of the elements in the path + """ + + if unquote: + try: + content = URLBase.unquote(content) + except TypeError: + # Nothing further to do + return [] + + except AttributeError: + # This exception ONLY gets thrown under Python v2.7 if an + # object() is passed in place of the content + return [] + + content = parse_phone_no(content) + + return content + @property def app_id(self): return self.asset.app_id if self.asset.app_id else '' @@ -636,6 +670,8 @@ class URLBase(object): results['qsd'].get('verify', True)) # Password overrides + if 'password' in results['qsd']: + results['password'] = results['qsd']['password'] if 'pass' in results['qsd']: results['password'] = results['qsd']['pass'] diff --git a/libs/apprise/URLBase.pyi b/libs/apprise/URLBase.pyi new file mode 100644 index 000000000..915885745 --- /dev/null +++ b/libs/apprise/URLBase.pyi @@ -0,0 +1,16 @@ +from logging import logger +from typing import Any, Iterable, Set, Optional + +class URLBase: + service_name: Optional[str] + protocol: Optional[str] + secure_protocol: Optional[str] + request_rate_per_sec: int + socket_connect_timeout: float + socket_read_timeout: float + tags: Set[str] + verify_certificate: bool + logger: logger + def url(self, privacy: bool = ..., *args: Any, **kwargs: Any) -> str: ... + def __contains__(self, tags: Iterable[str]) -> bool: ... + def __str__(self) -> str: ... \ No newline at end of file diff --git a/libs/apprise/__init__.py b/libs/apprise/__init__.py index a2511d286..090261086 100644 --- a/libs/apprise/__init__.py +++ b/libs/apprise/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2020 Chris Caron +# Copyright (C) 2021 Chris Caron # All rights reserved. # # This code is licensed under the MIT License. @@ -23,11 +23,11 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
-__title__ = 'apprise' -__version__ = '0.8.8' +__title__ = 'Apprise' +__version__ = '0.9.6' __author__ = 'Chris Caron' __license__ = 'MIT' -__copywrite__ = 'Copyright (C) 2020 Chris Caron ' +__copywrite__ = 'Copyright (C) 2021 Chris Caron ' __email__ = 'lead2gold@gmail.com' __status__ = 'Production' @@ -41,8 +41,10 @@ from .common import OverflowMode from .common import OVERFLOW_MODES from .common import ConfigFormat from .common import CONFIG_FORMATS -from .common import ConfigIncludeMode -from .common import CONFIG_INCLUDE_MODES +from .common import ContentIncludeMode +from .common import CONTENT_INCLUDE_MODES +from .common import ContentLocation +from .common import CONTENT_LOCATIONS from .URLBase import URLBase from .URLBase import PrivacyMode @@ -55,10 +57,13 @@ from .AppriseAsset import AppriseAsset from .AppriseConfig import AppriseConfig from .AppriseAttachment import AppriseAttachment +# Inherit our logging with our additional entries added to it +from .logger import logging +from .logger import logger +from .logger import LogCapture + # Set default logging handler to avoid "No handler found" warnings. 
-import logging -from logging import NullHandler -logging.getLogger(__name__).addHandler(NullHandler()) +logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = [ # Core @@ -69,6 +74,10 @@ __all__ = [ 'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode', 'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES', 'ConfigFormat', 'CONFIG_FORMATS', - 'ConfigIncludeMode', 'CONFIG_INCLUDE_MODES', + 'ContentIncludeMode', 'CONTENT_INCLUDE_MODES', + 'ContentLocation', 'CONTENT_LOCATIONS', 'PrivacyMode', + + # Logging + 'logging', 'logger', 'LogCapture', ] diff --git a/libs/apprise/assets/NotifyXML-1.0.xsd b/libs/apprise/assets/NotifyXML-1.0.xsd index d9b7235aa..0e3f8f130 100644 --- a/libs/apprise/assets/NotifyXML-1.0.xsd +++ b/libs/apprise/assets/NotifyXML-1.0.xsd @@ -1,22 +1,23 @@ - + + - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + diff --git a/libs/apprise/assets/NotifyXML-1.1.xsd b/libs/apprise/assets/NotifyXML-1.1.xsd new file mode 100644 index 000000000..cc6dbae65 --- /dev/null +++ b/libs/apprise/assets/NotifyXML-1.1.xsd @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/libs/apprise/attachment/AttachBase.py b/libs/apprise/attachment/AttachBase.py index 1fde66f4b..aa7174fcf 100644 --- a/libs/apprise/attachment/AttachBase.py +++ b/libs/apprise/attachment/AttachBase.py @@ -28,6 +28,7 @@ import time import mimetypes from ..URLBase import URLBase from ..utils import parse_bool +from ..common import ContentLocation from ..AppriseLocale import gettext_lazy as _ @@ -62,6 +63,11 @@ class AttachBase(URLBase): # 5 MB = 5242880 bytes max_file_size = 5242880 + # By default all attachments types are inaccessible. 
+ # Developers of items identified in the attachment plugin directory + # are required to set a location + location = ContentLocation.INACCESSIBLE + + # Here is where we define all of the arguments we accept on the url + # such as: schema://whatever/?overflow=upstream&format=text + # These act the same way as tokens except they are optional and/or diff --git a/libs/apprise/attachment/AttachBase.pyi b/libs/apprise/attachment/AttachBase.pyi new file mode 100644 index 000000000..9b8eb02a5 --- /dev/null +++ b/libs/apprise/attachment/AttachBase.pyi @@ -0,0 +1,37 @@ +from typing import Any, Dict, Optional + +from .. import ContentLocation + +class AttachBase: + max_detect_buffer_size: int + unknown_mimetype: str + unknown_filename: str + unknown_filename_extension: str + strict: bool + max_file_size: int + location: ContentLocation + template_args: Dict[str, Any] + def __init__( + self, + name: Optional[str] = ..., + mimetype: Optional[str] = ..., + cache: Optional[bool] = ..., + **kwargs: Any + ) -> None: ... + @property + def path(self) -> Optional[str]: ... + @property + def name(self) -> Optional[str]: ... + @property + def mimetype(self) -> Optional[str]: ... + def exists(self) -> bool: ... + def invalidate(self) -> None: ... + def download(self) -> bool: ... + @staticmethod + def parse_url( + url: str, + verify_host: bool = ... + ) -> Dict[str, Any]: ... + def __len__(self) -> int: ... + def __bool__(self) -> bool: ... + def __nonzero__(self) -> bool: ...
\ No newline at end of file diff --git a/libs/apprise/attachment/AttachFile.py b/libs/apprise/attachment/AttachFile.py index a8609bd60..20ee15199 100644 --- a/libs/apprise/attachment/AttachFile.py +++ b/libs/apprise/attachment/AttachFile.py @@ -26,6 +26,7 @@ import re import os from .AttachBase import AttachBase +from ..common import ContentLocation from ..AppriseLocale import gettext_lazy as _ @@ -40,6 +41,10 @@ class AttachFile(AttachBase): # The default protocol protocol = 'file' + # Content is local to the same location as the apprise instance + # being called (server-side) + location = ContentLocation.LOCAL + def __init__(self, path, **kwargs): """ Initialize Local File Attachment Object @@ -81,6 +86,10 @@ class AttachFile(AttachBase): validate it. """ + if self.location == ContentLocation.INACCESSIBLE: + # our content is inaccessible + return False + # Ensure any existing content set has been invalidated self.invalidate() diff --git a/libs/apprise/attachment/AttachHTTP.py b/libs/apprise/attachment/AttachHTTP.py index d5396cf85..1d915ad3c 100644 --- a/libs/apprise/attachment/AttachHTTP.py +++ b/libs/apprise/attachment/AttachHTTP.py @@ -29,6 +29,7 @@ import six import requests from tempfile import NamedTemporaryFile from .AttachBase import AttachBase +from ..common import ContentLocation from ..URLBase import PrivacyMode from ..AppriseLocale import gettext_lazy as _ @@ -50,6 +51,9 @@ class AttachHTTP(AttachBase): # The number of bytes in memory to read from the remote source at a time chunk_size = 8192 + # Web based requests are remote/external to our current location + location = ContentLocation.HOSTED + def __init__(self, headers=None, **kwargs): """ Initialize HTTP Object @@ -86,6 +90,10 @@ class AttachHTTP(AttachBase): Perform retrieval of the configuration based on the specified request """ + if self.location == ContentLocation.INACCESSIBLE: + # our content is inaccessible + return False + # Ensure any existing content set has been invalidated 
self.invalidate() diff --git a/libs/apprise/cli.py b/libs/apprise/cli.py index 690530000..70458c92d 100644 --- a/libs/apprise/cli.py +++ b/libs/apprise/cli.py @@ -26,7 +26,11 @@ import click import logging import platform +import six import sys +import os +import re + from os.path import isfile from os.path import expanduser from os.path import expandvars @@ -39,6 +43,7 @@ from . import AppriseConfig from .utils import parse_list from .common import NOTIFY_TYPES from .common import NOTIFY_FORMATS +from .common import ContentLocation from .logger import logger from . import __title__ @@ -133,6 +138,9 @@ def print_version_msg(): help='Perform a trial run but only prints the notification ' 'services to-be triggered to stdout. Notifications are never ' 'sent using this mode.') +@click.option('--details', '-l', is_flag=True, + help='Prints details about the current services supported by ' + 'Apprise.') @click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH, type=int, help='The number of recursive import entries that can be ' @@ -141,6 +149,8 @@ def print_version_msg(): @click.option('--verbose', '-v', count=True, help='Makes the operation more talkative. Use multiple v to ' 'increase the verbosity. I.e.: -vvvv') +@click.option('--interpret-escapes', '-e', is_flag=True, + help='Enable interpretation of backslash escapes') @click.option('--debug', '-D', is_flag=True, help='Debug mode') @click.option('--version', '-V', is_flag=True, help='Display the apprise version and exit.') @@ -148,7 +158,7 @@ def print_version_msg(): metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',) def main(body, title, config, attach, urls, notification_type, theme, tag, input_format, dry_run, recursion_depth, verbose, disable_async, - debug, version): + details, interpret_escapes, debug, version): """ Send a notification to all of the specified servers identified by their URLs the content provided within the title, body and notification-type. 
@@ -224,8 +234,15 @@ def main(body, title, config, attach, urls, notification_type, theme, tag, # Prepare our asset asset = AppriseAsset( + # Our body format body_format=input_format, + + # Interpret Escapes + interpret_escapes=interpret_escapes, + + # Set the theme theme=theme, + # Async mode is only used for Python v3+ and allows a user to send # all of their notifications asyncronously. This was made an option # incase there are problems in the future where it's better that @@ -234,18 +251,132 @@ def main(body, title, config, attach, urls, notification_type, theme, tag, ) # Create our Apprise object - a = Apprise(asset=asset, debug=debug) + a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL) - # Load our configuration if no URLs or specified configuration was - # identified on the command line - a.add(AppriseConfig( - paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))] - if not (config or urls) else config, - asset=asset, recursion=recursion_depth)) + if details: + # Print details and exit + results = a.details(show_requirements=True, show_disabled=True) - # Load our inventory up - for url in urls: - a.add(url) + # Sort our results: + plugins = sorted( + results['schemas'], key=lambda i: str(i['service_name'])) + for entry in plugins: + protocols = [] if not entry['protocols'] else \ + [p for p in entry['protocols'] + if isinstance(p, six.string_types)] + protocols.extend( + [] if not entry['secure_protocols'] else + [p for p in entry['secure_protocols'] + if isinstance(p, six.string_types)]) + + if len(protocols) == 1: + # Simplify view by swapping {schema} with the single + # protocol value + + # Convert tuple to list + entry['details']['templates'] = \ + list(entry['details']['templates']) + + for x in range(len(entry['details']['templates'])): + entry['details']['templates'][x] = \ + re.sub( + r'^[^}]+}://', + '{}://'.format(protocols[0]), + entry['details']['templates'][x]) + + click.echo(click.style( + '{} {:<30} '.format( + 
'+' if entry['enabled'] else '-', + str(entry['service_name'])), + fg="green" if entry['enabled'] else "red", bold=True), + nl=(not entry['enabled'] or len(protocols) == 1)) + + if not entry['enabled']: + if entry['requirements']['details']: + click.echo( + ' ' + str(entry['requirements']['details'])) + + if entry['requirements']['packages_required']: + click.echo(' Python Packages Required:') + for req in entry['requirements']['packages_required']: + click.echo(' - ' + req) + + if entry['requirements']['packages_recommended']: + click.echo(' Python Packages Recommended:') + for req in entry['requirements']['packages_recommended']: + click.echo(' - ' + req) + + # new line padding between entries + click.echo() + continue + + if len(protocols) > 1: + click.echo('| Schema(s): {}'.format( + ', '.join(protocols), + )) + + prefix = ' - ' + click.echo('{}{}'.format( + prefix, + '\n{}'.format(prefix).join(entry['details']['templates']))) + + # new line padding between entries + click.echo() + + sys.exit(0) + + # The priorities of what is accepted are parsed in order below: + # 1. URLs by command line + # 2. Configuration by command line + # 3. URLs by environment variable: APPRISE_URLS + # 4. Configuration by environment variable: APPRISE_CONFIG + # 5. 
Default Configuration File(s) (if found) + # + if urls: + if tag: + # Ignore any tags specified + logger.warning( + '--tag (-g) entries are ignored when using specified URLs') + tag = None + + # Load our URLs (if any defined) + for url in urls: + a.add(url) + + if config: + # Provide a warning to the end user if they specified both + logger.warning( + 'You defined both URLs and a --config (-c) entry; ' + 'Only the URLs will be referenced.') + + elif config: + # We load our configuration file(s) now only if no URLs were specified + # Specified config entries trump all + a.add(AppriseConfig( + paths=config, asset=asset, recursion=recursion_depth)) + + elif os.environ.get('APPRISE_URLS', '').strip(): + logger.debug('Loading provided APPRISE_URLS environment variable') + if tag: + # Ignore any tags specified + logger.warning( + '--tag (-g) entries are ignored when using specified URLs') + tag = None + + # Attempt to use our APPRISE_URLS environment variable (if populated) + a.add(os.environ['APPRISE_URLS'].strip()) + + elif os.environ.get('APPRISE_CONFIG', '').strip(): + logger.debug('Loading provided APPRISE_CONFIG environment variable') + # Fall back to config environment variable (if populated) + a.add(AppriseConfig( + paths=os.environ['APPRISE_CONFIG'].strip(), + asset=asset, recursion=recursion_depth)) + else: + # Load default configuration + a.add(AppriseConfig( + paths=[f for f in DEFAULT_SEARCH_PATHS if isfile(expanduser(f))], + asset=asset, recursion=recursion_depth)) if len(a) == 0: logger.error( diff --git a/libs/apprise/common.py b/libs/apprise/common.py index 329b5d93f..186bfe1bc 100644 --- a/libs/apprise/common.py +++ b/libs/apprise/common.py @@ -130,28 +130,58 @@ CONFIG_FORMATS = ( ) -class ConfigIncludeMode(object): +class ContentIncludeMode(object): """ - The different Cofiguration inclusion modes. All Configuration - plugins will have one of these associated with it. + The different Content inclusion modes. 
All content based plugins will + have one of these associated with it. """ - # - Configuration inclusion of same type only; hence a file:// can include + # - Content inclusion of same type only; hence a file:// can include # a file:// # - Cross file inclusion is not allowed unless insecure_includes (a flag) # is set to True. In these cases STRICT acts as type ALWAYS STRICT = 'strict' - # This configuration type can never be included + # This content type can never be included NEVER = 'never' - # File configuration can always be included + # This content can always be included ALWAYS = 'always' -CONFIG_INCLUDE_MODES = ( - ConfigIncludeMode.STRICT, - ConfigIncludeMode.NEVER, - ConfigIncludeMode.ALWAYS, +CONTENT_INCLUDE_MODES = ( + ContentIncludeMode.STRICT, + ContentIncludeMode.NEVER, + ContentIncludeMode.ALWAYS, +) + + +class ContentLocation(object): + """ + This is primarily used for handling file attachments. The idea is + to track the source of the attachment itself. We don't want + remote calls to a server to access local attachments for example. + + By knowing the attachment type and cross-associating it with how + we plan on accessing the content, we can make a judgement call + (for security reasons) if we will allow it. + + Obviously local uses of apprise can access both local and remote + type files. 
+ """ + # Content is located locally (on the same server as apprise) + LOCAL = 'local' + + # Content is located in a remote location + HOSTED = 'hosted' + + # Content is inaccessible + INACCESSIBLE = 'n/a' + + +CONTENT_LOCATIONS = ( + ContentLocation.LOCAL, + ContentLocation.HOSTED, + ContentLocation.INACCESSIBLE, ) # This is a reserved tag that is automatically assigned to every diff --git a/libs/apprise/common.pyi b/libs/apprise/common.pyi new file mode 100644 index 000000000..769573185 --- /dev/null +++ b/libs/apprise/common.pyi @@ -0,0 +1,15 @@ +class NotifyType: + INFO: NotifyType + SUCCESS: NotifyType + WARNING: NotifyType + FAILURE: NotifyType + +class NotifyFormat: + TEXT: NotifyFormat + HTML: NotifyFormat + MARKDOWN: NotifyFormat + +class ContentLocation: + LOCAL: ContentLocation + HOSTED: ContentLocation + INACCESSIBLE: ContentLocation \ No newline at end of file diff --git a/libs/apprise/config/ConfigBase.py b/libs/apprise/config/ConfigBase.py index 22efd8e29..f2b958ed8 100644 --- a/libs/apprise/config/ConfigBase.py +++ b/libs/apprise/config/ConfigBase.py @@ -34,13 +34,18 @@ from ..AppriseAsset import AppriseAsset from ..URLBase import URLBase from ..common import ConfigFormat from ..common import CONFIG_FORMATS -from ..common import ConfigIncludeMode +from ..common import ContentIncludeMode from ..utils import GET_SCHEMA_RE from ..utils import parse_list from ..utils import parse_bool from ..utils import parse_urls +from ..utils import cwe312_url from . import SCHEMA_MAP +# Test whether token is valid or not +VALID_TOKEN = re.compile( + r'(?P[a-z0-9][a-z0-9_]+)', re.I) + class ConfigBase(URLBase): """ @@ -65,7 +70,7 @@ class ConfigBase(URLBase): # By default all configuration is not includable using the 'include' # line found in configuration files. 
- allow_cross_includes = ConfigIncludeMode.NEVER + allow_cross_includes = ContentIncludeMode.NEVER # the config path manages the handling of relative include config_path = os.getcwd() @@ -205,8 +210,8 @@ class ConfigBase(URLBase): # Configuration files were detected; recursively populate them # If we have been configured to do so for url in configs: - if self.recursion > 0: + if self.recursion > 0: # Attempt to acquire the schema at the very least to allow # our configuration based urls. schema = GET_SCHEMA_RE.match(url) @@ -219,6 +224,7 @@ class ConfigBase(URLBase): url = os.path.join(self.config_path, url) url = '{}://{}'.format(schema, URLBase.quote(url)) + else: # Ensure our schema is always in lower case schema = schema.group('schema').lower() @@ -229,27 +235,31 @@ class ConfigBase(URLBase): 'Unsupported include schema {}.'.format(schema)) continue + # CWE-312 (Secure Logging) Handling + loggable_url = url if not asset.secure_logging \ + else cwe312_url(url) + # Parse our url details of the server object as dictionary # containing all of the information parsed from our URL results = SCHEMA_MAP[schema].parse_url(url) if not results: # Failed to parse the server URL self.logger.warning( - 'Unparseable include URL {}'.format(url)) + 'Unparseable include URL {}'.format(loggable_url)) continue # Handle cross inclusion based on allow_cross_includes rules if (SCHEMA_MAP[schema].allow_cross_includes == - ConfigIncludeMode.STRICT + ContentIncludeMode.STRICT and schema not in self.schemas() and not self.insecure_includes) or \ SCHEMA_MAP[schema].allow_cross_includes == \ - ConfigIncludeMode.NEVER: + ContentIncludeMode.NEVER: # Prevent the loading if insecure base protocols ConfigBase.logger.warning( 'Including {}:// based configuration is prohibited. 
' - 'Ignoring URL {}'.format(schema, url)) + 'Ignoring URL {}'.format(schema, loggable_url)) continue # Prepare our Asset Object @@ -275,7 +285,7 @@ class ConfigBase(URLBase): except Exception as e: # the arguments are invalid or can not be used. self.logger.warning( - 'Could not load include URL: {}'.format(url)) + 'Could not load include URL: {}'.format(loggable_url)) self.logger.debug('Loading Exception: {}'.format(str(e))) continue @@ -288,16 +298,23 @@ class ConfigBase(URLBase): del cfg_plugin else: + # CWE-312 (Secure Logging) Handling + loggable_url = url if not asset.secure_logging \ + else cwe312_url(url) + self.logger.debug( - 'Recursion limit reached; ignoring Include URL: %s' % url) + 'Recursion limit reached; ignoring Include URL: %s', + loggable_url) if self._cached_servers: - self.logger.info('Loaded {} entries from {}'.format( - len(self._cached_servers), self.url())) + self.logger.info( + 'Loaded {} entries from {}'.format( + len(self._cached_servers), + self.url(privacy=asset.secure_logging))) else: self.logger.warning( 'Failed to load Apprise configuration from {}'.format( - self.url())) + self.url(privacy=asset.secure_logging))) # Set the time our content was cached at self._cached_time = time.time() @@ -527,6 +544,9 @@ class ConfigBase(URLBase): # the include keyword configs = list() + # Prepare our Asset Object + asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() + # Define what a valid line should look like valid_line_re = re.compile( r'^\s*(?P([;#]+(?P.*))|' @@ -563,27 +583,37 @@ class ConfigBase(URLBase): continue if config: - ConfigBase.logger.debug('Include URL: {}'.format(config)) + # CWE-312 (Secure Logging) Handling + loggable_url = config if not asset.secure_logging \ + else cwe312_url(config) + + ConfigBase.logger.debug( + 'Include URL: {}'.format(loggable_url)) # Store our include line configs.append(config.strip()) continue + # CWE-312 (Secure Logging) Handling + loggable_url = url if not asset.secure_logging \ + 
else cwe312_url(url) + # Acquire our url tokens - results = plugins.url_to_dict(url) + results = plugins.url_to_dict( + url, secure_logging=asset.secure_logging) if results is None: # Failed to parse the server URL ConfigBase.logger.warning( - 'Unparseable URL {} on line {}.'.format(url, line)) + 'Unparseable URL {} on line {}.'.format( + loggable_url, line)) continue # Build a list of tags to associate with the newly added # notifications if any were set results['tag'] = set(parse_list(result.group('tags'))) - # Prepare our Asset Object - results['asset'] = \ - asset if isinstance(asset, AppriseAsset) else AppriseAsset() + # Set our Asset Object + results['asset'] = asset try: # Attempt to create an instance of our plugin using the @@ -591,13 +621,14 @@ class ConfigBase(URLBase): plugin = plugins.SCHEMA_MAP[results['schema']](**results) # Create log entry of loaded URL - ConfigBase.logger.debug('Loaded URL: {}'.format(plugin.url())) + ConfigBase.logger.debug( + 'Loaded URL: %s', plugin.url(privacy=asset.secure_logging)) except Exception as e: # the arguments are invalid or can not be used. 
ConfigBase.logger.warning( 'Could not load URL {} on line {}.'.format( - url, line)) + loggable_url, line)) ConfigBase.logger.debug('Loading Exception: %s' % str(e)) continue @@ -633,7 +664,9 @@ class ConfigBase(URLBase): # Load our data (safely) result = yaml.load(content, Loader=yaml.SafeLoader) - except (AttributeError, yaml.error.MarkedYAMLError) as e: + except (AttributeError, + yaml.parser.ParserError, + yaml.error.MarkedYAMLError) as e: # Invalid content ConfigBase.logger.error( 'Invalid Apprise YAML data specified.') @@ -671,7 +704,9 @@ class ConfigBase(URLBase): continue if not (hasattr(asset, k) and - isinstance(getattr(asset, k), six.string_types)): + isinstance(getattr(asset, k), + (bool, six.string_types))): + # We can't set a function or non-string set value ConfigBase.logger.warning( 'Invalid asset key "{}".'.format(k)) @@ -681,15 +716,23 @@ class ConfigBase(URLBase): # Convert to an empty string v = '' - if not isinstance(v, six.string_types): + if (isinstance(v, (bool, six.string_types)) + and isinstance(getattr(asset, k), bool)): + + # If the object in the Asset is a boolean, then + # we want to convert the specified string to + # match that. + setattr(asset, k, parse_bool(v)) + + elif isinstance(v, six.string_types): + # Set our asset object with the new value + setattr(asset, k, v.strip()) + + else: # we must set strings with a string ConfigBase.logger.warning( 'Invalid asset value to "{}".'.format(k)) continue - - # Set our asset object with the new value - setattr(asset, k, v.strip()) - # # global tag root directive # @@ -740,6 +783,10 @@ class ConfigBase(URLBase): # we can. Reset it to None on each iteration results = list() + # CWE-312 (Secure Logging) Handling + loggable_url = url if not asset.secure_logging \ + else cwe312_url(url) + if isinstance(url, six.string_types): # We're just a simple URL string... 
schema = GET_SCHEMA_RE.match(url) @@ -748,16 +795,18 @@ class ConfigBase(URLBase): # config file at least has something to take action # with. ConfigBase.logger.warning( - 'Invalid URL {}, entry #{}'.format(url, no + 1)) + 'Invalid URL {}, entry #{}'.format( + loggable_url, no + 1)) continue # We found a valid schema worthy of tracking; store it's # details: - _results = plugins.url_to_dict(url) + _results = plugins.url_to_dict( + url, secure_logging=asset.secure_logging) if _results is None: ConfigBase.logger.warning( 'Unparseable URL {}, entry #{}'.format( - url, no + 1)) + loggable_url, no + 1)) continue # add our results to our global set @@ -791,19 +840,20 @@ class ConfigBase(URLBase): .format(key, no + 1)) continue - # Store our URL and Schema Regex - _url = key - # Store our schema schema = _schema.group('schema').lower() + # Store our URL and Schema Regex + _url = key + if _url is None: # the loop above failed to match anything ConfigBase.logger.warning( - 'Unsupported schema in urls, entry #{}'.format(no + 1)) + 'Unsupported URL, entry #{}'.format(no + 1)) continue - _results = plugins.url_to_dict(_url) + _results = plugins.url_to_dict( + _url, secure_logging=asset.secure_logging) if _results is None: # Setup dictionary _results = { @@ -830,12 +880,33 @@ class ConfigBase(URLBase): if 'schema' in entries: del entries['schema'] + # support our special tokens (if they're present) + if schema in plugins.SCHEMA_MAP: + entries = ConfigBase._special_token_handler( + schema, entries) + # Extend our dictionary with our new entries r.update(entries) # add our results to our global set results.append(r) + elif isinstance(tokens, dict): + # support our special tokens (if they're present) + if schema in plugins.SCHEMA_MAP: + tokens = ConfigBase._special_token_handler( + schema, tokens) + + # Copy ourselves a template of our parsed URL as a base to + # work with + r = _results.copy() + + # add our result set + r.update(tokens) + + # add our results to our global set + 
results.append(r) + else: # add our results to our global set results.append(_results) @@ -867,6 +938,17 @@ class ConfigBase(URLBase): # Just use the global settings _results['tag'] = global_tags + for key in list(_results.keys()): + # Strip out any tokens we know that we can't accept and + # warn the user + match = VALID_TOKEN.match(key) + if not match: + ConfigBase.logger.warning( + 'Ignoring invalid token ({}) found in YAML ' + 'configuration entry #{}, item #{}' + .format(key, no + 1, entry)) + del _results[key] + ConfigBase.logger.trace( 'URL #{}: {} unpacked as:{}{}' .format(no + 1, url, os.linesep, os.linesep.join( @@ -883,7 +965,8 @@ class ConfigBase(URLBase): # Create log entry of loaded URL ConfigBase.logger.debug( - 'Loaded URL: {}'.format(plugin.url())) + 'Loaded URL: {}'.format( + plugin.url(privacy=asset.secure_logging))) except Exception as e: # the arguments are invalid or can not be used. @@ -913,6 +996,135 @@ class ConfigBase(URLBase): # Pop the element off of the stack return self._cached_servers.pop(index) + @staticmethod + def _special_token_handler(schema, tokens): + """ + This function takes a list of tokens and updates them to no longer + include any special tokens such as +,-, and : + + - schema must be a valid schema of a supported plugin type + - tokens must be a dictionary containing the yaml entries parsed. + + The idea here is we can post process a set of tokens provided in + a YAML file where the user provided some of the special keywords. 
+ + We effectivley look up what these keywords map to their appropriate + value they're expected + """ + # Create a copy of our dictionary + tokens = tokens.copy() + + for kw, meta in plugins.SCHEMA_MAP[schema]\ + .template_kwargs.items(): + + # Determine our prefix: + prefix = meta.get('prefix', '+') + + # Detect any matches + matches = \ + {k[1:]: str(v) for k, v in tokens.items() + if k.startswith(prefix)} + + if not matches: + # we're done with this entry + continue + + if not isinstance(tokens.get(kw), dict): + # Invalid; correct it + tokens[kw] = dict() + + # strip out processed tokens + tokens = {k: v for k, v in tokens.items() + if not k.startswith(prefix)} + + # Update our entries + tokens[kw].update(matches) + + # Now map our tokens accordingly to the class templates defined by + # each service. + # + # This is specifically used for YAML file parsing. It allows a user to + # define an entry such as: + # + # urls: + # - mailto://user:pass@domain: + # - to: user1@hotmail.com + # - to: user2@hotmail.com + # + # Under the hood, the NotifyEmail() class does not parse the `to` + # argument. It's contents needs to be mapped to `targets`. This is + # defined in the class via the `template_args` and template_tokens` + # section. + # + # This function here allows these mappings to take place within the + # YAML file as independant arguments. 
+ class_templates = \ + plugins.details(plugins.SCHEMA_MAP[schema]) + + for key in list(tokens.keys()): + + if key not in class_templates['args']: + # No need to handle non-arg entries + continue + + # get our `map_to` and/or 'alias_of' value (if it exists) + map_to = class_templates['args'][key].get( + 'alias_of', class_templates['args'][key].get('map_to', '')) + + if map_to == key: + # We're already good as we are now + continue + + if map_to in class_templates['tokens']: + meta = class_templates['tokens'][map_to] + + else: + meta = class_templates['args'].get( + map_to, class_templates['args'][key]) + + # Perform a translation/mapping if our code reaches here + value = tokens[key] + del tokens[key] + + # Detect if we're dealign with a list or not + is_list = re.search( + r'^(list|choice):.*', + meta.get('type'), + re.IGNORECASE) + + if map_to not in tokens: + tokens[map_to] = [] if is_list \ + else meta.get('default') + + elif is_list and not isinstance(tokens.get(map_to), list): + # Convert ourselves to a list if we aren't already + tokens[map_to] = [tokens[map_to]] + + # Type Conversion + if re.search( + r'^(choice:)?string', + meta.get('type'), + re.IGNORECASE) \ + and not isinstance(value, six.string_types): + + # Ensure our format is as expected + value = str(value) + + # Apply any further translations if required (absolute map) + # This is the case when an arg maps to a token which further + # maps to a different function arg on the class constructor + abs_map = meta.get('map_to', map_to) + + # Set our token as how it was provided by the configuration + if isinstance(tokens.get(map_to), list): + tokens[abs_map].append(value) + + else: + tokens[abs_map] = value + + # Return our tokens + return tokens + def __getitem__(self, index): """ Returns the indexed server entry associated with the loaded diff --git a/libs/apprise/config/ConfigBase.pyi b/libs/apprise/config/ConfigBase.pyi new file mode 100644 index 000000000..abff1204d --- /dev/null +++ 
b/libs/apprise/config/ConfigBase.pyi @@ -0,0 +1,3 @@ +from .. import URLBase + +class ConfigBase(URLBase): ... \ No newline at end of file diff --git a/libs/apprise/config/ConfigFile.py b/libs/apprise/config/ConfigFile.py index 9f8102253..6fd1ecb23 100644 --- a/libs/apprise/config/ConfigFile.py +++ b/libs/apprise/config/ConfigFile.py @@ -28,7 +28,7 @@ import io import os from .ConfigBase import ConfigBase from ..common import ConfigFormat -from ..common import ConfigIncludeMode +from ..common import ContentIncludeMode from ..AppriseLocale import gettext_lazy as _ @@ -44,7 +44,7 @@ class ConfigFile(ConfigBase): protocol = 'file' # Configuration file inclusion can only be of the same type - allow_cross_includes = ConfigIncludeMode.STRICT + allow_cross_includes = ContentIncludeMode.STRICT def __init__(self, path, **kwargs): """ diff --git a/libs/apprise/config/ConfigHTTP.py b/libs/apprise/config/ConfigHTTP.py index c4ad29425..88352733c 100644 --- a/libs/apprise/config/ConfigHTTP.py +++ b/libs/apprise/config/ConfigHTTP.py @@ -28,7 +28,7 @@ import six import requests from .ConfigBase import ConfigBase from ..common import ConfigFormat -from ..common import ConfigIncludeMode +from ..common import ContentIncludeMode from ..URLBase import PrivacyMode from ..AppriseLocale import gettext_lazy as _ @@ -66,7 +66,7 @@ class ConfigHTTP(ConfigBase): max_error_buffer_size = 2048 # Configuration file inclusion can always include this type - allow_cross_includes = ConfigIncludeMode.ALWAYS + allow_cross_includes = ContentIncludeMode.ALWAYS def __init__(self, headers=None, **kwargs): """ diff --git a/libs/apprise/i18n/apprise.pot b/libs/apprise/i18n/apprise.pot index 2a0dc5e0a..274b379c1 100644 --- a/libs/apprise/i18n/apprise.pot +++ b/libs/apprise/i18n/apprise.pot @@ -1,21 +1,27 @@ # Translations template for apprise. -# Copyright (C) 2020 Chris Caron +# Copyright (C) 2021 Chris Caron # This file is distributed under the same license as the apprise project. -# FIRST AUTHOR , 2020. 
+# FIRST AUTHOR , 2021. # #, fuzzy msgid "" msgstr "" -"Project-Id-Version: apprise 0.8.8\n" +"Project-Id-Version: apprise 0.9.6\n" "Report-Msgid-Bugs-To: lead2gold@gmail.com\n" -"POT-Creation-Date: 2020-09-02 07:46-0400\n" +"POT-Creation-Date: 2021-12-01 18:56-0500\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=utf-8\n" "Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.7.0\n" +"Generated-By: Babel 2.9.1\n" + +msgid "A local Gnome environment is required." +msgstr "" + +msgid "A local Microsoft Windows environment is required." +msgstr "" msgid "API Key" msgstr "" @@ -44,6 +50,27 @@ msgstr "" msgid "Add Tokens" msgstr "" +msgid "Alert Type" +msgstr "" + +msgid "Alias" +msgstr "" + +msgid "Amount" +msgstr "" + +msgid "App Access Token" +msgstr "" + +msgid "App ID" +msgstr "" + +msgid "App Version" +msgstr "" + +msgid "Application ID" +msgstr "" + msgid "Application Key" msgstr "" @@ -83,6 +110,9 @@ msgstr "" msgid "Cache Results" msgstr "" +msgid "Call" +msgstr "" + msgid "Carbon Copy" msgstr "" @@ -104,15 +134,27 @@ msgstr "" msgid "Country" msgstr "" +msgid "Currency" +msgstr "" + msgid "Custom Icon" msgstr "" msgid "Cycles" msgstr "" +msgid "DBus Notification" +msgstr "" + +msgid "Details" +msgstr "" + msgid "Detect Bot Owner" msgstr "" +msgid "Device" +msgstr "" + msgid "Device API Key" msgstr "" @@ -134,12 +176,18 @@ msgstr "" msgid "Email" msgstr "" +msgid "Email Header" +msgstr "" + msgid "Encrypted Password" msgstr "" msgid "Encrypted Salt" msgstr "" +msgid "Entity" +msgstr "" + msgid "Event" msgstr "" @@ -152,6 +200,12 @@ msgstr "" msgid "Facility" msgstr "" +msgid "Flair ID" +msgstr "" + +msgid "Flair Text" +msgstr "" + msgid "Footer Logo" msgstr "" @@ -170,6 +224,9 @@ msgstr "" msgid "From Phone No" msgstr "" +msgid "Gnome Notification" +msgstr "" + msgid "Group" msgstr "" @@ -185,12 +242,33 @@ msgstr "" msgid "Icon Type" msgstr 
"" +msgid "Identifier" +msgstr "" + +msgid "Image Link" +msgstr "" + msgid "Include Footer" msgstr "" msgid "Include Image" msgstr "" +msgid "Include Segment" +msgstr "" + +msgid "Is Ad?" +msgstr "" + +msgid "Is Spoiler" +msgstr "" + +msgid "Kind" +msgstr "" + +msgid "Language" +msgstr "" + msgid "Local File" msgstr "" @@ -200,6 +278,15 @@ msgstr "" msgid "Log to STDERR" msgstr "" +msgid "Long-Lived Access Token" +msgstr "" + +msgid "MacOSX Notification" +msgstr "" + +msgid "Master Key" +msgstr "" + msgid "Memory" msgstr "" @@ -209,18 +296,41 @@ msgstr "" msgid "Message Mode" msgstr "" +msgid "Message Type" +msgstr "" + msgid "Modal" msgstr "" msgid "Mode" msgstr "" +msgid "NSFW" +msgstr "" + +msgid "Name" +msgstr "" + +msgid "No dependencies." +msgstr "" + +msgid "Notification ID" +msgstr "" + msgid "Notify Format" msgstr "" msgid "OAuth Access Token" msgstr "" +msgid "OAuth2 KeyFile" +msgstr "" + +msgid "" +"Only works with Mac OS X 10.8 and higher. Additionally requires that " +"/usr/local/bin/terminal-notifier is locally accessible." +msgstr "" + msgid "Organization" msgstr "" @@ -230,6 +340,12 @@ msgstr "" msgid "Overflow Mode" msgstr "" +msgid "Packages are recommended to improve functionality." +msgstr "" + +msgid "Packages are required to function." 
+msgstr "" + msgid "Password" msgstr "" @@ -254,6 +370,9 @@ msgstr "" msgid "Provider Key" msgstr "" +msgid "QOS" +msgstr "" + msgid "Region" msgstr "" @@ -263,6 +382,9 @@ msgstr "" msgid "Remove Tokens" msgstr "" +msgid "Resubmit Flag" +msgstr "" + msgid "Retry" msgstr "" @@ -287,6 +409,9 @@ msgstr "" msgid "Secure Mode" msgstr "" +msgid "Send Replies" +msgstr "" + msgid "Sender ID" msgstr "" @@ -296,6 +421,9 @@ msgstr "" msgid "Server Timeout" msgstr "" +msgid "Silent Notification" +msgstr "" + msgid "Socket Connect Timeout" msgstr "" @@ -305,6 +433,9 @@ msgstr "" msgid "Sound" msgstr "" +msgid "Sound Link" +msgstr "" + msgid "Source Email" msgstr "" @@ -314,12 +445,21 @@ msgstr "" msgid "Source Phone No" msgstr "" +msgid "Special Text Color" +msgstr "" + msgid "Sticky" msgstr "" msgid "Subtitle" msgstr "" +msgid "Syslog Mode" +msgstr "" + +msgid "Tags" +msgstr "" + msgid "Target Channel" msgstr "" @@ -344,24 +484,45 @@ msgstr "" msgid "Target Encoded ID" msgstr "" +msgid "Target Escalation" +msgstr "" + msgid "Target JID" msgstr "" msgid "Target Phone No" msgstr "" +msgid "Target Player ID" +msgstr "" + +msgid "Target Queue" +msgstr "" + msgid "Target Room Alias" msgstr "" msgid "Target Room ID" msgstr "" +msgid "Target Schedule" +msgstr "" + msgid "Target Short Code" msgstr "" +msgid "Target Stream" +msgstr "" + +msgid "Target Subreddit" +msgstr "" + msgid "Target Tag ID" msgstr "" +msgid "Target Team" +msgstr "" + msgid "Target Topic" msgstr "" @@ -371,12 +532,24 @@ msgstr "" msgid "Targets" msgstr "" +msgid "Targets " +msgstr "" + +msgid "Team Name" +msgstr "" + msgid "Template" msgstr "" msgid "Template Data" msgstr "" +msgid "Template Path" +msgstr "" + +msgid "Template Tokens" +msgstr "" + msgid "Tenant Domain" msgstr "" @@ -404,12 +577,27 @@ msgstr "" msgid "Token C" msgstr "" +msgid "URL" +msgstr "" + +msgid "URL Title" +msgstr "" + msgid "Urgency" msgstr "" msgid "Use Avatar" msgstr "" +msgid "Use Blocks" +msgstr "" + +msgid "Use Fields" +msgstr "" + 
+msgid "Use Session" +msgstr "" + msgid "User ID" msgstr "" @@ -434,18 +622,27 @@ msgstr "" msgid "Web Based" msgstr "" +msgid "Web Page Preview" +msgstr "" + msgid "Webhook" msgstr "" msgid "Webhook ID" msgstr "" +msgid "Webhook Key" +msgstr "" + msgid "Webhook Mode" msgstr "" msgid "Webhook Token" msgstr "" +msgid "Workspace" +msgstr "" + msgid "X-Axis" msgstr "" @@ -455,6 +652,9 @@ msgstr "" msgid "Y-Axis" msgstr "" +msgid "libdbus-1.so.x must be installed." +msgstr "" + msgid "ttl" msgstr "" diff --git a/libs/apprise/logger.py b/libs/apprise/logger.py index c09027dff..082178129 100644 --- a/libs/apprise/logger.py +++ b/libs/apprise/logger.py @@ -23,7 +23,12 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. +import os import logging +from io import StringIO + +# The root identifier needed to monitor 'apprise' logging +LOGGER_NAME = 'apprise' # Define a verbosity level that is a noisier then debug mode logging.TRACE = logging.DEBUG - 1 @@ -57,5 +62,136 @@ def deprecate(self, message, *args, **kwargs): logging.Logger.trace = trace logging.Logger.deprecate = deprecate -# Create ourselve a generic logging reference -logger = logging.getLogger('apprise') +# Create ourselve a generic (singleton) logging reference +logger = logging.getLogger(LOGGER_NAME) + + +class LogCapture(object): + """ + A class used to allow one to instantiate loggers that write to + memory for temporary purposes. e.g.: + + 1. with LogCapture() as captured: + 2. + 3. # Send our notification(s) + 4. aobj.notify("hello world") + 5. + 6. # retrieve our logs produced by the above call via our + 7. # `captured` StringIO object we have access to within the `with` + 8. # block here: + 9. 
print(captured.getvalue()) + + """ + def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True, + fmt='%(asctime)s - %(levelname)s - %(message)s'): + """ + Instantiate a temporary log capture object + + If a path is specified, then log content is sent to that file instead + of a StringIO object. + + You can optionally specify a logging level such as logging.INFO if you + wish, otherwise by default the script uses whatever logging has been + set globally. If you set delete to `False` then when using log files, + they are not automatically cleaned up afterwards. + + Optionally over-ride the fmt as well if you wish. + + """ + # Our memory buffer placeholder + self.__buffer_ptr = StringIO() + + # Store our file path as it will determine whether or not we write to + # memory and a file + self.__path = path + self.__delete = delete + + # Our logging level tracking + self.__level = level + self.__restore_level = None + + # Acquire a pointer to our logger + self.__logger = logging.getLogger(name) + + # Prepare our handler + self.__handler = logging.StreamHandler(self.__buffer_ptr) \ + if not self.__path else logging.FileHandler( + self.__path, mode='a', encoding='utf-8') + + # Use the specified level, otherwise take on the already + # effective level of our logger + self.__handler.setLevel( + self.__level if self.__level is not None + else self.__logger.getEffectiveLevel()) + + # Prepare our formatter + self.__handler.setFormatter(logging.Formatter(fmt)) + + def __enter__(self): + """ + Allows logger manipulation within a 'with' block + """ + + if self.__level is not None: + # Temporary adjust our log level if required + self.__restore_level = self.__logger.getEffectiveLevel() + if self.__restore_level > self.__level: + # Bump our log level up for the duration of our `with` + self.__logger.setLevel(self.__level) + + else: + # No restoration required + self.__restore_level = None + + else: + # Do nothing but enforce that we have nothing to restore to + 
self.__restore_level = None + + if self.__path: + # If a path has been identified, ensure we can write to the path + # and that the file exists + with open(self.__path, 'a'): + os.utime(self.__path, None) + + # Update our buffer pointer + self.__buffer_ptr = open(self.__path, 'r') + + # Add our handler + self.__logger.addHandler(self.__handler) + + # return our memory pointer + return self.__buffer_ptr + + def __exit__(self, exc_type, exc_value, tb): + """ + removes the handler gracefully when the with block has completed + """ + + # Flush our content + self.__handler.flush() + self.__buffer_ptr.flush() + + # Drop our handler + self.__logger.removeHandler(self.__handler) + + if self.__restore_level is not None: + # Restore level + self.__logger.setLevel(self.__restore_level) + + if self.__path: + # Close our file pointer + self.__buffer_ptr.close() + if self.__delete: + try: + # Always remove file afterwards + os.unlink(self.__path) + + except OSError: + # It's okay if the file does not exist + pass + + if exc_type is not None: + # pass exception on if one was generated + return False + + return True diff --git a/libs/apprise/plugins/NotifyAppriseAPI.py b/libs/apprise/plugins/NotifyAppriseAPI.py new file mode 100644 index 000000000..b981f97a2 --- /dev/null +++ b/libs/apprise/plugins/NotifyAppriseAPI.py @@ -0,0 +1,382 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2021 Chris Caron +# All rights reserved. +# +# This code is licensed under the MIT License. 
+# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files(the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions : +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +import re +import six +import requests +from json import dumps + +from .NotifyBase import NotifyBase +from ..URLBase import PrivacyMode +from ..common import NotifyType +from ..utils import parse_list +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + + +class NotifyAppriseAPI(NotifyBase): + """ + A wrapper for Apprise (Persistent) API Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'Apprise API' + + # The services URL + service_url = 'https://github.com/caronc/apprise-api' + + # The default protocol + protocol = 'apprise' + + # The default secure protocol + secure_protocol = 'apprises' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api' + + # Depending on the number of transactions/notifications taking place, this + # could take a while. 30 seconds should be enough to perform the task + socket_connect_timeout = 30.0 + + # Disable throttle rate for Apprise API requests since they are normally + # local anyway + request_rate_per_sec = 0.0 + + # Define object templates + templates = ( + '{schema}://{host}/{token}', + '{schema}://{host}:{port}/{token}', + '{schema}://{user}@{host}/{token}', + '{schema}://{user}@{host}:{port}/{token}', + '{schema}://{user}:{password}@{host}/{token}', + '{schema}://{user}:{password}@{host}:{port}/{token}', + ) + + # Define our tokens; these are the minimum tokens required required to + # be passed into this function (as arguments). 
The syntax appends any + # previously defined in the base package and builds onto them + template_tokens = dict(NotifyBase.template_tokens, **{ + 'host': { + 'name': _('Hostname'), + 'type': 'string', + 'required': True, + }, + 'port': { + 'name': _('Port'), + 'type': 'int', + 'min': 1, + 'max': 65535, + }, + 'user': { + 'name': _('Username'), + 'type': 'string', + }, + 'password': { + 'name': _('Password'), + 'type': 'string', + 'private': True, + }, + 'token': { + 'name': _('Token'), + 'type': 'string', + 'required': True, + 'private': True, + 'regex': (r'^[A-Z0-9_-]{1,32}$', 'i'), + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'tags': { + 'name': _('Tags'), + 'type': 'string', + }, + 'to': { + 'alias_of': 'token', + }, + }) + + # Define any kwargs we're using + template_kwargs = { + 'headers': { + 'name': _('HTTP Header'), + 'prefix': '+', + }, + } + + def __init__(self, token=None, tags=None, headers=None, **kwargs): + """ + Initialize Apprise API Object + + headers can be a dictionary of key/value pairs that you want to + additionally include as part of the server headers to post with + + """ + super(NotifyAppriseAPI, self).__init__(**kwargs) + + self.fullpath = kwargs.get('fullpath') + if not isinstance(self.fullpath, six.string_types): + self.fullpath = '/' + + self.token = validate_regex( + token, *self.template_tokens['token']['regex']) + if not self.token: + msg = 'The Apprise API token specified ({}) is invalid.'\ + .format(token) + self.logger.warning(msg) + raise TypeError(msg) + + # Build list of tags + self.__tags = parse_list(tags) + + self.headers = {} + if headers: + # Store our extra headers + self.headers.update(headers) + + return + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. 
+ """ + + # Our URL parameters + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + # Append our headers into our parameters + params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + + if self.__tags: + params['tags'] = ','.join([x for x in self.__tags]) + + # Determine Authentication + auth = '' + if self.user and self.password: + auth = '{user}:{password}@'.format( + user=NotifyAppriseAPI.quote(self.user, safe=''), + password=self.pprint( + self.password, privacy, mode=PrivacyMode.Secret, safe=''), + ) + elif self.user: + auth = '{user}@'.format( + user=NotifyAppriseAPI.quote(self.user, safe=''), + ) + + default_port = 443 if self.secure else 80 + + fullpath = self.fullpath.strip('/') + return '{schema}://{auth}{hostname}{port}{fullpath}{token}' \ + '/?{params}'.format( + schema=self.secure_protocol + if self.secure else self.protocol, + auth=auth, + # never encode hostname since we're expecting it to be a + # valid one + hostname=self.host, + port='' if self.port is None or self.port == default_port + else ':{}'.format(self.port), + fullpath='/{}/'.format(NotifyAppriseAPI.quote( + fullpath, safe='/')) if fullpath else '/', + token=self.pprint(self.token, privacy, safe=''), + params=NotifyAppriseAPI.urlencode(params)) + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform Apprise API Notification + """ + + headers = {} + # Apply any/all header over-rides defined + headers.update(self.headers) + + # prepare Apprise API Object + payload = { + # Apprise API Payload + 'title': title, + 'body': body, + 'type': notify_type, + 'format': self.notify_format, + } + + if self.__tags: + payload['tag'] = self.__tags + + auth = None + if self.user: + auth = (self.user, self.password) + + # Set our schema + schema = 'https' if self.secure else 'http' + + url = '%s://%s' % (schema, self.host) + if isinstance(self.port, int): + url += ':%d' % self.port + + fullpath = self.fullpath.strip('/') + url += 
'/{}/'.format(fullpath) if fullpath else '/' + url += 'notify/{}'.format(self.token) + + # Some entries can not be over-ridden + headers.update({ + 'User-Agent': self.app_id, + 'Content-Type': 'application/json', + # Pass our Source UUID4 Identifier + 'X-Apprise-ID': self.asset._uid, + # Pass our current recursion count to our upstream server + 'X-Apprise-Recursion-Count': str(self.asset._recursion + 1), + }) + + self.logger.debug('Apprise API POST URL: %s (cert_verify=%r)' % ( + url, self.verify_certificate, + )) + self.logger.debug('Apprise API Payload: %s' % str(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + url, + data=dumps(payload), + headers=headers, + auth=auth, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + if r.status_code != requests.codes.ok: + # We had a problem + status_str = \ + NotifyAppriseAPI.http_response_code_lookup(r.status_code) + + self.logger.warning( + 'Failed to send Apprise API notification: ' + '{}{}error={}.'.format( + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug('Response Details:\r\n{}'.format(r.content)) + + # Return; we're done + return False + + else: + self.logger.info('Sent Apprise API notification.') + + except requests.RequestException as e: + self.logger.warning( + 'A Connection error occurred sending Apprise API ' + 'notification to %s.' % self.host) + self.logger.debug('Socket Exception: %s' % str(e)) + + # Return; we're done + return False + + return True + + @staticmethod + def parse_native_url(url): + """ + Support http://hostname/notify/token and + http://hostname/path/notify/token + """ + + result = re.match( + r'^http(?Ps?)://(?P[A-Z0-9._-]+)' + r'(:(?P[0-9]+))?' + r'(?P/[^?]+?)?/notify/(?P[A-Z0-9_-]{1,32})/?' 
+ r'(?P\?.+)?$', url, re.I) + + if result: + return NotifyAppriseAPI.parse_url( + '{schema}://{hostname}{port}{path}/{token}/{params}'.format( + schema=NotifyAppriseAPI.secure_protocol + if result.group('secure') else NotifyAppriseAPI.protocol, + hostname=result.group('hostname'), + port='' if not result.group('port') + else ':{}'.format(result.group('port')), + path='' if not result.group('path') + else result.group('path'), + token=result.group('token'), + params='' if not result.group('params') + else '?{}'.format(result.group('params')))) + + return None + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to re-instantiate this object. + + """ + results = NotifyBase.parse_url(url) + if not results: + # We're done early as we couldn't load the results + return results + + # Add our headers that the user can potentially over-ride if they wish + # to to our returned result set + results['headers'] = results['qsd+'] + if results['qsd-']: + results['headers'].update(results['qsd-']) + NotifyBase.logger.deprecate( + "minus (-) based Apprise API header tokens are being " + " removed; use the plus (+) symbol instead.") + + # Tidy our header entries by unquoting them + results['headers'] = \ + {NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y) + for x, y in results['headers'].items()} + + # Support the passing of tags in the URL + if 'tags' in results['qsd'] and len(results['qsd']['tags']): + results['tags'] = \ + NotifyAppriseAPI.parse_list(results['qsd']['tags']) + + # Support the 'to' & 'token' variable so that we can support rooms + # this way too. 
+ if 'token' in results['qsd'] and len(results['qsd']['token']): + results['token'] = \ + NotifyAppriseAPI.unquote(results['qsd']['token']) + + elif 'to' in results['qsd'] and len(results['qsd']['to']): + results['token'] = NotifyAppriseAPI.unquote(results['qsd']['to']) + + else: + # Start with a list of path entries to work with + entries = NotifyAppriseAPI.split_path(results['fullpath']) + if entries: + # use our last entry found + results['token'] = entries[-1] + + # pop our last entry off + entries = entries[:-1] + + # re-assemble our full path + results['fullpath'] = '/'.join(entries) + + return results diff --git a/libs/apprise/plugins/NotifyBase.py b/libs/apprise/plugins/NotifyBase.py index 3a0538bcc..82c025506 100644 --- a/libs/apprise/plugins/NotifyBase.py +++ b/libs/apprise/plugins/NotifyBase.py @@ -52,6 +52,54 @@ class NotifyBase(BASE_OBJECT): This is the base class for all notification services """ + # An internal flag used to test the state of the plugin. If set to + # False, then the plugin is not used. Plugins can disable themselves + # due to enviroment issues (such as missing libraries, or platform + # dependencies that are not present). By default all plugins are + # enabled. + enabled = True + + # Some plugins may require additional packages above what is provided + # already by Apprise. + # + # Use this section to relay this information to the users of the script to + # help guide them with what they need to know if they plan on using your + # plugin. The below configuration should otherwise accomodate all normal + # situations and will not requrie any updating: + requirements = { + # Use the description to provide a human interpretable description of + # what is required to make the plugin work. This is only nessisary + # if there are package dependencies. Setting this to default will + # cause a general response to be returned. Only set this if you plan + # on over-riding the default. Always consider language support here. 
+ # So before providing a value do the following in your code base: + # + # from apprise.AppriseLocale import gettext_lazy as _ + # + # 'details': _('My detailed requirements') + 'details': None, + + # Define any required packages needed for the plugin to run. This is + # an array of strings that simply look like lines residing in a + # `requirements.txt` file... + # + # As an example, an entry may look like: + # 'packages_required': [ + # 'cryptography < 3.4`, + # ] + 'packages_required': [], + + # Recommended packages identify packages that are not required to make + # your plugin work, but would improve it's use or grant it access to + # full functionality (that might otherwise be limited). + + # Similar to `packages_required`, you would identify each entry in + # the array as you would in a `requirements.txt` file. + # + # - Do not re-provide entries already in the `packages_required` + 'packages_recommended': [], + } + # The services URL service_url = None @@ -153,7 +201,8 @@ class NotifyBase(BASE_OBJECT): # Provide override self.overflow_mode = overflow - def image_url(self, notify_type, logo=False, extension=None): + def image_url(self, notify_type, logo=False, extension=None, + image_size=None): """ Returns Image URL if possible """ @@ -166,7 +215,7 @@ class NotifyBase(BASE_OBJECT): return self.asset.image_url( notify_type=notify_type, - image_size=self.image_size, + image_size=self.image_size if image_size is None else image_size, logo=logo, extension=extension, ) @@ -222,6 +271,13 @@ class NotifyBase(BASE_OBJECT): """ + if not self.enabled: + # Deny notifications issued to services that are disabled + self.logger.warning( + "{} is currently disabled on this system.".format( + self.service_name)) + return False + # Prepare attachments if required if attach is not None and not isinstance(attach, AppriseAttachment): try: diff --git a/libs/apprise/plugins/NotifyBase.pyi b/libs/apprise/plugins/NotifyBase.pyi new file mode 100644 index 000000000..9cf3e404c --- 
/dev/null +++ b/libs/apprise/plugins/NotifyBase.pyi @@ -0,0 +1 @@ +class NotifyBase: ... \ No newline at end of file diff --git a/libs/apprise/plugins/NotifyClickSend.py b/libs/apprise/plugins/NotifyClickSend.py index a7d89c18b..9054c6f01 100644 --- a/libs/apprise/plugins/NotifyClickSend.py +++ b/libs/apprise/plugins/NotifyClickSend.py @@ -36,7 +36,6 @@ # The API reference used to build this plugin was documented here: # https://developers.clicksend.com/docs/rest/v3/ # -import re import requests from json import dumps from base64 import b64encode @@ -44,7 +43,8 @@ from base64 import b64encode from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode from ..common import NotifyType -from ..utils import parse_list +from ..utils import is_phone_no +from ..utils import parse_phone_no from ..utils import parse_bool from ..AppriseLocale import gettext_lazy as _ @@ -53,12 +53,6 @@ CLICKSEND_HTTP_ERROR_MAP = { 401: 'Unauthorized - Invalid Token.', } -# Some Phone Number Detection -IS_PHONE_NO = re.compile(r'^\+?(?P[0-9\s)(+-]+)\s*$') - -# Used to break path apart into list of channels -TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,#\\/]+') - class NotifyClickSend(NotifyBase): """ @@ -151,26 +145,18 @@ class NotifyClickSend(NotifyBase): self.logger.warning(msg) raise TypeError(msg) - for target in parse_list(targets): + for target in parse_phone_no(targets): # Validate targets and drop bad ones: - result = IS_PHONE_NO.match(target) - if result: - # Further check our phone # for it's digit count - result = ''.join(re.findall(r'\d+', result.group('phone'))) - if len(result) < 11 or len(result) > 14: - self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), - ) - continue - - # store valid phone number - self.targets.append(result) + result = is_phone_no(target) + if not result: + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) continue - self.logger.warning( - 'Dropped invalid phone # ' - '({}) 
specified.'.format(target)) + # store valid phone number + self.targets.append(result['full']) def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): """ @@ -321,8 +307,7 @@ class NotifyClickSend(NotifyBase): # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += [x for x in filter( - bool, TARGET_LIST_DELIM.split( - NotifyClickSend.unquote(results['qsd']['to'])))] + results['targets'] += \ + NotifyClickSend.parse_phone_no(results['qsd']['to']) return results diff --git a/libs/apprise/plugins/NotifyD7Networks.py b/libs/apprise/plugins/NotifyD7Networks.py index f04082c68..728f119ab 100644 --- a/libs/apprise/plugins/NotifyD7Networks.py +++ b/libs/apprise/plugins/NotifyD7Networks.py @@ -30,7 +30,6 @@ # (both user and password) from the API Details section from within your # account profile area: https://d7networks.com/accounts/profile/ -import re import six import requests import base64 @@ -40,7 +39,8 @@ from json import loads from .NotifyBase import NotifyBase from ..URLBase import PrivacyMode from ..common import NotifyType -from ..utils import parse_list +from ..utils import is_phone_no +from ..utils import parse_phone_no from ..utils import parse_bool from ..AppriseLocale import gettext_lazy as _ @@ -52,9 +52,6 @@ D7NETWORKS_HTTP_ERROR_MAP = { 500: 'A Serverside Error Occured Handling the Request.', } -# Some Phone Number Detection -IS_PHONE_NO = re.compile(r'^\+?(?P[0-9\s)(+-]+)\s*$') - # Priorities class D7SMSPriority(object): @@ -197,36 +194,26 @@ class NotifyD7Networks(NotifyBase): self.source = None \ if not isinstance(source, six.string_types) else source.strip() - # Parse our targets - self.targets = list() - - for target in parse_list(targets): - # Validate targets and drop bad ones: - result = IS_PHONE_NO.match(target) - if result: - # Further check our phone # for it's digit count - # if it's 
less than 10, then we can assume it's - # a poorly specified phone no and spit a warning - result = ''.join(re.findall(r'\d+', result.group('phone'))) - if len(result) < 11 or len(result) > 14: - self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), - ) - continue - - # store valid phone number - self.targets.append(result) - continue - - self.logger.warning( - 'Dropped invalid phone # ({}) specified.'.format(target)) - - if len(self.targets) == 0: - msg = 'There are no valid targets identified to notify.' + if not (self.user and self.password): + msg = 'A D7 Networks user/pass was not provided.' self.logger.warning(msg) raise TypeError(msg) + # Parse our targets + self.targets = list() + for target in parse_phone_no(targets): + # Validate targets and drop bad ones: + result = result = is_phone_no(target) + if not result: + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) + continue + + # store valid phone number + self.targets.append(result['full']) + return def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): @@ -235,6 +222,11 @@ class NotifyD7Networks(NotifyBase): redirects to the appropriate handling """ + if len(self.targets) == 0: + # There were no services to notify + self.logger.warning('There were no D7 Networks targets to notify.') + return False + # error tracking (used for function return) has_error = False @@ -479,6 +471,6 @@ class NotifyD7Networks(NotifyBase): # The 'to' makes it easier to use yaml configuration if 'to' in results['qsd'] and len(results['qsd']['to']): results['targets'] += \ - NotifyD7Networks.parse_list(results['qsd']['to']) + NotifyD7Networks.parse_phone_no(results['qsd']['to']) return results diff --git a/libs/apprise/plugins/NotifyDBus.py b/libs/apprise/plugins/NotifyDBus.py index ca501bf9e..145e1c05c 100644 --- a/libs/apprise/plugins/NotifyDBus.py +++ b/libs/apprise/plugins/NotifyDBus.py @@ -38,10 +38,6 @@ NOTIFY_DBUS_SUPPORT_ENABLED = 
False # Image support is dependant on the GdkPixbuf library being available NOTIFY_DBUS_IMAGE_SUPPORT = False -# The following are required to hook into the notifications: -NOTIFY_DBUS_INTERFACE = 'org.freedesktop.Notifications' -NOTIFY_DBUS_SETTING_LOCATION = '/org/freedesktop/Notifications' - # Initialize our mainloops LOOP_GLIB = None LOOP_QT = None @@ -132,8 +128,19 @@ class NotifyDBus(NotifyBase): A wrapper for local DBus/Qt Notifications """ + # Set our global enabled flag + enabled = NOTIFY_DBUS_SUPPORT_ENABLED + + requirements = { + # Define our required packaging in order to work + 'details': _('libdbus-1.so.x must be installed.') + } + # The default descriptive name associated with the Notification - service_name = 'DBus Notification' + service_name = _('DBus Notification') + + # The services URL + service_url = 'http://www.freedesktop.org/Software/dbus/' # The default protocols # Python 3 keys() does not return a list object, it's it's own dict_keys() @@ -158,14 +165,9 @@ class NotifyDBus(NotifyBase): # content to display body_max_line_count = 10 - # This entry is a bit hacky, but it allows us to unit-test this library - # in an environment that simply doesn't have the gnome packages - # available to us. It also allows us to handle situations where the - # packages actually are present but we need to test that they aren't. - # If anyone is seeing this had knows a better way of testing this - # outside of what is defined in test/test_glib_plugin.py, please - # let me know! 
:) - _enabled = NOTIFY_DBUS_SUPPORT_ENABLED + # The following are required to hook into the notifications: + dbus_interface = 'org.freedesktop.Notifications' + dbus_setting_location = '/org/freedesktop/Notifications' # Define object templates templates = ( @@ -241,12 +243,6 @@ class NotifyDBus(NotifyBase): """ Perform DBus Notification """ - - if not self._enabled or MAINLOOP_MAP[self.schema] is None: - self.logger.warning( - "{} notifications could not be loaded.".format(self.schema)) - return False - # Acquire our session try: session = SessionBus(mainloop=MAINLOOP_MAP[self.schema]) @@ -265,14 +261,14 @@ class NotifyDBus(NotifyBase): # acquire our dbus object dbus_obj = session.get_object( - NOTIFY_DBUS_INTERFACE, - NOTIFY_DBUS_SETTING_LOCATION, + self.dbus_interface, + self.dbus_setting_location, ) # Acquire our dbus interface dbus_iface = Interface( dbus_obj, - dbus_interface=NOTIFY_DBUS_INTERFACE, + dbus_interface=self.dbus_interface, ) # image path diff --git a/libs/apprise/plugins/NotifyDingTalk.py b/libs/apprise/plugins/NotifyDingTalk.py new file mode 100644 index 000000000..68c069479 --- /dev/null +++ b/libs/apprise/plugins/NotifyDingTalk.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2020 Chris Caron +# All rights reserved. +# +# This code is licensed under the MIT License. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files(the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions : +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import re +import time +import hmac +import hashlib +import base64 +import requests +from json import dumps + +from .NotifyBase import NotifyBase +from ..URLBase import PrivacyMode +from ..common import NotifyFormat +from ..common import NotifyType +from ..utils import parse_list +from ..utils import validate_regex +from ..AppriseLocale import gettext_lazy as _ + +# Register at https://dingtalk.com +# - Download their PC based software as it is the only way you can create +# a custom robot. You can create a custom robot per group. You will +# be provided an access_token that Apprise will need. 
+ +# Syntax: +# dingtalk://{access_token}/ +# dingtalk://{access_token}/{optional_phone_no} +# dingtalk://{access_token}/{phone_no_1}/{phone_no_2}/{phone_no_N/ + +# Some Phone Number Detection +IS_PHONE_NO = re.compile(r'^\+?(?P[0-9\s)(+-]+)\s*$') + + +class NotifyDingTalk(NotifyBase): + """ + A wrapper for DingTalk Notifications + """ + + # The default descriptive name associated with the Notification + service_name = 'DingTalk' + + # The services URL + service_url = 'https://www.dingtalk.com/' + + # All notification requests are secure + secure_protocol = 'dingtalk' + + # A URL that takes you to the setup/help of the specific protocol + setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dingtalk' + + # DingTalk API + notify_url = 'https://oapi.dingtalk.com/robot/send?access_token={token}' + + # Do not set title_maxlen as it is set in a property value below + # since the length varies depending if we are doing a markdown + # based message or a text based one. + # title_maxlen = see below @propery defined + + # Define object templates + templates = ( + '{schema}://{token}/', + '{schema}://{token}/{targets}/', + '{schema}://{secret}@{token}/', + '{schema}://{secret}@{token}/{targets}/', + ) + + # Define our template tokens + template_tokens = dict(NotifyBase.template_tokens, **{ + 'token': { + 'name': _('Token'), + 'type': 'string', + 'private': True, + 'required': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + }, + 'secret': { + 'name': _('Token'), + 'type': 'string', + 'private': True, + 'regex': (r'^[a-z0-9]+$', 'i'), + }, + 'targets': { + 'name': _('Target Phone No'), + 'type': 'list:string', + }, + }) + + # Define our template arguments + template_args = dict(NotifyBase.template_args, **{ + 'to': { + 'alias_of': 'targets', + }, + 'token': { + 'alias_of': 'token', + }, + 'secret': { + 'alias_of': 'secret', + }, + }) + + def __init__(self, token, targets=None, secret=None, **kwargs): + """ + Initialize DingTalk Object + """ + super(NotifyDingTalk, 
self).__init__(**kwargs) + + # Secret Key (associated with project) + self.token = validate_regex( + token, *self.template_tokens['token']['regex']) + if not self.token: + msg = 'An invalid DingTalk API Token ' \ + '({}) was specified.'.format(token) + self.logger.warning(msg) + raise TypeError(msg) + + self.secret = None + if secret: + self.secret = validate_regex( + secret, *self.template_tokens['secret']['regex']) + if not self.secret: + msg = 'An invalid DingTalk Secret ' \ + '({}) was specified.'.format(token) + self.logger.warning(msg) + raise TypeError(msg) + + # Parse our targets + self.targets = list() + + for target in parse_list(targets): + # Validate targets and drop bad ones: + result = IS_PHONE_NO.match(target) + if result: + # Further check our phone # for it's digit count + result = ''.join(re.findall(r'\d+', result.group('phone'))) + if len(result) < 11 or len(result) > 14: + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) + continue + + # store valid phone number + self.targets.append(result) + continue + + self.logger.warning( + 'Dropped invalid phone # ' + '({}) specified.'.format(target), + ) + + return + + def get_signature(self): + """ + Calculates time-based signature so that we can send arbitrary messages. 
+ """ + timestamp = str(round(time.time() * 1000)) + secret_enc = self.secret.encode('utf-8') + str_to_sign_enc = \ + "{}\n{}".format(timestamp, self.secret).encode('utf-8') + hmac_code = hmac.new( + secret_enc, str_to_sign_enc, digestmod=hashlib.sha256).digest() + signature = NotifyDingTalk.quote(base64.b64encode(hmac_code), safe='') + return timestamp, signature + + def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): + """ + Perform DingTalk Notification + """ + + payload = { + 'msgtype': 'text', + 'at': { + 'atMobiles': self.targets, + 'isAtAll': False, + } + } + + if self.notify_format == NotifyFormat.MARKDOWN: + payload['markdown'] = { + 'title': title, + 'text': body, + } + + else: + payload['text'] = { + 'content': body, + } + + # Our Notification URL + notify_url = self.notify_url.format(token=self.token) + + params = None + if self.secret: + timestamp, signature = self.get_signature() + params = { + 'timestamp': timestamp, + 'sign': signature, + } + + # Prepare our headers + headers = { + 'User-Agent': self.app_id, + 'Content-Type': 'application/json' + } + + # Some Debug Logging + self.logger.debug('DingTalk URL: {} (cert_verify={})'.format( + notify_url, self.verify_certificate)) + self.logger.debug('DingTalk Payload: {}' .format(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + notify_url, + data=dumps(payload), + headers=headers, + params=params, + verify=self.verify_certificate, + ) + + if r.status_code != requests.codes.ok: + # We had a problem + status_str = \ + NotifyDingTalk.http_response_code_lookup( + r.status_code) + + self.logger.warning( + 'Failed to send DingTalk notification: ' + '{}{}error={}.'.format( + status_str, + ', ' if status_str else '', + r.status_code)) + + self.logger.debug( + 'Response Details:\r\n{}'.format(r.content)) + return False + + else: + self.logger.info('Sent DingTalk notification.') + + except requests.RequestException as e: 
+ self.logger.warning( + 'A Connection error occured sending DingTalk ' + 'notification.' + ) + self.logger.debug('Socket Exception: %s' % str(e)) + return False + + return True + + @property + def title_maxlen(self): + """ + The title isn't used when not in markdown mode. + """ + return NotifyBase.title_maxlen \ + if self.notify_format == NotifyFormat.MARKDOWN else 0 + + def url(self, privacy=False, *args, **kwargs): + """ + Returns the URL built dynamically based on specified arguments. + """ + + # Define any arguments set + args = { + 'format': self.notify_format, + 'overflow': self.overflow_mode, + 'verify': 'yes' if self.verify_certificate else 'no', + } + + return '{schema}://{secret}{token}/{targets}/?{args}'.format( + schema=self.secure_protocol, + secret='' if not self.secret else '{}@'.format(self.pprint( + self.secret, privacy, mode=PrivacyMode.Secret, safe='')), + token=self.pprint(self.token, privacy, safe=''), + targets='/'.join( + [NotifyDingTalk.quote(x, safe='') for x in self.targets]), + args=NotifyDingTalk.urlencode(args)) + + @staticmethod + def parse_url(url): + """ + Parses the URL and returns enough arguments that can allow + us to substantiate this object. 
+ + """ + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + results['token'] = NotifyDingTalk.unquote(results['host']) + + # if a user has been defined, use it's value as the secret + if results.get('user'): + results['secret'] = results.get('user') + + # Get our entries; split_path() looks after unquoting content for us + # by default + results['targets'] = NotifyDingTalk.split_path(results['fullpath']) + + # Support the use of the `token` keyword argument + if 'token' in results['qsd'] and len(results['qsd']['token']): + results['token'] = \ + NotifyDingTalk.unquote(results['qsd']['token']) + + # Support the use of the `secret` keyword argument + if 'secret' in results['qsd'] and len(results['qsd']['secret']): + results['secret'] = \ + NotifyDingTalk.unquote(results['qsd']['secret']) + + # Support the 'to' variable so that we can support targets this way too + # The 'to' makes it easier to use yaml configuration + if 'to' in results['qsd'] and len(results['qsd']['to']): + results['targets'] += \ + NotifyDingTalk.parse_list(results['qsd']['to']) + + return results diff --git a/libs/apprise/plugins/NotifyDiscord.py b/libs/apprise/plugins/NotifyDiscord.py index 8a8b21f44..a4e7df6d4 100644 --- a/libs/apprise/plugins/NotifyDiscord.py +++ b/libs/apprise/plugins/NotifyDiscord.py @@ -80,6 +80,11 @@ class NotifyDiscord(NotifyBase): # The maximum allowable characters allowed in the body per message body_maxlen = 2000 + # Discord has a limit of the number of fields you can include in an + # embeds message. This value allows the discord message to safely + # break into multiple messages to handle these cases. 
+ discord_max_fields = 10 + # Define object templates templates = ( '{schema}://{webhook_id}/{webhook_token}', @@ -133,6 +138,11 @@ class NotifyDiscord(NotifyBase): 'type': 'bool', 'default': True, }, + 'fields': { + 'name': _('Use Fields'), + 'type': 'bool', + 'default': True, + }, 'image': { 'name': _('Include Image'), 'type': 'bool', @@ -143,7 +153,7 @@ class NotifyDiscord(NotifyBase): def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, footer=False, footer_logo=True, include_image=False, - avatar_url=None, **kwargs): + fields=True, avatar_url=None, **kwargs): """ Initialize Discord Object @@ -181,6 +191,9 @@ class NotifyDiscord(NotifyBase): # Place a thumbnail image inline with the message body self.include_image = include_image + # Use Fields + self.fields = fields + # Avatar URL # This allows a user to provide an over-ride to the otherwise # dynamically generated avatar url images @@ -206,32 +219,23 @@ class NotifyDiscord(NotifyBase): # Acquire image_url image_url = self.image_url(notify_type) + # our fields variable + fields = [] + if self.notify_format == NotifyFormat.MARKDOWN: # Use embeds for payload payload['embeds'] = [{ - 'provider': { + 'author': { 'name': self.app_id, 'url': self.app_url, }, 'title': title, - 'type': 'rich', 'description': body, # Our color associated with our notification 'color': self.color(notify_type, int), }] - # Break titles out so that we can sort them in embeds - fields = self.extract_markdown_sections(body) - - if len(fields) > 0: - # Apply our additional parsing for a better presentation - - # Swap first entry for description - payload['embeds'][0]['description'] = \ - fields[0].get('name') + fields[0].get('value') - payload['embeds'][0]['fields'] = fields[1:] - if self.footer: # Acquire logo URL logo_url = self.image_url(notify_type, logo=True) @@ -251,6 +255,20 @@ class NotifyDiscord(NotifyBase): 'width': 256, } + if self.fields: + # Break titles out so that we can sort them in embeds + description, 
fields = self.extract_markdown_sections(body) + + # Swap first entry for description + payload['embeds'][0]['description'] = description + if fields: + # Apply our additional parsing for a better presentation + payload['embeds'][0]['fields'] = \ + fields[:self.discord_max_fields] + + # Remove entry from head of fields + fields = fields[self.discord_max_fields:] + else: # not markdown payload['content'] = \ @@ -268,6 +286,16 @@ class NotifyDiscord(NotifyBase): # We failed to post our message return False + # Process any remaining fields IF set + if fields: + payload['embeds'][0]['description'] = '' + for i in range(0, len(fields), self.discord_max_fields): + payload['embeds'][0]['fields'] = \ + fields[i:i + self.discord_max_fields] + if not self._send(payload): + # We failed to post our message + return False + if attach: # Update our payload; the idea is to preserve it's other detected # and assigned values for re-use here too @@ -413,8 +441,12 @@ class NotifyDiscord(NotifyBase): 'footer': 'yes' if self.footer else 'no', 'footer_logo': 'yes' if self.footer_logo else 'no', 'image': 'yes' if self.include_image else 'no', + 'fields': 'yes' if self.fields else 'no', } + if self.avatar_url: + params['avatar_url'] = self.avatar_url + # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) @@ -459,6 +491,11 @@ class NotifyDiscord(NotifyBase): # Text To Speech results['tts'] = parse_bool(results['qsd'].get('tts', False)) + # Use sections + # effectively detect multiple fields and break them off + # into sections + results['fields'] = parse_bool(results['qsd'].get('fields', True)) + # Use Footer results['footer'] = parse_bool(results['qsd'].get('footer', False)) @@ -513,6 +550,18 @@ class NotifyDiscord(NotifyBase): fields that get passed as an embed entry to Discord. 
""" + # Search for any header information found without it's own section + # identifier + match = re.match( + r'^\s*(?P[^\s#]+.*?)(?=\s*$|[\r\n]+\s*#)', + markdown, flags=re.S) + + description = match.group('desc').strip() if match else '' + if description: + # Strip description from our string since it has been handled + # now. + markdown = re.sub(description, '', markdown, count=1) + regex = re.compile( r'\s*#[# \t\v]*(?P[^\n]+)(\n|\s*$)' r'\s*((?P[^#].+?)(?=\s*$|[\r\n]+\s*#))?', flags=re.S) @@ -523,9 +572,11 @@ class NotifyDiscord(NotifyBase): d = el.groupdict() fields.append({ - 'name': d.get('name', '').strip('# \r\n\t\v'), - 'value': '```md\n' + - (d.get('value').strip() if d.get('value') else '') + '\n```' + 'name': d.get('name', '').strip('#`* \r\n\t\v'), + 'value': '```{}\n{}```'.format( + 'md' if d.get('value') else '', + d.get('value').strip() + '\n' if d.get('value') else '', + ), }) - return fields + return description, fields diff --git a/libs/apprise/plugins/NotifyEmail.py b/libs/apprise/plugins/NotifyEmail.py index 604fc5b5c..7bd894387 100644 --- a/libs/apprise/plugins/NotifyEmail.py +++ b/libs/apprise/plugins/NotifyEmail.py @@ -106,6 +106,21 @@ EMAIL_TEMPLATES = ( }, ), + # Yandex + ( + 'Yandex', + re.compile( + r'^((?P