Merge development into master

github-actions[bot] 2022-08-31 02:43:49 +00:00 committed by GitHub
commit 5882fc07d2
79 changed files with 4011 additions and 2460 deletions

View File

@ -37,7 +37,7 @@ jobs:
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: "16"
node-version: "lts/*"
- name: Install dependencies
run: npm install

View File

@ -38,7 +38,7 @@ jobs:
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: "15.x"
node-version: "lts/*"
- name: Install Global Tools
run: npm install -g release-it auto-changelog

View File

@ -40,7 +40,7 @@ jobs:
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: "15.x"
node-version: "lts/*"
- name: Install Global Tools
run: npm install -g release-it auto-changelog

View File

@ -24,7 +24,7 @@ jobs:
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: "15.x"
node-version: "lts/*"
- name: Install UI Dependencies
run: npm install

View File

@ -64,7 +64,7 @@ class EpisodesBlacklist(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
media_path = episodeInfo['path']
subtitles_path = request.form.get('subtitles_path')

View File

@ -22,7 +22,7 @@ class Episodes(Resource):
.order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc())\
.dicts()
else:
return "Series or Episode ID not provided", 400
return "Series or Episode ID not provided", 404
result = list(result)
for item in result:

View File

@ -39,7 +39,7 @@ class EpisodesSubtitles(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
@ -98,7 +98,7 @@ class EpisodesSubtitles(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
@ -164,7 +164,7 @@ class EpisodesSubtitles(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
episodePath = path_mappings.path_replace(episodeInfo['path'])

View File

@ -60,7 +60,7 @@ class MoviesBlacklist(Resource):
data = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get_or_none()
if not data:
return 'Movie not found', 500
return 'Movie not found', 404
media_path = data['path']
subtitles_path = request.form.get('subtitles_path')

View File

@ -49,7 +49,7 @@ class Movies(Resource):
try:
profileId = int(profileId)
except Exception:
return '', 400
return 'Languages profile not found', 404
TableMovies.update({
TableMovies.profileId: profileId
@ -79,4 +79,4 @@ class Movies(Resource):
wanted_search_missing_subtitles_movies()
return '', 204
return '', 400
return 'Unknown action', 400

View File

@ -39,7 +39,7 @@ class MoviesSubtitles(Resource):
.get_or_none()
if not movieInfo:
return 'Movie not found', 500
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
sceneName = movieInfo['sceneName'] or 'None'
@ -99,7 +99,7 @@ class MoviesSubtitles(Resource):
.get_or_none()
if not movieInfo:
return 'Movie not found', 500
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
sceneName = movieInfo['sceneName'] or 'None'
@ -161,7 +161,7 @@ class MoviesSubtitles(Resource):
.get_or_none()
if not movieInfo:
return 'Movie not found', 500
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])

View File

@ -50,4 +50,4 @@ class Providers(Resource):
reset_throttled_providers()
return '', 204
return '', 400
return 'Unknown action', 400

View File

@ -30,7 +30,7 @@ class ProviderEpisodes(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
@ -58,7 +58,7 @@ class ProviderEpisodes(Resource):
.get_or_none()
if not episodeInfo:
return 'Episode not found', 500
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])

View File

@ -31,7 +31,7 @@ class ProviderMovies(Resource):
.get_or_none()
if not movieInfo:
return 'Movie not found', 500
return 'Movie not found', 404
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
@ -58,7 +58,7 @@ class ProviderMovies(Resource):
.get_or_none()
if not movieInfo:
return 'Movie not found', 500
return 'Movie not found', 404
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])

View File

@ -75,7 +75,7 @@ class Series(Resource):
try:
profileId = int(profileId)
except Exception:
return '', 400
return 'Languages profile not found', 404
TableShows.update({
TableShows.profileId: profileId
@ -113,4 +113,4 @@ class Series(Resource):
wanted_search_missing_subtitles_series()
return '', 204
return '', 400
return 'Unknown action', 400

View File

@ -37,14 +37,14 @@ class Subtitles(Resource):
.get_or_none()
if not metadata:
return 'Episode not found', 500
return 'Episode not found', 404
video_path = path_mappings.path_replace(metadata['path'])
else:
metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get_or_none()
if not metadata:
return 'Movie not found', 500
return 'Movie not found', 404
video_path = path_mappings.path_replace_movie(metadata['path'])

View File

@ -12,7 +12,7 @@ from utilities.helper import check_credentials
class SystemAccount(Resource):
def post(self):
if settings.auth.type != 'form':
return '', 405
return 'Unknown authentication type defined in config.ini', 404
action = request.args.get('action')
if action == 'login':
@ -26,4 +26,4 @@ class SystemAccount(Resource):
gc.collect()
return '', 204
return '', 401
return 'Unknown action', 400

View File

@ -26,7 +26,7 @@ class SystemBackups(Resource):
restored = prepare_restore(filename)
if restored:
return '', 204
return '', 501
return 'Filename not provided', 400
@authenticate
def delete(self):
@ -35,4 +35,4 @@ class SystemBackups(Resource):
deleted = delete_backup_file(filename)
if deleted:
return '', 204
return '', 501
return 'Filename not provided', 400

View File

@ -39,7 +39,7 @@ class WebHooksPlex(Resource):
if len(splitted_id) == 2:
ids.append({splitted_id[0]: splitted_id[1]})
if not ids:
return '', 404
return 'No GUID found', 400
if media_type == 'episode':
try:
@ -53,7 +53,7 @@ class WebHooksPlex(Resource):
series_imdb_id = show_metadata_dict['props']['pageProps']['aboveTheFoldData']['series']['series']['id']
except Exception:
logging.debug('BAZARR is unable to get series IMDB id.')
return '', 404
return 'IMDB series ID not found', 404
else:
sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
@ -69,7 +69,8 @@ class WebHooksPlex(Resource):
try:
movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
except Exception:
return '', 404
logging.debug('BAZARR is unable to get movie IMDB id.')
return 'IMDB movie ID not found', 404
else:
radarrId = TableMovies.select(TableMovies.radarrId)\
.where(TableMovies.imdbId == movie_imdb_id)\

View File

@ -1,6 +1,9 @@
# coding=utf-8
from flask import Flask, redirect
import os
from flask_cors import CORS
from flask_socketio import SocketIO
from .get_args import args
@ -18,6 +21,9 @@ def create_app():
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True
app.config['JSON_AS_ASCII'] = False
if settings.get('cors', 'enabled'):
CORS(app)
if args.dev:
app.config["DEBUG"] = True
else:
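
For context, a minimal sketch of the conditional CORS wiring this hunk adds to the Flask app factory. CORS_ENABLED is a hypothetical stand-in for the settings.get('cors', 'enabled') lookup, and the route exists only to have something to request:

# Minimal sketch: conditionally add CORS headers to a Flask app.
from flask import Flask
from flask_cors import CORS

CORS_ENABLED = True  # hypothetical stand-in for the cors.enabled config value

app = Flask(__name__)
if CORS_ENABLED:
    CORS(app)  # adds Access-Control-Allow-Origin and related headers to responses

@app.route("/api/ping")
def ping():
    return {"status": "ok"}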

View File

@ -20,6 +20,12 @@ class SimpleConfigParser(simpleconfigparser):
return None
def base_url_slash_cleaner(uri):
while "//" in uri:
uri = uri.replace("//", "/")
return uri
defaults = {
'general': {
'ip': '0.0.0.0',
@ -79,6 +85,9 @@ defaults = {
'username': '',
'password': ''
},
'cors': {
'enabled': 'False'
},
'backup': {
'folder': os.path.join(args.config_dir, 'backup'),
'retention': '31',
@ -206,6 +215,7 @@ defaults = {
'included_codecs': '[]',
'hi_fallback': 'False',
'timeout': '600',
'unknown_as_english': 'False',
},
'karagarga': {
'username': '',
@ -219,7 +229,8 @@ defaults = {
'subsync_threshold': '90',
'use_subsync_movie_threshold': 'False',
'subsync_movie_threshold': '70',
'debug': 'False'
'debug': 'False',
'force_audio': 'False'
},
'series_scores': {
"hash": 359,
@ -284,6 +295,11 @@ if int(settings.sonarr.episodes_sync) < 15:
if int(settings.radarr.movies_sync) < 15:
settings.radarr.movies_sync = "60"
# Make sure to get rid of double slashes in base_url
settings.general.base_url = base_url_slash_cleaner(uri=settings.general.base_url)
settings.sonarr.base_url = base_url_slash_cleaner(uri=settings.sonarr.base_url)
settings.radarr.base_url = base_url_slash_cleaner(uri=settings.radarr.base_url)
if os.path.exists(os.path.join(args.config_dir, 'config', 'config.ini')):
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
settings.write(handle)
@ -373,6 +389,9 @@ def save_settings(settings_items):
elif value == 'false':
value = 'False'
if key in ['settings-general-base_url', 'settings-sonarr-base_url', 'settings-radarr-base_url']:
value = base_url_slash_cleaner(value)
if key == 'settings-auth-password':
if value != settings.auth.password and value is not None:
value = hashlib.md5(value.encode('utf-8')).hexdigest()
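
For reference, a small usage sketch of the base_url_slash_cleaner() helper added above; the sample values are illustrative only:

def base_url_slash_cleaner(uri):
    # Collapse any run of consecutive slashes down to a single slash.
    while "//" in uri:
        uri = uri.replace("//", "/")
    return uri

# Illustrative inputs; the real values come from the general/sonarr/radarr base_url settings.
assert base_url_slash_cleaner("//bazarr//") == "/bazarr/"
assert base_url_slash_cleaner("/sonarr") == "/sonarr"
assert base_url_slash_cleaner("///") == "/"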

View File

@ -436,9 +436,14 @@ def get_desired_languages(profile_id):
if profile_id and profile_id != 'null':
for profile in profile_id_list:
profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat = profile.values()
if profileId == int(profile_id):
languages = [x['language'] for x in items]
break
try:
profile_id_int = int(profile_id)
except ValueError:
continue
else:
if profileId == profile_id_int:
languages = [x['language'] for x in items]
break
return languages

View File

@ -232,6 +232,7 @@ def get_providers_auth():
'ffprobe_path': _FFPROBE_BINARY,
'ffmpeg_path': _FFMPEG_BINARY,
'timeout': settings.embeddedsubtitles.timeout,
'unknown_as_english': settings.embeddedsubtitles.getboolean('unknown_as_english'),
},
'karagarga': {
'username': settings.karagarga.username,

View File

@ -3,11 +3,14 @@
import logging
import json
import time
import threading
from requests import Session
from signalr import Connection
from requests.exceptions import ConnectionError
from signalrcore.hub_connection_builder import HubConnectionBuilder
from collections import deque
from time import sleep
from constants import headers
from sonarr.sync.episodes import sync_episodes, sync_one_episode
@ -21,6 +24,12 @@ from .scheduler import scheduler
from .get_args import args
sonarr_queue = deque()
radarr_queue = deque()
last_event_data = None
class SonarrSignalrClientLegacy:
def __init__(self):
super(SonarrSignalrClientLegacy, self).__init__()
@ -46,8 +55,9 @@ class SonarrSignalrClientLegacy:
except json.decoder.JSONDecodeError:
logging.error("BAZARR cannot parse JSON returned by SignalR feed. This is caused by a permissions "
"issue when Sonarr try to access its /config/.config directory."
"Typically permissions are too permissive - only the user and group Sonarr runs as should have Read/Write permissions (e.g. files 664 / folders 775)"
"You should fix permissions on that directory and restart Sonarr. Also, if you're a Docker image "
"Typically permissions are too permissive - only the user and group Sonarr runs as "
"should have Read/Write permissions (e.g. files 664 / folders 775). You should fix "
"permissions on that directory and restart Sonarr. Also, if you're a Docker image "
"user, you should make sure you properly defined PUID/PGID environment variables. "
"Otherwise, please contact Sonarr support.")
else:
@ -61,21 +71,19 @@ class SonarrSignalrClientLegacy:
try:
self.connection.close()
except Exception:
pass
self.connection.started = False
if log:
logging.info('BAZARR SignalR client for Sonarr is now disconnected.')
def restart(self):
if self.connection:
if self.connection.started:
try:
self.stop(log=False)
except Exception:
self.connection.started = False
self.stop(log=False)
if settings.general.getboolean('use_sonarr'):
self.start()
def exception_handler(self, type, exception, traceback):
def exception_handler(self):
sonarr_queue.clear()
logging.error('BAZARR connection to Sonarr SignalR feed has been lost.')
self.restart()
@ -87,7 +95,7 @@ class SonarrSignalrClientLegacy:
sonarr_method = ['series', 'episode']
for item in sonarr_method:
sonarr_hub.client.on(item, dispatcher)
sonarr_hub.client.on(item, feed_queue)
self.connection.exception += self.exception_handler
@ -119,6 +127,7 @@ class SonarrSignalrClient:
self.start()
def exception_handler(self):
sonarr_queue.clear()
logging.error("BAZARR connection to Sonarr SignalR feed has failed. We'll try to reconnect.")
self.restart()
@ -148,7 +157,7 @@ class SonarrSignalrClient:
'Trying to reconnect...'))
self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Sonarr is disconnected.'))
self.connection.on_error(self.exception_handler)
self.connection.on("receiveMessage", dispatcher)
self.connection.on("receiveMessage", feed_queue)
class RadarrSignalrClient:
@ -178,6 +187,7 @@ class RadarrSignalrClient:
self.start()
def exception_handler(self):
radarr_queue.clear()
logging.error("BAZARR connection to Radarr SignalR feed has failed. We'll try to reconnect.")
self.restart()
@ -206,38 +216,49 @@ class RadarrSignalrClient:
'Trying to reconnect...'))
self.connection.on_close(lambda: logging.debug('BAZARR SignalR client for Radarr is disconnected.'))
self.connection.on_error(self.exception_handler)
self.connection.on("receiveMessage", dispatcher)
self.connection.on("receiveMessage", feed_queue)
def dispatcher(data):
try:
topic = media_id = action = None
episodesChanged = None
if isinstance(data, dict):
series_title = series_year = episode_title = season_number = episode_number = movie_title = movie_year = None
#
try:
episodesChanged = False
topic = data['name']
try:
media_id = data['body']['resource']['id']
action = data['body']['action']
media_id = data['body']['resource']['id']
action = data['body']['action']
if topic == 'series':
if 'episodesChanged' in data['body']['resource']:
episodesChanged = data['body']['resource']['episodesChanged']
except KeyError:
return
elif isinstance(data, list):
topic = data[0]['name']
try:
media_id = data[0]['body']['resource']['id']
action = data[0]['body']['action']
except KeyError:
return
series_title = data['body']['resource']['title']
series_year = data['body']['resource']['year']
elif topic == 'episode':
series_title = data['body']['resource']['series']['title']
series_year = data['body']['resource']['series']['year']
episode_title = data['body']['resource']['title']
season_number = data['body']['resource']['seasonNumber']
episode_number = data['body']['resource']['episodeNumber']
elif topic == 'movie':
movie_title = data['body']['resource']['title']
movie_year = data['body']['resource']['year']
except KeyError:
return
if topic == 'series':
logging.debug(f'Event received from Sonarr for series: {series_title} ({series_year})')
update_one_series(series_id=media_id, action=action)
if episodesChanged:
# this will happen if a season monitored status is changed.
sync_episodes(series_id=media_id, send_event=True)
elif topic == 'episode':
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')
sync_one_episode(episode_id=media_id, defer_search=settings.sonarr.getboolean('defer_search_signalr'))
elif topic == 'movie':
logging.debug(f'Event received from Radarr for movie: {movie_title} ({movie_year})')
update_one_movie(movie_id=media_id, action=action,
defer_search=settings.radarr.getboolean('defer_search_signalr'))
except Exception as e:
@ -246,6 +267,43 @@ def dispatcher(data):
return
def feed_queue(data):
# check if event is a duplicate of the previous one
global last_event_data
if data == last_event_data:
return
else:
last_event_data = data
# some Sonarr versions send the event as a list containing a single dict; unwrap it to a dict
if isinstance(data, list) and len(data):
data = data[0]
# if data is a dict and contains an event for a series, episode or movie, add it to the proper queue
if isinstance(data, dict) and 'name' in data:
if data['name'] in ['series', 'episode']:
sonarr_queue.append(data)
elif data['name'] == 'movie':
radarr_queue.append(data)
def consume_queue(queue):
# get event data from the queue one item at a time and dispatch it
while True:
try:
data = queue.popleft()
except IndexError:
pass
else:
dispatcher(data)
sleep(0.1)
# start both queue consuming threads
threading.Thread(target=consume_queue, args=(sonarr_queue,)).start()
threading.Thread(target=consume_queue, args=(radarr_queue,)).start()
# instantiate proper SignalR client
sonarr_signalr_client = SonarrSignalrClientLegacy() if get_sonarr_info.version().startswith(('0.', '2.', '3.')) else \
SonarrSignalrClient()
radarr_signalr_client = RadarrSignalrClient()
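
The SignalR changes above replace direct dispatching with per-application queues drained by background threads, plus de-duplication of identical consecutive events. A condensed, standalone sketch of that producer/consumer pattern, using only the standard library (print() stands in for dispatcher()):

import threading
from collections import deque
from time import sleep

event_queue = deque()
last_event_data = None

def feed_queue(data):
    # Drop an event identical to the previous one (SignalR feeds sometimes repeat themselves).
    global last_event_data
    if data == last_event_data:
        return
    last_event_data = data
    event_queue.append(data)

def consume_queue(queue):
    # Pop one event at a time and hand it to the dispatcher; idle briefly when the queue is empty.
    while True:
        try:
            data = queue.popleft()
        except IndexError:
            pass
        else:
            print("dispatching", data)  # stand-in for dispatcher(data)
        sleep(0.1)

threading.Thread(target=consume_queue, args=(event_queue,), daemon=True).start()
feed_queue({"name": "episode", "body": {}})
feed_queue({"name": "episode", "body": {}})  # duplicate, silently ignored
sleep(0.5)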

View File

@ -2,6 +2,7 @@
import os
import requests
import mimetypes
from flask import request, abort, render_template, Response, session, send_file, stream_with_context, Blueprint
from functools import wraps
@ -30,6 +31,14 @@ static_bp = Blueprint('images', __name__,
ui_bp.register_blueprint(static_bp)
mimetypes.add_type('application/javascript', '.js')
mimetypes.add_type('text/css', '.css')
mimetypes.add_type('font/woff2', '.woff2')
mimetypes.add_type('image/svg+xml', '.svg')
mimetypes.add_type('image/png', '.png')
mimetypes.add_type('image/x-icon', '.ico')
def check_login(actual_method):
@wraps(actual_method)
def wrapper(*args, **kwargs):
@ -122,6 +131,12 @@ def movies_images(url):
return Response(stream_with_context(req.iter_content(2048)), content_type=req.headers['content-type'])
@check_login
@ui_bp.route('/system/backup/download/<path:filename>', methods=['GET'])
def backup_download(filename):
return send_file(os.path.join(settings.backup.folder, filename), cache_timeout=0, as_attachment=True)
def configured():
System.update({System.configured: '1'}).execute()

View File

@ -1,5 +1,6 @@
# coding=utf-8
import os
import logging
from peewee import IntegrityError
@ -69,7 +70,12 @@ def update_movies(send_event=True):
if movie['hasFile'] is True:
if 'movieFile' in movie:
if movie['movieFile']['size'] > 20480:
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace_movie(movie['movieFile']['path']))
except OSError:
bazarr_file_size = 0
if movie['movieFile']['size'] > 20480 or bazarr_file_size > 20480:
# Add movies in radarr to current movies list
current_movies_radarr.append(str(movie['tmdbId']))
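
This hunk (and the matching Sonarr episode hunk below) adds the same guard: accept the file when either the size reported by Radarr/Sonarr or the size Bazarr can see on disk exceeds 20480 bytes. A hedged sketch of that check in isolation; the path and helper name are made up:

import os

MIN_SIZE = 20480  # byte threshold used in the hunks above and below

def file_is_big_enough(reported_size, local_path):
    # Fall back to the size of the file as Bazarr sees it when the API-reported size is tiny or missing.
    try:
        bazarr_file_size = os.path.getsize(local_path)
    except OSError:
        # Path not reachable from Bazarr (unmapped mount, deleted file, ...)
        bazarr_file_size = 0
    return reported_size > MIN_SIZE or bazarr_file_size > MIN_SIZE

print(file_is_big_enough(0, "/tmp/does-not-exist.mkv"))  # False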

View File

@ -1,5 +1,6 @@
# coding=utf-8
import os
import logging
from peewee import IntegrityError
@ -71,7 +72,12 @@ def sync_episodes(series_id=None, send_event=True):
if 'hasFile' in episode:
if episode['hasFile'] is True:
if 'episodeFile' in episode:
if episode['episodeFile']['size'] > 20480:
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode['id'])

View File

@ -4,6 +4,7 @@ import os
from app.database import TableShows
from sonarr.info import get_sonarr_info
from utilities.path_mappings import path_mappings
from .converter import SonarrFormatVideoCodec, SonarrFormatAudioCodec
@ -28,7 +29,10 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
if get_sonarr_info.is_legacy():
audio_language = profile_id_to_language(show['qualityProfileId'], audio_profiles)
else:
audio_language = profile_id_to_language(show['languageProfileId'], audio_profiles)
if 'languageProfileId' in show:
audio_language = profile_id_to_language(show['languageProfileId'], audio_profiles)
else:
audio_language = []
tags = [d['label'] for d in tags_dict if d['id'] in show['tags']]
@ -79,7 +83,11 @@ def episodeParser(episode):
if 'hasFile' in episode:
if episode['hasFile'] is True:
if 'episodeFile' in episode:
if episode['episodeFile']['size'] > 20480:
try:
bazarr_file_size = os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
if 'sceneName' in episode['episodeFile']:
sceneName = episode['episodeFile']['sceneName']
else:
@ -91,6 +99,12 @@ def episodeParser(episode):
if isinstance(item, dict):
if 'name' in item:
audio_language.append(item['name'])
elif 'languages' in episode['episodeFile'] and len(episode['episodeFile']['languages']):
items = episode['episodeFile']['languages']
if isinstance(items, list):
for item in items:
if 'name' in item:
audio_language.append(item['name'])
else:
audio_language = TableShows.get(TableShows.sonarrSeriesId == episode['seriesId']).audio_language

View File

@ -30,6 +30,10 @@ def get_profile_list():
logging.exception("BAZARR Error trying to get profiles from Sonarr.")
return None
# return an empty list when using Sonarr v4, which no longer supports series language profiles
if profiles_json.status_code == 404:
return profiles_list
# Parsing data returned from Sonarr
if get_sonarr_info.is_legacy():
for profile in profiles_json.json():
@ -87,7 +91,11 @@ def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):
logging.exception("BAZARR Error trying to get series from Sonarr.")
return
else:
return r.json()
result = r.json()
if isinstance(result, dict):
return list(result)
else:
return r.json()
def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=None):

View File

@ -55,7 +55,7 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
if subtitle_hi:
lang = lang + ':hi'
logging.debug("BAZARR embedded subtitles detected: " + lang)
actual_subtitles.append([lang, None])
actual_subtitles.append([lang, None, None])
except Exception:
logging.debug("BAZARR unable to index this unrecognized language: " + subtitle_language)
pass
@ -68,6 +68,22 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
try:
dest_folder = get_subtitle_destination_folder() or ''
core.CUSTOM_PATHS = [dest_folder] if dest_folder else []
# get previously indexed subtitles that haven't changed:
item = TableMovies.select(TableMovies.subtitles) \
.where(TableMovies.path == original_path) \
.dicts() \
.get_or_none()
if not item:
previously_indexed_subtitles_to_exclude = []
else:
previously_indexed_subtitles = ast.literal_eval(item['subtitles']) if item['subtitles'] else []
previously_indexed_subtitles_to_exclude = [x for x in previously_indexed_subtitles
if len(x) == 3 and
x[1] and
os.path.isfile(path_mappings.path_replace(x[1])) and
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set())
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
@ -75,7 +91,8 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
full_dest_folder_path = dest_folder
elif settings.general.subfolder == "relative":
full_dest_folder_path = os.path.join(os.path.dirname(reversed_path), dest_folder)
subtitles = guess_external_subtitles(full_dest_folder_path, subtitles)
subtitles = guess_external_subtitles(full_dest_folder_path, subtitles, "movie",
previously_indexed_subtitles_to_exclude)
except Exception:
logging.exception("BAZARR unable to index external subtitles.")
pass
@ -107,7 +124,8 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
else:
language_str = str(language)
logging.debug("BAZARR external subtitles detected: " + language_str)
actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path)])
actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path),
os.stat(subtitle_path).st_size])
TableMovies.update({TableMovies.subtitles: str(actual_subtitles)})\
.where(TableMovies.path == original_path)\
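
The indexing code above (and its series counterpart below) now stores a third element, the subtitle file size, and skips re-guessing files whose path and size are unchanged. A simplified sketch of how the exclusion list is built, with a hypothetical database value in place of the real row and without the path-mapping step:

import ast
import os

# Hypothetical value of the subtitles column; real rows are written by store_subtitles_movie().
stored = "[['en', '/media/movie/movie.en.srt', 1234], ['fr:hi', None, None]]"

previously_indexed = ast.literal_eval(stored) if stored else []
previously_indexed_to_exclude = [
    x for x in previously_indexed
    if len(x) == 3                      # only the new three-element format qualifies
    and x[1]                            # embedded tracks (path is None) cannot be skipped
    and os.path.isfile(x[1])            # the file must still exist...
    and os.stat(x[1]).st_size == x[2]   # ...with an unchanged size
]
print(previously_indexed_to_exclude)  # [] unless the sample path really exists with that size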

View File

@ -55,7 +55,7 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
if subtitle_hi:
lang = lang + ":hi"
logging.debug("BAZARR embedded subtitles detected: " + lang)
actual_subtitles.append([lang, None])
actual_subtitles.append([lang, None, None])
except Exception as error:
logging.debug("BAZARR unable to index this unrecognized language: %s (%s)", subtitle_language, error)
except Exception:
@ -66,6 +66,22 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
try:
dest_folder = get_subtitle_destination_folder()
core.CUSTOM_PATHS = [dest_folder] if dest_folder else []
# get previously indexed subtitles that haven't changed:
item = TableEpisodes.select(TableEpisodes.subtitles) \
.where(TableEpisodes.path == original_path) \
.dicts() \
.get_or_none()
if not item:
previously_indexed_subtitles_to_exclude = []
else:
previously_indexed_subtitles = ast.literal_eval(item['subtitles']) if item['subtitles'] else []
previously_indexed_subtitles_to_exclude = [x for x in previously_indexed_subtitles
if len(x) == 3 and
x[1] and
os.path.isfile(path_mappings.path_replace(x[1])) and
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
full_dest_folder_path = os.path.dirname(reversed_path)
@ -74,7 +90,8 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
full_dest_folder_path = dest_folder
elif settings.general.subfolder == "relative":
full_dest_folder_path = os.path.join(os.path.dirname(reversed_path), dest_folder)
subtitles = guess_external_subtitles(full_dest_folder_path, subtitles)
subtitles = guess_external_subtitles(full_dest_folder_path, subtitles, "series",
previously_indexed_subtitles_to_exclude)
except Exception:
logging.exception("BAZARR unable to index external subtitles.")
else:
@ -105,7 +122,8 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
else:
language_str = str(language)
logging.debug("BAZARR external subtitles detected: " + language_str)
actual_subtitles.append([language_str, path_mappings.path_replace_reverse(subtitle_path)])
actual_subtitles.append([language_str, path_mappings.path_replace_reverse(subtitle_path),
os.stat(subtitle_path).st_size])
TableEpisodes.update({TableEpisodes.subtitles: str(actual_subtitles)})\
.where(TableEpisodes.path == original_path)\

View File

@ -11,6 +11,7 @@ from charamel import Detector
from app.config import settings
from constants import hi_regex
from utilities.path_mappings import path_mappings
def get_external_subtitles_path(file, subtitle):
@ -40,16 +41,34 @@ def get_external_subtitles_path(file, subtitle):
return path
def guess_external_subtitles(dest_folder, subtitles):
def guess_external_subtitles(dest_folder, subtitles, media_type, previously_indexed_subtitles_to_exclude=None):
for subtitle, language in subtitles.items():
subtitle_path = os.path.join(dest_folder, subtitle)
reversed_subtitle_path = path_mappings.path_replace_reverse(subtitle_path) if media_type == "series" \
else path_mappings.path_replace_reverse_movie(subtitle_path)
if previously_indexed_subtitles_to_exclude:
x_found_lang = None
for x_lang, x_path, x_size in previously_indexed_subtitles_to_exclude:
if x_path == reversed_subtitle_path and x_size == os.stat(subtitle_path).st_size:
x_found_lang = x_lang
break
if x_found_lang:
if not language:
x_hi = ':hi' in x_found_lang
subtitles[subtitle] = Language.rebuild(Language.fromietf(x_found_lang), hi=x_hi)
continue
if not language:
subtitle_path = os.path.join(dest_folder, subtitle)
if os.path.exists(subtitle_path) and os.path.splitext(subtitle_path)[1] in core.SUBTITLE_EXTENSIONS:
logging.debug("BAZARR falling back to file content analysis to detect language.")
detected_language = None
# detect forced subtitles
forced = True if os.path.splitext(os.path.splitext(subtitle)[0])[1] == '.forced' else False
# to improve performance, skip detection for files larger than 1 MB
if os.path.getsize(subtitle_path) > 1*1024*1024:
if os.path.getsize(subtitle_path) > 1 * 1024 * 1024:
logging.debug("BAZARR subtitles file is too large to be text based. Skipping this file: " +
subtitle_path)
continue
@ -92,21 +111,21 @@ def guess_external_subtitles(dest_folder, subtitles):
logging.debug("BAZARR external subtitles detected and guessed this language: " + str(
detected_language))
try:
subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=False,
subtitles[subtitle] = Language.rebuild(Language.fromietf(detected_language), forced=forced,
hi=False)
except Exception:
pass
# If language is still None (undetected), skip it
if not language:
pass
if hasattr(subtitles[subtitle], 'basename') and not subtitles[subtitle].basename:
continue
# Skip HI detection if forced
elif language.forced:
pass
if hasattr(language, 'forced') and language.forced:
continue
# Detect hearing-impaired external subtitles not identified in filename
elif not subtitles[subtitle].hi:
if hasattr(subtitles[subtitle], 'hi') and not subtitles[subtitle].hi:
subtitle_path = os.path.join(dest_folder, subtitle)
# check if the file exists:

View File

@ -81,11 +81,9 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
radarr_id=movie_metadata['radarrId'])
if use_postprocessing is True:
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
downloaded_language_code2, downloaded_language_code3, audio_language,
audio_language_code2, audio_language_code3, subtitle.language.forced,
percent_score, subtitle_id, downloaded_provider, series_id, episode_id,
subtitle.language.hi)
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language, downloaded_language_code2,
downloaded_language_code3, audio_language, audio_language_code2, audio_language_code3,
percent_score, subtitle_id, downloaded_provider, series_id, episode_id)
if media_type == 'series':
use_pp_threshold = settings.general.getboolean('use_postprocessing_threshold')

View File

@ -54,6 +54,10 @@ class SubSyncer:
try:
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path,
'--vad', self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.getboolean('force_audio'):
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.getboolean('debug'):
unparsed_args.append('--make-test-case')
parser = make_parser()

View File

@ -37,7 +37,7 @@ def upgrade_subtitles():
if settings.general.getboolean('use_sonarr'):
upgradable_episodes_conditions = [(TableHistory.action << query_actions),
(TableHistory.timestamp > minimum_timestamp),
(TableHistory.score is not None)]
(TableHistory.score.is_null(False))]
upgradable_episodes_conditions += get_exclusion_clause('series')
upgradable_episodes = TableHistory.select(TableHistory.video_path,
TableHistory.language,
@ -86,7 +86,7 @@ def upgrade_subtitles():
if settings.general.getboolean('use_radarr'):
upgradable_movies_conditions = [(TableHistoryMovie.action << query_actions),
(TableHistoryMovie.timestamp > minimum_timestamp),
(TableHistoryMovie.score is not None)]
(TableHistoryMovie.score.is_null(False))]
upgradable_movies_conditions += get_exclusion_clause('movie')
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
TableHistoryMovie.language,
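
The change above matters because TableHistory.score is not None is evaluated by Python as an identity test on the field object itself, so it is always True and never reaches SQL, whereas .is_null(False) builds the intended score IS NOT NULL clause. A small peewee demonstration against an in-memory SQLite table (model and values are illustrative):

from peewee import SqliteDatabase, Model, IntegerField

db = SqliteDatabase(":memory:")

class History(Model):
    score = IntegerField(null=True)

    class Meta:
        database = db

db.connect()
db.create_tables([History])
History.create(score=None)
History.create(score=96)

# Old condition: a plain Python identity check on the field object, always truthy, never SQL.
print(History.score is not None)  # True

# Corrected condition: a real "WHERE score IS NOT NULL" clause.
print(History.select().where(History.score.is_null(False)).count())  # 1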

View File

@ -150,10 +150,9 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
percent_score=100, radarr_id=movie_metadata['radarrId'], forced=forced)
if use_postprocessing:
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language,
uploaded_language_code2, uploaded_language_code3, audio_language,
audio_language_code2, audio_language_code3, forced, 100, "1", "manual", series_id,
episode_id, hi=hi)
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, uploaded_language_code2,
uploaded_language_code3, audio_language, audio_language_code2, audio_language_code3, 100,
"1", "manual", series_id, episode_id)
postprocessing(command, path)
if media_type == 'series':

View File

@ -4,7 +4,7 @@
"machine": "aarch64",
"directory": "unrar",
"name": "unrar",
"checksum": "07a6371cc7db8493352739ce26b19ea1",
"checksum": "be7a08a75ffb1dcc5f8c6b16a75e822c",
"url": "https://github.com/morpheus65535/bazarr-binaries/raw/master/bin/Linux/aarch64/unrar/unrar"
},
{

View File

@ -1,24 +1,36 @@
# coding=utf-8
import os
import re
def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language, episode_language_code2, episode_language_code3, forced, score, subtitle_id, provider, series_id, episode_id, hi):
pp_command = pp_command.replace('{{directory}}', os.path.dirname(episode))
pp_command = pp_command.replace('{{episode}}', episode)
pp_command = pp_command.replace('{{episode_name}}', os.path.splitext(os.path.basename(episode))[0])
pp_command = pp_command.replace('{{subtitles}}', str(subtitles))
pp_command = pp_command.replace('{{subtitles_language}}', str(language))
pp_command = pp_command.replace('{{subtitles_language_code2}}', str(language_code2))
pp_command = pp_command.replace('{{subtitles_language_code3}}', str(language_code3))
pp_command = pp_command.replace('{{subtitles_language_code2_dot}}', str(language_code2).replace(':', '.'))
pp_command = pp_command.replace('{{subtitles_language_code3_dot}}', str(language_code3).replace(':', '.'))
pp_command = pp_command.replace('{{episode_language}}', str(episode_language))
pp_command = pp_command.replace('{{episode_language_code2}}', str(episode_language_code2))
pp_command = pp_command.replace('{{episode_language_code3}}', str(episode_language_code3))
pp_command = pp_command.replace('{{score}}', str(score))
pp_command = pp_command.replace('{{subtitle_id}}', str(subtitle_id))
pp_command = pp_command.replace('{{provider}}', str(provider))
pp_command = pp_command.replace('{{series_id}}', str(series_id))
pp_command = pp_command.replace('{{episode_id}}', str(episode_id))
# Wraps the input string within quotes & escapes the string
def _escape(in_str):
raw_map = {8: r'\\b', 7: r'\\a', 12: r'\\f', 10: r'\\n', 13: r'\\r', 9: r'\\t', 11: r'\\v', 34: r'\"', 92: r'\\'}
raw_str = r''.join(raw_map.get(ord(i), i) for i in in_str)
return f"\"{raw_str}\""
def pp_replace(pp_command, episode, subtitles, language, language_code2, language_code3, episode_language,
episode_language_code2, episode_language_code3, score, subtitle_id, provider, series_id, episode_id):
pp_command = re.sub(r'[\'"]?{{directory}}[\'"]?', _escape(os.path.dirname(episode)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode}}[\'"]?', _escape(episode), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_name}}[\'"]?', _escape(os.path.splitext(os.path.basename(episode))[0]),
pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles}}[\'"]?', _escape(str(subtitles)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles_language}}[\'"]?', _escape(str(language)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles_language_code2}}[\'"]?', _escape(str(language_code2)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles_language_code3}}[\'"]?', _escape(str(language_code3)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles_language_code2_dot}}[\'"]?',
_escape(str(language_code2).replace(':', '.')), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitles_language_code3_dot}}[\'"]?',
_escape(str(language_code3).replace(':', '.')), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_language}}[\'"]?', _escape(str(episode_language)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_language_code2}}[\'"]?', _escape(str(episode_language_code2)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_language_code3}}[\'"]?', _escape(str(episode_language_code3)), pp_command)
pp_command = re.sub(r'[\'"]?{{score}}[\'"]?', _escape(str(score)), pp_command)
pp_command = re.sub(r'[\'"]?{{subtitle_id}}[\'"]?', _escape(str(subtitle_id)), pp_command)
pp_command = re.sub(r'[\'"]?{{provider}}[\'"]?', _escape(str(provider)), pp_command)
pp_command = re.sub(r'[\'"]?{{series_id}}[\'"]?', _escape(str(series_id)), pp_command)
pp_command = re.sub(r'[\'"]?{{episode_id}}[\'"]?', _escape(str(episode_id)), pp_command)
return pp_command
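
The rewritten pp_replace() above switches from plain str.replace to regex substitution so that each placeholder, with or without surrounding quotes, is re-emitted as an escaped, double-quoted argument (the forced and hi parameters are dropped). A short usage sketch with made-up values; the import path is an assumption and should point at wherever the definitions above live:

# Illustrative only: how a quoted placeholder is normalized by the regex-based pp_replace().
from utilities.post_processing import pp_replace  # assumed module path

cmd = pp_replace(
    'notify.sh "{{episode}}" {{subtitles_language_code2}} {{score}}',
    episode='/tv/My "Show"/S01E01.mkv',
    subtitles='/tv/My "Show"/S01E01.en.srt',
    language='English', language_code2='en', language_code3='eng',
    episode_language='English', episode_language_code2='en', episode_language_code3='eng',
    score=96, subtitle_id='1', provider='manual', series_id='10', episode_id='100',
)
print(cmd)
# notify.sh "/tv/My \"Show\"/S01E01.mkv" "en" "96"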

View File

@ -3,5 +3,5 @@
[ -n "$CI" ] && exit 0
cd frontend
npx pretty-quick --staged
cd frontend || exit
npx pretty-quick --staged --pattern "frontend/**/*.*"

File diff suppressed because it is too large

View File

@ -50,7 +50,7 @@
"lodash": "^4",
"moment": "^2.29",
"prettier": "^2",
"prettier-plugin-organize-imports": "^2",
"prettier-plugin-organize-imports": "^3",
"pretty-quick": "^3",
"react-dropzone": "^14",
"react-table": "^7",

View File

@ -24,8 +24,10 @@ const App: FunctionComponent = () => {
setCriticalError(detail.message);
});
useWindowEvent("app-login-required", () => {
navigate("/login");
useWindowEvent("app-auth-changed", (ev) => {
if (!ev.detail.authenticated) {
navigate("/login");
}
});
useWindowEvent("app-online-status", ({ detail }) => {

View File

@ -10,6 +10,8 @@ import api from "../raw";
const cacheEpisodes = (client: QueryClient, episodes: Item.Episode[]) => {
episodes.forEach((item) => {
client.setQueryData([QueryKeys.Episodes, item.sonarrEpisodeId], item);
client.setQueryData(
[
QueryKeys.Series,

View File

@ -1,5 +1,5 @@
import { Environment } from "@/utilities";
import { setLoginRequired } from "@/utilities/event";
import { setAuthenticated } from "@/utilities/event";
import { useMemo } from "react";
import { useMutation, useQuery, useQueryClient } from "react-query";
import { QueryKeys } from "../queries/keys";
@ -173,7 +173,7 @@ export function useSystem() {
() => api.system.logout(),
{
onSuccess: () => {
setLoginRequired();
setAuthenticated(false);
client.clear();
},
}

View File

@ -1,7 +1,7 @@
import SocketIO from "@/modules/socketio";
import socketio from "@/modules/socketio";
import { notification } from "@/modules/task";
import { LOG } from "@/utilities/console";
import { setLoginRequired } from "@/utilities/event";
import { setAuthenticated } from "@/utilities/event";
import { showNotification } from "@mantine/notifications";
import Axios, { AxiosError, AxiosInstance, CancelTokenSource } from "axios";
import { Environment } from "../../utilities";
@ -17,17 +17,19 @@ function GetErrorMessage(data: unknown, defaultMsg = "Unknown error"): string {
class BazarrClient {
axios!: AxiosInstance;
source!: CancelTokenSource;
bIsAuthenticated: boolean;
constructor() {
this.bIsAuthenticated = false;
const baseUrl = `${Environment.baseUrl}/api/`;
LOG("info", "initializing BazarrClient with", baseUrl);
this.initialize(baseUrl, Environment.apiKey);
SocketIO.initialize();
socketio.initialize();
}
initialize(url: string, apikey?: string) {
LOG("info", "initializing BazarrClient with baseUrl", url);
this.axios = Axios.create({
baseURL: url,
});
@ -45,6 +47,10 @@ class BazarrClient {
this.axios.interceptors.response.use(
(resp) => {
if (resp.status >= 200 && resp.status < 300) {
if (!this.bIsAuthenticated) {
this.bIsAuthenticated = true;
setAuthenticated(true);
}
return Promise.resolve(resp);
} else {
const error: BackendError = {
@ -78,8 +84,9 @@ class BazarrClient {
const { code, message } = error;
switch (code) {
case 401:
setLoginRequired();
break;
this.bIsAuthenticated = false;
setAuthenticated(false);
return;
}
LOG("error", "A error has occurred", code);

View File

@ -291,6 +291,7 @@ const SeriesUploadForm: FunctionComponent<Props> = ({
return (
<Selector
{...episodeOptions}
searchable
className={classes.select}
value={value}
onChange={(item) => {

View File

@ -47,7 +47,12 @@ const TimeOffsetForm: FunctionComponent<Props> = ({ selections, onSubmit }) => {
return (
<form
onSubmit={form.onSubmit(({ positive, hour, min, sec, ms }) => {
const action = convertToAction(hour, min, sec, ms);
let action: string;
if (positive) {
action = convertToAction(hour, min, sec, ms);
} else {
action = convertToAction(-hour, -min, -sec, -ms);
}
selections.forEach((s) =>
task.create(s.path, TaskName, mutateAsync, {

View File

@ -17,6 +17,7 @@ import {
Anchor,
Badge,
Button,
Code,
Collapse,
Divider,
Group,
@ -26,6 +27,7 @@ import {
Text,
} from "@mantine/core";
import { useHover } from "@mantine/hooks";
import { isString } from "lodash";
import { FunctionComponent, useCallback, useMemo, useState } from "react";
import { UseQueryResult } from "react-query";
import { Column } from "react-table";
@ -186,6 +188,9 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
[download, item]
);
const bSceneNameAvailable =
isString(item.sceneName) && item.sceneName.length !== 0;
return (
<Stack>
<Alert
@ -193,7 +198,9 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
color="gray"
icon={<FontAwesomeIcon icon={faInfoCircle}></FontAwesomeIcon>}
>
{item?.path}
<Text size="sm">{item?.path}</Text>
<Divider hidden={!bSceneNameAvailable} my="xs"></Divider>
<Code hidden={!bSceneNameAvailable}>{item?.sceneName}</Code>
</Alert>
<Collapse in={!isStale && !results.isFetching}>
<PageTable

View File

@ -1,8 +1,8 @@
import { debounce, forIn, remove, uniq } from "lodash";
import { onlineManager } from "react-query";
import { io, Socket } from "socket.io-client";
import { Environment } from "../../utilities";
import { ENSURE, LOG } from "../../utilities/console";
import { Environment, isDevEnv } from "../../utilities";
import { ENSURE, GROUP, LOG } from "../../utilities/console";
import { createDefaultReducer } from "./reducer";
class SocketIOClient {
@ -31,13 +31,50 @@ class SocketIOClient {
this.reducers = [];
onlineManager.setOnline(false);
if (isDevEnv) {
window.socketIO = {
dump: () => {
GROUP("Socket.IO Reducers", (logger) => {
this.reducers.forEach((reducer) => {
logger(reducer.key);
});
});
},
emit: (e) => {
if (e) {
this.onEvent(e);
}
},
};
}
}
initialize() {
LOG("info", "Initializing Socket.IO client...");
this.reducers.push(...createDefaultReducer());
window.addEventListener("app-auth-changed", (ev) => {
const authenticated = ev.detail.authenticated;
LOG("info", "Authentication status change to", authenticated);
if (authenticated) {
this.connect();
} else {
this.disconnect();
}
});
}
connect() {
LOG("info", "Connecting Socket.IO client...");
this.socket.connect();
}
disconnect() {
LOG("info", "Disconnecting Socket.IO client...");
this.socket.disconnect();
}
addReducer(reducer: SocketIO.Reducer) {
this.reducers.push(reducer);
}

View File

@ -1,5 +1,6 @@
import queryClient from "@/apis/queries";
import { QueryKeys } from "@/apis/queries/keys";
import { LOG } from "@/utilities/console";
import { setCriticalError, setOnlineStatus } from "@/utilities/event";
import { showNotification } from "@mantine/notifications";
import { notification, task } from "../task";
@ -36,11 +37,13 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
{
key: "series",
update: (ids) => {
LOG("info", "Invalidating series", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Series, id]);
});
},
delete: (ids) => {
LOG("info", "Invalidating series", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Series, id]);
});
@ -49,11 +52,13 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
{
key: "movie",
update: (ids) => {
LOG("info", "Invalidating movies", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Movies, id]);
});
},
delete: (ids) => {
LOG("info", "Invalidating movies", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Movies, id]);
});
@ -62,13 +67,36 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
{
key: "episode",
update: (ids) => {
// Currently invalidating episodes is impossible because we don't directly fetch episodes (we fetch episodes by series id)
// So we need to invalidate series instead
// TODO: Make a query for episodes and invalidate that instead
LOG("info", "Invalidating episodes", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Episodes, id]);
const episode = queryClient.getQueryData<Item.Episode>([
QueryKeys.Episodes,
id,
]);
if (episode !== undefined) {
queryClient.invalidateQueries([
QueryKeys.Series,
episode.sonarrSeriesId,
]);
}
});
},
delete: (ids) => {
LOG("info", "Invalidating episodes", ids);
ids.forEach((id) => {
queryClient.invalidateQueries([QueryKeys.Episodes, id]);
const episode = queryClient.getQueryData<Item.Episode>([
QueryKeys.Episodes,
id,
]);
if (episode !== undefined) {
queryClient.invalidateQueries([
QueryKeys.Series,
episode.sonarrSeriesId,
]);
}
});
},
},

View File

@ -21,7 +21,6 @@ import {
Selector,
Text,
} from "../components";
import { BaseUrlModification } from "../utilities/modifications";
import { branchOptions, proxyOptions, securityOptions } from "./options";
const characters = "abcdef0123456789";
@ -58,7 +57,7 @@ const SettingsGeneralView: FunctionComponent = () => {
icon="/"
settingKey="settings-general-base_url"
settingOptions={{
onLoaded: BaseUrlModification,
onLoaded: (s) => s.general.base_url?.slice(1) ?? "",
onSubmit: (v) => "/" + v,
}}
></Text>
@ -116,6 +115,14 @@ const SettingsGeneralView: FunctionComponent = () => {
}
settingKey={settingApiKey}
></Text>
<Check
label="Enable CORS headers"
settingKey="settings-cors-enabled"
></Check>
<Message>
Allow third parties to make requests to your Bazarr installation.
Requires a restart of Bazarr when changed.
</Message>
</Section>
<Section header="Proxy">
<Selector

View File

@ -103,6 +103,11 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
key: "hi_fallback",
name: "Use HI subtitles as a fallback (don't enable it if you have a HI language profile)",
},
{
type: "switch",
key: "unknown_as_english",
name: "Use subtitles with unknown info/language as english",
},
],
message:
"Warning for cloud users: this provider needs to read the entire file in order to extract subtitles.",

View File

@ -14,7 +14,6 @@ import {
URLTestButton,
} from "../components";
import { moviesEnabledKey } from "../keys";
import { BaseUrlModification } from "../utilities/modifications";
const SettingsRadarrView: FunctionComponent = () => {
return (
@ -32,7 +31,7 @@ const SettingsRadarrView: FunctionComponent = () => {
icon="/"
settingKey="settings-radarr-base_url"
settingOptions={{
onLoaded: BaseUrlModification,
onLoaded: (s) => s.radarr.base_url?.slice(1) ?? "",
onSubmit: (v) => "/" + v,
}}
></Text>

View File

@ -16,7 +16,6 @@ import {
} from "../components";
import { seriesEnabledKey } from "../keys";
import { seriesTypeOptions } from "../options";
import { BaseUrlModification } from "../utilities/modifications";
const SettingsSonarrView: FunctionComponent = () => {
return (
@ -34,7 +33,7 @@ const SettingsSonarrView: FunctionComponent = () => {
icon="/"
settingKey="settings-sonarr-base_url"
settingOptions={{
onLoaded: BaseUrlModification,
onLoaded: (s) => s.sonarr.base_url?.slice(1) ?? "",
onSubmit: (v) => "/" + v,
}}
></Text>

View File

@ -364,6 +364,14 @@ const SettingsSubtitlesView: FunctionComponent = () => {
<Text placeholder="0777" settingKey="settings-general-chmod"></Text>
<Message>Must be 4 digit octal</Message>
</CollapseBox>
<Check
label="Always use Audio Track as Reference for Syncing"
settingKey="settings-subsync-force_audio"
></Check>
<Message>
Use the audio track as reference for syncing, instead of using the
embedded subtitle.
</Message>
<Check
label="Automatic Subtitles Synchronization"
settingKey="settings-subsync-use_subsync"

View File

@ -3,9 +3,10 @@ import { Toolbox } from "@/components";
import { LoadingProvider } from "@/contexts";
import { useOnValueChange } from "@/utilities";
import { LOG } from "@/utilities/console";
import { usePrompt } from "@/utilities/routers";
import { useUpdateLocalStorage } from "@/utilities/storage";
import { faSave } from "@fortawesome/free-solid-svg-icons";
import { Container, Group, LoadingOverlay } from "@mantine/core";
import { Badge, Container, Group, LoadingOverlay } from "@mantine/core";
import { useDocumentTitle, useForm } from "@mantine/hooks";
import { FunctionComponent, ReactNode, useCallback, useMemo } from "react";
import { enabledLanguageKey, languageProfileKey } from "../keys";
@ -84,6 +85,11 @@ const Layout: FunctionComponent<Props> = (props) => {
return Object.keys(object).length;
}, [form.values.settings, form.values.storages]);
usePrompt(
totalStagedCount > 0,
`You have ${totalStagedCount} unsaved changes, are you sure you want to leave?`
);
useDocumentTitle(`${name} - Bazarr (Settings)`);
if (settings === undefined) {
@ -101,6 +107,11 @@ const Layout: FunctionComponent<Props> = (props) => {
icon={faSave}
loading={isMutating}
disabled={totalStagedCount === 0}
rightIcon={
<Badge size="xs" radius="sm" hidden={totalStagedCount === 0}>
{totalStagedCount}
</Badge>
}
>
Save
</Toolbox.Button>

View File

@ -130,15 +130,16 @@ type SliderProps = BaseInput<number> &
export const Slider: FunctionComponent<SliderProps> = (props) => {
const { value, update, rest } = useBaseInput(props);
const { label, ...sliderProps } = rest;
const { min = 0, max = 100 } = props;
const marks = useSliderMarks([min, max]);
return (
<InputWrapper label={rest.label}>
<InputWrapper label={label}>
<MantineSlider
{...rest}
{...sliderProps}
marks={marks}
onChange={update}
value={value ?? 0}

View File

@ -18,10 +18,16 @@ export const URLTestButton: FunctionComponent<{
const click = useCallback(() => {
if (address && apikey && ssl !== null) {
let testUrl: string;
let baseUrl = url;
if (baseUrl && baseUrl.startsWith("/") === false) {
baseUrl = "/" + baseUrl;
}
if (port) {
testUrl = `${address}:${port}${url ?? ""}`;
testUrl = `${address}:${port}${baseUrl ?? ""}`;
} else {
testUrl = `${address}${url ?? ""}`;
testUrl = `${address}${baseUrl ?? ""}`;
}
const request = {
protocol: ssl ? "https" : "http",

View File

@ -1,6 +1,3 @@
export const BaseUrlModification = (settings: Settings) =>
settings.general.base_url?.slice(1) ?? "";
export const SubzeroModification = (key: string) => (settings: Settings) =>
settings.general.subzero_mods?.includes(key) ?? false;

View File

@ -2,9 +2,10 @@ import { useDeleteBackups, useRestoreBackups } from "@/apis/hooks";
import { Action, PageTable } from "@/components";
import { useModals } from "@/modules/modals";
import { useTableStyles } from "@/styles";
import { Environment } from "@/utilities";
import { faClock, faHistory, faTrash } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { Group, Text } from "@mantine/core";
import { Anchor, Group, Text } from "@mantine/core";
import { FunctionComponent, useMemo } from "react";
import { Column } from "react-table";
@ -23,8 +24,13 @@ const Table: FunctionComponent<Props> = ({ backups }) => {
Header: "Name",
accessor: "filename",
Cell: ({ value }) => {
const { classes } = useTableStyles();
return <Text className={classes.primary}>{value}</Text>;
return (
<Anchor
href={`${Environment.baseUrl}/system/backup/download/${value}`}
>
{value}
</Anchor>
);
},
},
{

View File

@ -111,6 +111,7 @@ declare namespace Settings {
use_subsync_movie_threshold: boolean;
subsync_movie_threshold: number;
debug: boolean;
force_audio: boolean;
}
interface Analytic {

View File

@ -6,12 +6,12 @@ interface SocketIODebugger {
declare global {
interface Window {
Bazarr: BazarrServer;
_socketio: SocketIODebugger;
socketIO: SocketIODebugger;
}
interface WindowEventMap {
"app-auth-changed": CustomEvent<{ authenticated: boolean }>;
"app-critical-error": CustomEvent<{ message: string }>;
"app-login-required": CustomEvent;
"app-online-status": CustomEvent<{ online: boolean }>;
}
}

View File

@ -23,4 +23,15 @@ export function ENSURE(condition: boolean, msg: string, ...payload: any[]) {
}
}
export function GROUP(
header: string,
content: (logger: typeof console.log) => void
) {
if (!isProdEnv) {
console.group(header);
content(console.log);
console.groupEnd();
}
}
export const ASSERT = console.assert;

View File

@ -7,8 +7,8 @@ function createEvent<
return new CustomEvent<P>(event, { bubbles: true, detail: payload });
}
export function setLoginRequired() {
const event = createEvent("app-login-required", {});
export function setAuthenticated(authenticated: boolean) {
const event = createEvent("app-auth-changed", { authenticated });
window.dispatchEvent(event);
}

View File

@ -0,0 +1,40 @@
// A workaround of built-in hooks in React-Router v6
// https://gist.github.com/rmorse/426ffcc579922a82749934826fa9f743
import type { Blocker, History, Transition } from "history";
import { useContext, useEffect } from "react";
import { UNSAFE_NavigationContext } from "react-router-dom";
export function useBlocker(blocker: Blocker, when = true) {
const navigator = useContext(UNSAFE_NavigationContext).navigator as History;
useEffect(() => {
if (!when) return;
const unblock = navigator.block((tx: Transition) => {
const autoUnblockingTx = {
...tx,
retry() {
// Automatically unblock the transition so it can play all the way
// through before retrying it. TODO: Figure out how to re-enable
// this block if the transition is cancelled for some reason.
unblock();
tx.retry();
},
};
blocker(autoUnblockingTx);
});
return unblock;
}, [navigator, blocker, when]);
}
// TODO: Replace with Mantine's confirmation modal
export function usePrompt(when: boolean, message: string) {
useBlocker((tx) => {
if (window.confirm(message)) {
tx.retry();
}
}, when);
}

View File

@ -4,4 +4,4 @@
from .container import FFprobeVideoContainer
from .stream import FFprobeSubtitleStream
__version__ = "0.2"
__version__ = "0.2.2"

View File

@ -8,12 +8,21 @@ from .exceptions import LanguageNotFound
logger = logging.getLogger(__name__)
LANGUAGE_FALLBACK = None
class FFprobeGenericSubtitleTags:
_DETECTABLE_TAGS = None
def __init__(self, data: dict):
self.language = _get_language(data)
try:
self.language = _get_language(data)
except LanguageNotFound:
if LANGUAGE_FALLBACK is not None:
self.language = Language.fromietf(LANGUAGE_FALLBACK)
else:
raise
self._data = data
@classmethod

View File

@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
"""
flask_cors
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2016 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
from .decorator import cross_origin
from .extension import CORS
from .version import __version__
__all__ = ['CORS', 'cross_origin']
# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler
# Set initial level to WARN. Users must manually enable logging for
# flask_cors to see our logging.
rootlogger = logging.getLogger(__name__)
rootlogger.addHandler(NullHandler())
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)

libs/flask_cors/core.py (new file, 383 lines)
View File

@ -0,0 +1,383 @@
# -*- coding: utf-8 -*-
"""
core
~~~~
Core functionality shared between the extension and the decorator.
:copyright: (c) 2016 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
import re
import logging
try:
# on python 3
from collections.abc import Iterable
except ImportError:
# on python 2.7 and pypy
from collections import Iterable
from datetime import timedelta
from six import string_types
from flask import request, current_app
from werkzeug.datastructures import Headers, MultiDict
LOG = logging.getLogger(__name__)
# Response Headers
ACL_ORIGIN = 'Access-Control-Allow-Origin'
ACL_METHODS = 'Access-Control-Allow-Methods'
ACL_ALLOW_HEADERS = 'Access-Control-Allow-Headers'
ACL_EXPOSE_HEADERS = 'Access-Control-Expose-Headers'
ACL_CREDENTIALS = 'Access-Control-Allow-Credentials'
ACL_MAX_AGE = 'Access-Control-Max-Age'
# Request Header
ACL_REQUEST_METHOD = 'Access-Control-Request-Method'
ACL_REQUEST_HEADERS = 'Access-Control-Request-Headers'
ALL_METHODS = ['GET', 'HEAD', 'POST', 'OPTIONS', 'PUT', 'PATCH', 'DELETE']
CONFIG_OPTIONS = ['CORS_ORIGINS', 'CORS_METHODS', 'CORS_ALLOW_HEADERS',
'CORS_EXPOSE_HEADERS', 'CORS_SUPPORTS_CREDENTIALS',
'CORS_MAX_AGE', 'CORS_SEND_WILDCARD',
'CORS_AUTOMATIC_OPTIONS', 'CORS_VARY_HEADER',
'CORS_RESOURCES', 'CORS_INTERCEPT_EXCEPTIONS',
'CORS_ALWAYS_SEND']
# Attribute added to request object by decorator to indicate that CORS
# was evaluated, in case the decorator and extension are both applied
# to a view.
FLASK_CORS_EVALUATED = '_FLASK_CORS_EVALUATED'
# Strange, but this gets the type of a compiled regex, which is otherwise not
# exposed in a public API.
RegexObject = type(re.compile(''))
DEFAULT_OPTIONS = dict(origins='*',
methods=ALL_METHODS,
allow_headers='*',
expose_headers=None,
supports_credentials=False,
max_age=None,
send_wildcard=False,
automatic_options=True,
vary_header=True,
resources=r'/*',
intercept_exceptions=True,
always_send=True)
def parse_resources(resources):
if isinstance(resources, dict):
# To make the API more consistent with the decorator, allow a
# resource of '*', which is not actually a valid regexp.
resources = [(re_fix(k), v) for k, v in resources.items()]
# Sort by regex length to provide consistency of matching and
# to provide a proxy for specificity of match. E.G. longer
# regular expressions are tried first.
def pattern_length(pair):
maybe_regex, _ = pair
return len(get_regexp_pattern(maybe_regex))
return sorted(resources,
key=pattern_length,
reverse=True)
elif isinstance(resources, string_types):
return [(re_fix(resources), {})]
elif isinstance(resources, Iterable):
return [(re_fix(r), {}) for r in resources]
# Type of compiled regex is not part of the public API. Test for this
# at runtime.
elif isinstance(resources, RegexObject):
return [(re_fix(resources), {})]
else:
raise ValueError("Unexpected value for resources argument.")
def get_regexp_pattern(regexp):
"""
Helper that returns regexp pattern from given value.
:param regexp: regular expression to stringify
:type regexp: _sre.SRE_Pattern or str
:returns: string representation of given regexp pattern
:rtype: str
"""
try:
return regexp.pattern
except AttributeError:
return str(regexp)
def get_cors_origins(options, request_origin):
origins = options.get('origins')
wildcard = r'.*' in origins
# If the Origin header is not present terminate this set of steps.
# The request is outside the scope of this specification. -- W3Spec
if request_origin:
LOG.debug("CORS request received with 'Origin' %s", request_origin)
# If the allowed origins is an asterisk or 'wildcard', always match
if wildcard and options.get('send_wildcard'):
LOG.debug("Allowed origins are set to '*'. Sending wildcard CORS header.")
return ['*']
# If the value of the Origin header is a case-sensitive match
# for any of the values in list of origins
elif try_match_any(request_origin, origins):
LOG.debug("The request's Origin header matches. Sending CORS headers.", )
# Add a single Access-Control-Allow-Origin header, with either
# the value of the Origin header or the string "*" as value.
# -- W3Spec
return [request_origin]
else:
LOG.debug("The request's Origin header does not match any of allowed origins.")
return None
elif options.get('always_send'):
if wildcard:
# If wildcard is in the origins, even if 'send_wildcard' is False,
# simply send the wildcard. Unless supports_credentials is True,
# since that is forbidden by the spec.
# It is the most likely to be correct thing to do (the only other
# option is to return nothing, which is almost certainly not what
# the developer wants if the '*' origin was specified).
if options.get('supports_credentials'):
return None
else:
return ['*']
else:
# Return all origins that are not regexes.
return sorted([o for o in origins if not probably_regex(o)])
# Terminate these steps, return the original request untouched.
else:
LOG.debug("The request did not contain an 'Origin' header. This means the browser or client did not request CORS, ensure the Origin Header is set.")
return None
def get_allow_headers(options, acl_request_headers):
if acl_request_headers:
request_headers = [h.strip() for h in acl_request_headers.split(',')]
# any header that matches in the allow_headers
matching_headers = filter(
lambda h: try_match_any(h, options.get('allow_headers')),
request_headers
)
return ', '.join(sorted(matching_headers))
return None
def get_cors_headers(options, request_headers, request_method):
origins_to_set = get_cors_origins(options, request_headers.get('Origin'))
headers = MultiDict()
if not origins_to_set: # CORS is not enabled for this route
return headers
for origin in origins_to_set:
headers.add(ACL_ORIGIN, origin)
headers[ACL_EXPOSE_HEADERS] = options.get('expose_headers')
if options.get('supports_credentials'):
headers[ACL_CREDENTIALS] = 'true' # case sensitive
# This is a preflight request
# http://www.w3.org/TR/cors/#resource-preflight-requests
if request_method == 'OPTIONS':
acl_request_method = request_headers.get(ACL_REQUEST_METHOD, '').upper()
# If there is no Access-Control-Request-Method header or if parsing
# failed, do not set any additional headers
if acl_request_method and acl_request_method in options.get('methods'):
# If method is not a case-sensitive match for any of the values in
# list of methods do not set any additional headers and terminate
# this set of steps.
headers[ACL_ALLOW_HEADERS] = get_allow_headers(options, request_headers.get(ACL_REQUEST_HEADERS))
headers[ACL_MAX_AGE] = options.get('max_age')
headers[ACL_METHODS] = options.get('methods')
else:
LOG.info("The request's Access-Control-Request-Method header does not match allowed methods. CORS headers will not be applied.")
# http://www.w3.org/TR/cors/#resource-implementation
if options.get('vary_header'):
# Only set header if the origin returned will vary dynamically,
# i.e. if we are not returning an asterisk, and there are multiple
# origins that can be matched.
if headers[ACL_ORIGIN] == '*':
pass
elif (len(options.get('origins')) > 1 or
len(origins_to_set) > 1 or
any(map(probably_regex, options.get('origins')))):
headers.add('Vary', 'Origin')
return MultiDict((k, v) for k, v in headers.items() if v)
def set_cors_headers(resp, options):
"""
Performs the actual evaluation of Flask-CORS options and modifies the
response object.
This function is used both in the decorator and the after_request
callback
"""
# If CORS has already been evaluated via the decorator, skip
if hasattr(resp, FLASK_CORS_EVALUATED):
LOG.debug('CORS has already been evaluated, skipping')
return resp
# Some libraries, like OAuthlib, set resp.headers to non-MultiDict
# objects (Werkzeug Headers work as well). This is a problem because
# headers allow repeated values.
if (not isinstance(resp.headers, Headers)
and not isinstance(resp.headers, MultiDict)):
resp.headers = MultiDict(resp.headers)
headers_to_set = get_cors_headers(options, request.headers, request.method)
LOG.debug('Setting CORS headers: %s', str(headers_to_set))
for k, v in headers_to_set.items():
resp.headers.add(k, v)
return resp
def probably_regex(maybe_regex):
if isinstance(maybe_regex, RegexObject):
return True
else:
common_regex_chars = ['*', '\\', ']', '?', '$', '^', '[', ']', '(', ')']
# Use common characters used in regular expressions as a proxy
# for whether this string is in fact a regex.
return any((c in maybe_regex for c in common_regex_chars))
def re_fix(reg):
"""
Replace the invalid regex r'*' with the valid, wildcard regex r'.*' to
enable the CORS app extension to have a more user-friendly API.
"""
return r'.*' if reg == r'*' else reg
def try_match_any(inst, patterns):
return any(try_match(inst, pattern) for pattern in patterns)
def try_match(request_origin, maybe_regex):
"""Safely attempts to match a pattern or string to a request origin."""
if isinstance(maybe_regex, RegexObject):
return re.match(maybe_regex, request_origin)
elif probably_regex(maybe_regex):
return re.match(maybe_regex, request_origin, flags=re.IGNORECASE)
else:
try:
return request_origin.lower() == maybe_regex.lower()
except AttributeError:
return request_origin == maybe_regex
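# Illustrative matching semantics (not part of the module):
#   try_match("https://app.example.com", re.compile(r"https://.*\.example\.com"))  -> truthy (regex match)
#   try_match("https://EXAMPLE.COM", "https://example.com")                        -> True (plain strings compare case-insensitively)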
def get_cors_options(appInstance, *dicts):
"""
Compute CORS options for an application by combining the DEFAULT_OPTIONS,
the app's configuration-specified options and any dictionaries passed. The
last specified option wins.
"""
options = DEFAULT_OPTIONS.copy()
options.update(get_app_kwarg_dict(appInstance))
if dicts:
for d in dicts:
options.update(d)
return serialize_options(options)
def get_app_kwarg_dict(appInstance=None):
"""Returns the dictionary of CORS specific app configurations."""
app = (appInstance or current_app)
# In order to support blueprints which do not have a config attribute
app_config = getattr(app, 'config', {})
return {
k.lower().replace('cors_', ''): app_config.get(k)
for k in CONFIG_OPTIONS
if app_config.get(k) is not None
}
def flexible_str(obj):
"""
A more flexible str function which intelligently handles stringifying
strings, lists and other iterables. The results are lexicographically sorted
to ensure generated responses are consistent when iterables such as Set
are used.
"""
if obj is None:
return None
elif(not isinstance(obj, string_types)
and isinstance(obj, Iterable)):
return ', '.join(str(item) for item in sorted(obj))
else:
return str(obj)
def serialize_option(options_dict, key, upper=False):
if key in options_dict:
value = flexible_str(options_dict[key])
options_dict[key] = value.upper() if upper else value
def ensure_iterable(inst):
"""
Wraps scalars or string types as a list, or returns the iterable instance.
"""
if isinstance(inst, string_types):
return [inst]
elif not isinstance(inst, Iterable):
return [inst]
else:
return inst
def sanitize_regex_param(param):
return [re_fix(x) for x in ensure_iterable(param)]
def serialize_options(opts):
"""
A helper method to serialize and process the options dictionary.
"""
options = (opts or {}).copy()
for key in opts.keys():
if key not in DEFAULT_OPTIONS:
LOG.warning("Unknown option passed to Flask-CORS: %s", key)
# Ensure origins is a list of allowed origins with at least one entry.
options['origins'] = sanitize_regex_param(options.get('origins'))
options['allow_headers'] = sanitize_regex_param(options.get('allow_headers'))
# This is expressly forbidden by the spec. Raise a value error so people
# don't get burned in production.
if r'.*' in options['origins'] and options['supports_credentials'] and options['send_wildcard']:
raise ValueError("Cannot use supports_credentials in conjunction with"
"an origin string of '*'. See: "
"http://www.w3.org/TR/cors/#resource-requests")
serialize_option(options, 'expose_headers')
serialize_option(options, 'methods', upper=True)
if isinstance(options.get('max_age'), timedelta):
options['max_age'] = str(int(options['max_age'].total_seconds()))
return options
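
A short sketch of how the layered options resolve, assuming a plain Flask app (the CORS_ORIGINS value is illustrative):

from datetime import timedelta

from flask import Flask
from flask_cors.core import get_cors_options

app = Flask(__name__)
app.config["CORS_ORIGINS"] = ["https://bazarr.example.com"]

# Defaults < app.config CORS_* keys < explicit dicts; the last value wins.
options = get_cors_options(app, {"max_age": timedelta(minutes=10)})
assert options["origins"] == ["https://bazarr.example.com"]
assert options["max_age"] == "600"  # timedelta is serialized to whole seconds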

View File

@ -0,0 +1,135 @@
# -*- coding: utf-8 -*-
"""
decorator
~~~~
This unit exposes a single decorator which should be used to wrap a
Flask route. It accepts all the same parameters and options as
the CORS extension.
:copyright: (c) 2016 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
from functools import update_wrapper
from flask import make_response, request, current_app
from .core import *
LOG = logging.getLogger(__name__)
def cross_origin(*args, **kwargs):
"""
This function is the decorator used to wrap a Flask route.
In the simplest case, simply use the default parameters to allow all
origins in what is the most permissive configuration. If this method
modifies state or performs authentication which may be brute-forced, you
should add some degree of protection, such as Cross-Site Request
Forgery (CSRF) protection.
:param origins:
The origin, or list of origins to allow requests from.
The origin(s) may be regular expressions, case-sensitive strings,
or else an asterisk
Default : '*'
:type origins: list, string or regex
:param methods:
The method or list of methods which the allowed origins are allowed to
access for non-simple requests.
Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
:type methods: list or string
:param expose_headers:
The header or list of headers which are safe to expose to the API of a
CORS API specification.
Default : None
:type expose_headers: list or string
:param allow_headers:
The header or list of header field names which can be used when this
resource is accessed by allowed origins. The header(s) may be regular
expressions, case-sensitive strings, or else an asterisk.
Default : '*', allow all headers
:type allow_headers: list, string or regex
:param supports_credentials:
Allows users to make authenticated requests. If true, injects the
`Access-Control-Allow-Credentials` header in responses. This allows
cookies and credentials to be submitted across domains.
:note: This option cannot be used in conjunction with a '*' origin
Default : False
:type supports_credentials: bool
:param max_age:
The maximum time for which this CORS request may be cached. This value
is set as the `Access-Control-Max-Age` header.
Default : None
:type max_age: timedelta, integer, string or None
:param send_wildcard: If True, and the origins parameter is `*`, a wildcard
`Access-Control-Allow-Origin` header is sent, rather than the
request's `Origin` header.
Default : False
:type send_wildcard: bool
:param vary_header:
If True, the header Vary: Origin will be returned as per the W3
implementation guidelines.
Setting this header when the `Access-Control-Allow-Origin` is
dynamically generated (e.g. when there is more than one allowed
origin, and an Origin other than '*' is returned) informs CDNs and other
caches that the CORS headers are dynamic, and cannot be cached.
If False, the Vary header will never be injected or altered.
Default : True
:type vary_header: bool
:param automatic_options:
Only applies to the `cross_origin` decorator. If True, Flask-CORS will
override Flask's default OPTIONS handling to return CORS headers for
OPTIONS requests.
Default : True
:type automatic_options: bool
"""
_options = kwargs
def decorator(f):
LOG.debug("Enabling %s for cross_origin using options:%s", f, _options)
# If True, intercept OPTIONS requests by modifying the view function,
# replicating Flask's default behavior, and wrapping the response with
# CORS headers.
#
# If f.provide_automatic_options is unset or True, Flask's route
# decorator (which actually wraps the function object we return)
# intercepts OPTIONS handling, and requests will not have CORS headers.
if _options.get('automatic_options', True):
f.required_methods = getattr(f, 'required_methods', set())
f.required_methods.add('OPTIONS')
f.provide_automatic_options = False
def wrapped_function(*args, **kwargs):
# Handle setting of Flask-Cors parameters
options = get_cors_options(current_app, _options)
if options.get('automatic_options') and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
set_cors_headers(resp, options)
setattr(resp, FLASK_CORS_EVALUATED, True)
return resp
return update_wrapper(wrapped_function, f)
return decorator
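
A per-route sketch using the decorator above (route, origin, and payload are illustrative):

from flask import Flask, jsonify
from flask_cors import cross_origin

app = Flask(__name__)

@app.route("/api/system/languages")
@cross_origin(origins=["http://localhost:5173"], supports_credentials=True)
def languages():
    # Only the listed origin receives Access-Control-Allow-Origin;
    # credentials are allowed because a concrete origin (not '*') is set.
    return jsonify(["en", "es"])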

View File

@ -0,0 +1,190 @@
# -*- coding: utf-8 -*-
"""
extension
~~~~
Flask-CORS is a simple extension to Flask allowing you to support cross
origin resource sharing (CORS) using a simple decorator.
:copyright: (c) 2016 by Cory Dolphin.
:license: MIT, see LICENSE for more details.
"""
from flask import request
from .core import *
try:
from urllib.parse import unquote_plus
except ImportError:
from urllib import unquote_plus
LOG = logging.getLogger(__name__)
class CORS(object):
"""
Initializes Cross Origin Resource sharing for the application. The
arguments are identical to :py:func:`cross_origin`, with the addition of a
`resources` parameter. The resources parameter defines a series of regular
expressions for resource paths to match and optionally, the associated
options to be applied to the particular resource. These options are
identical to the arguments to :py:func:`cross_origin`.
The settings for CORS are determined in the following order:
1. Resource level settings (e.g. when passed as a dictionary)
2. Keyword argument settings
3. App level configuration settings (e.g. CORS_*)
4. Default settings
Note: as it is possible for multiple regular expressions to match a
resource path, the regular expressions are first sorted by length,
from longest to shortest, in order to attempt to match the most
specific regular expression. This allows the definition of a
number of specific resource options, with a wildcard fallback
for all other resources.
:param resources:
The series of regular expression and (optionally) associated CORS
options to be applied to the given resource path.
If the argument is a dictionary, its keys must be regular expressions,
and the values must be a dictionary of kwargs, identical to the kwargs
of this function.
If the argument is a list, it is expected to be a list of regular
expressions, for which the app-wide configured options are applied.
If the argument is a string, it is expected to be a regular expression
for which the app-wide configured options are applied.
Default : Match all and apply app-level configuration
:type resources: dict, iterable or string
:param origins:
The origin, or list of origins to allow requests from.
The origin(s) may be regular expressions, case-sensitive strings,
or else an asterisk
Default : '*'
:type origins: list, string or regex
:param methods:
The method or list of methods which the allowed origins are allowed to
access for non-simple requests.
Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
:type methods: list or string
:param expose_headers:
The header or list of headers which are safe to expose to the API of a
CORS API specification.
Default : None
:type expose_headers: list or string
:param allow_headers:
The header or list of header field names which can be used when this
resource is accessed by allowed origins. The header(s) may be regular
expressions, case-sensitive strings, or else an asterisk.
Default : '*', allow all headers
:type allow_headers: list, string or regex
:param supports_credentials:
Allows users to make authenticated requests. If true, injects the
`Access-Control-Allow-Credentials` header in responses. This allows
cookies and credentials to be submitted across domains.
:note: This option cannot be used in conjunction with a '*' origin
Default : False
:type supports_credentials: bool
:param max_age:
The maximum time for which this CORS request may be cached. This value
is set as the `Access-Control-Max-Age` header.
Default : None
:type max_age: timedelta, integer, string or None
:param send_wildcard: If True, and the origins parameter is `*`, a wildcard
`Access-Control-Allow-Origin` header is sent, rather than the
request's `Origin` header.
Default : False
:type send_wildcard: bool
:param vary_header:
If True, the header Vary: Origin will be returned as per the W3
implementation guidelines.
Setting this header when the `Access-Control-Allow-Origin` is
dynamically generated (e.g. when there is more than one allowed
origin, and an Origin other than '*' is returned) informs CDNs and other
caches that the CORS headers are dynamic, and cannot be cached.
If False, the Vary header will never be injected or altered.
Default : True
:type vary_header: bool
"""
def __init__(self, app=None, **kwargs):
self._options = kwargs
if app is not None:
self.init_app(app, **kwargs)
def init_app(self, app, **kwargs):
# The resources and options may be specified in the App Config, the CORS constructor
# or the kwargs to the call to init_app.
options = get_cors_options(app, self._options, kwargs)
# Flatten our resources into a list of the form
# (pattern_or_regexp, dictionary_of_options)
resources = parse_resources(options.get('resources'))
# Compute the options for each resource by combining the options from
# the app's configuration, the constructor, the kwargs to init_app, and
# finally the options specified in the resources dictionary.
resources = [
(pattern, get_cors_options(app, options, opts))
for (pattern, opts) in resources
]
# Create a human-readable form of these resources by converting the compiled
# regular expressions into strings.
resources_human = {get_regexp_pattern(pattern): opts for (pattern,opts) in resources}
LOG.debug("Configuring CORS with resources: %s", resources_human)
cors_after_request = make_after_request_function(resources)
app.after_request(cors_after_request)
# Wrap exception handlers with cross_origin
# These error handlers will still respect the behavior of the route
if options.get('intercept_exceptions', True):
def _after_request_decorator(f):
def wrapped_function(*args, **kwargs):
return cors_after_request(app.make_response(f(*args, **kwargs)))
return wrapped_function
if hasattr(app, 'handle_exception'):
app.handle_exception = _after_request_decorator(
app.handle_exception)
app.handle_user_exception = _after_request_decorator(
app.handle_user_exception)
def make_after_request_function(resources):
def cors_after_request(resp):
# If CORS headers are set in a view decorator, pass
if resp.headers is not None and resp.headers.get(ACL_ORIGIN):
LOG.debug('CORS has already been evaluated, skipping')
return resp
normalized_path = unquote_plus(request.path)
for res_regex, res_options in resources:
if try_match(normalized_path, res_regex):
LOG.debug("Request to '%s' matches CORS resource '%s'. Using options: %s",
request.path, get_regexp_pattern(res_regex), res_options)
set_cors_headers(resp, res_options)
break
else:
LOG.debug('No CORS rule matches')
return resp
return cors_after_request
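
And a sketch of resource-scoped configuration with the extension, showing how the settings precedence above plays out in practice (paths and origins are illustrative):

from flask import Flask
from flask_cors import CORS

app = Flask(__name__)

# Resource-level options (the dict values) override the keyword arguments,
# which in turn override any CORS_* app config and the built-in defaults.
CORS(
    app,
    resources={r"/api/.*": {"origins": ["http://localhost:6767"]}},
    supports_credentials=False,
)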

View File

@ -0,0 +1 @@
__version__ = '3.0.10'

View File

@ -167,16 +167,19 @@ class Episode(Video):
if guess['type'] != 'episode':
raise ValueError('The guess must be an episode guess')
if 'title' not in guess or 'episode' not in guess:
raise ValueError('Insufficient data to process the guess')
# We'll ignore missing fields. The Video instance will be refined anyway.
# if 'title' not in guess or 'episode' not in guess:
# raise ValueError('Insufficient data to process the guess')
# Currently we only have single-ep support (guessit returns a multi-ep as a list with int values)
# Most providers only support single-ep, so make sure it contains only 1 episode
# In case of multi-ep, take the lowest episode (subtitles will normally be available on lowest episode number)
episode_guess = guess.get('episode')
episode_guess = guess.get('episode', 1)
episode = min(episode_guess) if episode_guess and isinstance(episode_guess, list) else episode_guess
return cls(name, guess['title'], guess.get('season', 1), episode, title=guess.get('episode_title'),
return cls(name, guess.get("title", "Unknown Title"), guess.get('season', 1), episode, title=guess.get('episode_title'),
year=guess.get('year'), source=guess.get('source'), original_series='year' not in guess,
release_group=guess.get('release_group'), resolution=guess.get('screen_size'),
video_codec=guess.get('video_codec'), audio_codec=guess.get('audio_codec'),
@ -220,14 +223,16 @@ class Movie(Video):
if guess['type'] != 'movie':
raise ValueError('The guess must be a movie guess')
if 'title' not in guess:
raise ValueError('Insufficient data to process the guess')
# We'll ignore missing fields. The Video instance will be refined anyway.
# if 'title' not in guess:
# raise ValueError('Insufficient data to process the guess')
alternative_titles = []
if 'alternative_title' in guess:
alternative_titles.append(u"%s %s" % (guess['title'], guess['alternative_title']))
return cls(name, guess['title'], source=guess.get('source'), release_group=guess.get('release_group'),
return cls(name, guess.get('title', 'Unknown Title'), source=guess.get('source'), release_group=guess.get('release_group'),
resolution=guess.get('screen_size'), video_codec=guess.get('video_codec'),
audio_codec=guess.get('audio_codec'), year=guess.get('year'), alternative_titles=alternative_titles,
streaming_service=guess.get("streaming_service"), edition=guess.get("edition"))
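
A small sketch of the relaxed behaviour; the module path (subliminal_patch.video) is an assumption, while the "Unknown Title" placeholder comes from the hunks above:

from subliminal_patch.video import Episode, Movie

# Incomplete guess: guessit found the episode number but no series title.
guess = {"type": "episode", "season": 1, "episode": 3}
video = Episode.fromguess("unlabelled.s01e03.mkv", guess)
# Previously a ValueError; now the series falls back to "Unknown Title"
# and will be corrected later when the Video instance is refined.

movie = Movie.fromguess("unlabelled.movie.mkv", {"type": "movie"})
# Same fallback for movies: the title becomes "Unknown Title".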

View File

@ -70,6 +70,16 @@ def remove_crap_from_fn(fn):
return REMOVE_CRAP_FROM_FILENAME.sub(repl, fn)
def _nested_update(item, to_update):
for k, v in to_update.items():
if isinstance(v, dict):
item[k] = _nested_update(item.get(k, {}), v)
else:
item[k] = v
return item
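# Illustrative merge (not part of the module): nested keys are updated in
# place instead of being replaced wholesale, e.g.
#   _nested_update({"opensubtitles": {"username": "a"}},
#                  {"opensubtitles": {"password": "b"}})
#   -> {"opensubtitles": {"username": "a", "password": "b"}}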
class _ProviderConfigs(dict):
def __init__(self, pool, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -108,7 +118,9 @@ class _ProviderConfigs(dict):
else:
logger.debug("No provider config updates")
return super().update(items)
_nested_update(self, items)
return None
class _Banlist:
@ -727,7 +739,6 @@ def scan_video(path, dont_use_actual_file=False, hints=None, providers=None, ski
"""
hints = hints or {}
video_type = hints.get("type")
# check for non-existing path
if not dont_use_actual_file and not os.path.exists(path):
@ -740,42 +751,15 @@ def scan_video(path, dont_use_actual_file=False, hints=None, providers=None, ski
dirpath, filename = os.path.split(path)
logger.info('Determining basic video properties for %r in %r', filename, dirpath)
# hint guessit the filename itself and its 2 parent directories if we're an episode (most likely
# Series name/Season/filename), else only one
split_path = os.path.normpath(path).split(os.path.sep)[-3 if video_type == "episode" else -2:]
# remove crap from folder names
if video_type == "episode":
if len(split_path) > 2:
split_path[-3] = remove_crap_from_fn(split_path[-3])
else:
if len(split_path) > 1:
split_path[-2] = remove_crap_from_fn(split_path[-2])
guess_from = os.path.join(*split_path)
# remove crap from file name
guess_from = remove_crap_from_fn(guess_from)
# guess
hints["single_value"] = True
# if "title" in hints:
# hints["expected_title"] = [hints["title"]]
guessed_result = guessit(guess_from, options=hints)
guessed_result = guessit(path, options=hints)
logger.debug('GuessIt found: %s', json.dumps(guessed_result, cls=GuessitEncoder, indent=4, ensure_ascii=False))
video = Video.fromguess(path, guessed_result)
video.hints = hints
# get possibly alternative title from the filename itself
alt_guess = guessit(filename, options=hints)
if "title" in alt_guess and alt_guess["title"] != guessed_result["title"]:
if video_type == "episode":
video.alternative_series.append(alt_guess["title"])
else:
video.alternative_titles.append(alt_guess["title"])
logger.debug("Adding alternative title: %s", alt_guess["title"])
video.hints = hints # ?
if dont_use_actual_file and not hash_from:
return video

View File

@ -79,9 +79,7 @@ class EmbeddedSubtitlesProvider(Provider):
ffmpeg_path=None,
hi_fallback=False,
timeout=600,
include_ass=None,
include_srt=None,
mergerfs_mode=None,
unknown_as_english=False,
):
self._included_codecs = set(included_codecs or _ALLOWED_CODECS)
@ -93,6 +91,7 @@ class EmbeddedSubtitlesProvider(Provider):
cache_dir or tempfile.gettempdir(), self.__class__.__name__.lower()
)
self._hi_fallback = hi_fallback
self._unknown_as_english = unknown_as_english
self._cached_paths = {}
self._timeout = int(timeout)
@ -105,6 +104,9 @@ class EmbeddedSubtitlesProvider(Provider):
# Default is True
container.FFMPEG_STATS = False
tags.LANGUAGE_FALLBACK = "en" if self._unknown_as_english else None
logger.debug("Language fallback set: %s", tags.LANGUAGE_FALLBACK)
def initialize(self):
os.makedirs(self._cache_dir, exist_ok=True)
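
For reference, a minimal sketch of what the unknown_as_english wiring above does at the fese level (the stream metadata is illustrative; see the new tests further down):

from fese import tags, FFprobeSubtitleStream

# With the fallback unset (the default), a stream without a usable language
# tag raises fese.exceptions.LanguageNotFound.
tags.LANGUAGE_FALLBACK = "en"
stream = FFprobeSubtitleStream({"index": 3, "codec_name": "subrip", "tags": {}})
# stream.language now resolves to English instead of raising.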

View File

@ -191,7 +191,9 @@ class SubtitleModifications(object):
sub = processor.process(sub)
if sub.strip():
if not sub.isupper():
# only consider alphabetic characters to determine if uppercase
alpha_sub = ''.join([i for i in sub if i.isalpha()])
if alpha_sub and not alpha_sub.isupper():
return False
entry_used = True
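
A quick illustration of why the alphabetic filter matters (values are illustrative):

cue = "- 1998 -"
"".join(c for c in cue if c.isalpha())  # -> '' : no letters, so the uppercase check is now skipped
cue.isupper()                           # -> False, which the old check took as proof the text was not all-uppercase

cue = "WHERE ARE YOU?"
"".join(c for c in cue if c.isalpha())  # -> 'WHEREAREYOU', and .isupper() is True as expected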

View File

@ -8,6 +8,7 @@ dogpile.cache==1.1.5
enzyme==0.4.1
fese==0.1.2
ffsubsync==0.4.20
flask-cors==3.0.10
flask-restful==0.3.9
Flask-SocketIO==5.1.1
Flask==2.0.2

View File

@ -34,6 +34,7 @@ def test_get_providers_auth_embeddedsubtitles():
assert isinstance(item["ffprobe_path"], str)
assert isinstance(item["ffmpeg_path"], str)
assert isinstance(item["timeout"], str)
assert isinstance(item["unknown_as_english"], bool)
def test_get_providers_auth_karagarga():

View File

@ -0,0 +1,19 @@
from pathlib import Path
from subliminal_patch import core
def test_scan_video_movie(tmpdir):
video_path = Path(tmpdir, "Taxi Driver 1976 Bluray 720p x264.mkv")
video_path.touch()
result = core.scan_video(str(video_path))
assert isinstance(result, core.Movie)
def test_scan_video_episode(tmpdir):
video_path = Path(tmpdir, "The Wire S01E01 Bluray 720p x264.mkv")
video_path.touch()
result = core.scan_video(str(video_path))
assert isinstance(result, core.Episode)

View File

@ -4,6 +4,7 @@ import os
from fese import FFprobeSubtitleStream
from fese import FFprobeVideoContainer
from fese import tags
from fese.exceptions import LanguageNotFound
import pytest
from subliminal_patch.core import Episode
from subliminal_patch.core import Movie
@ -123,13 +124,36 @@ def fake_streams():
}
@pytest.mark.parametrize("tags_", [{}, {"language": "und", "title": "Unknown"}])
def test_list_subtitles_unknown_as_english(mocker, tags_):
with EmbeddedSubtitlesProvider(unknown_as_english=True):
fake = FFprobeSubtitleStream(
{"index": 3, "codec_name": "subrip", "tags": tags_}
)
mocker.patch(
"subliminal_patch.providers.embeddedsubtitles._MemoizedFFprobeVideoContainer.get_subtitles",
return_value=[fake],
)
streams = _MemoizedFFprobeVideoContainer.get_subtitles("")
assert len(streams) == 1
assert streams[0].language == Language.fromietf("en")
@pytest.mark.parametrize("tags_", [{}, {"language": "und", "title": "Unknown"}])
def test_list_subtitles_unknown_as_english_disabled(tags_):
with EmbeddedSubtitlesProvider(unknown_as_english=False):
with pytest.raises(LanguageNotFound):
assert FFprobeSubtitleStream(
{"index": 3, "codec_name": "subrip", "tags": tags_}
)
def test_list_subtitles_hi_fallback_one_stream(
video_single_language, fake_streams, mocker
):
with EmbeddedSubtitlesProvider(hi_fallback=True) as provider:
language = Language.fromalpha2("en")
mocker.patch(
# "fese.FFprobeVideoContainer.get_subtitles",
"subliminal_patch.providers.embeddedsubtitles._MemoizedFFprobeVideoContainer.get_subtitles",
return_value=[fake_streams["en_hi"]],
)