mirror of https://github.com/morpheus65535/bazarr
Merge development into master
This commit is contained in:
commit
71a2c758b7
|
@ -10,5 +10,5 @@ latest_verion=$(git describe --tags --abbrev=0)
|
|||
if [[ $RELEASE_MASTER -eq 1 ]]; then
|
||||
auto-changelog --stdout -t changelog-master.hbs --starting-version "$master_version" --commit-limit 3
|
||||
else
|
||||
auto-changelog --stdout --starting-version "$latest_verion" --unreleased --commit-limit 0
|
||||
auto-changelog --stdout --starting-version "$latest_verion" --unreleased-only --commit-limit 0
|
||||
fi
|
|
@ -8,12 +8,13 @@ from flask_restx import Resource, Namespace, fields
|
|||
from app.database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies
|
||||
from app.get_providers import get_throttled_providers
|
||||
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client
|
||||
from app.announcements import get_all_announcements
|
||||
from utilities.health import get_health_issues
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
api_ns_badges = Namespace('Badges', description='Get badges count to update the UI (episodes and movies wanted '
|
||||
'subtitles, providers with issues and health issues.')
|
||||
'subtitles, providers with issues, health issues and announcements.')
|
||||
|
||||
|
||||
@api_ns_badges.route('badges')
|
||||
|
@ -25,6 +26,7 @@ class Badges(Resource):
|
|||
'status': fields.Integer(),
|
||||
'sonarr_signalr': fields.String(),
|
||||
'radarr_signalr': fields.String(),
|
||||
'announcements': fields.Integer(),
|
||||
})
|
||||
|
||||
@authenticate
|
||||
|
@ -62,5 +64,6 @@ class Badges(Resource):
|
|||
"status": health_issues,
|
||||
'sonarr_signalr': "LIVE" if sonarr_signalr_client.connected else "",
|
||||
'radarr_signalr': "LIVE" if radarr_signalr_client.connected else "",
|
||||
'announcements': len(get_all_announcements()),
|
||||
}
|
||||
return result
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import pretty
|
||||
|
||||
from flask_restx import Resource, Namespace, reqparse, fields
|
||||
|
@ -13,7 +12,7 @@ from subtitles.mass_download import episode_download_subtitles
|
|||
from app.event_handler import event_stream
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from ..utils import authenticate, postprocess
|
||||
|
||||
api_ns_episodes_blacklist = Namespace('Episodes Blacklist', description='List, add or remove subtitles to or from '
|
||||
'episodes blacklist')
|
||||
|
@ -59,18 +58,17 @@ class EpisodesBlacklist(Resource):
|
|||
TableBlacklist.timestamp)\
|
||||
.join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\
|
||||
.join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\
|
||||
.order_by(TableBlacklist.timestamp.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
data = list(data)
|
||||
.order_by(TableBlacklist.timestamp.desc())
|
||||
if length > 0:
|
||||
data = data.limit(length).offset(start)
|
||||
data = list(data.dicts())
|
||||
|
||||
for item in data:
|
||||
# Make timestamp pretty
|
||||
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
|
||||
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
|
||||
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
|
||||
item.update({'timestamp': pretty.date(item['timestamp'])})
|
||||
|
||||
postprocessEpisode(item)
|
||||
postprocess(item)
|
||||
|
||||
return data
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ from flask_restx import Resource, Namespace, reqparse, fields
|
|||
from app.database import TableEpisodes
|
||||
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from ..utils import authenticate, postprocess
|
||||
|
||||
api_ns_episodes = Namespace('Episodes', description='List episodes metadata for specific series or episodes.')
|
||||
|
||||
|
@ -68,6 +68,6 @@ class Episodes(Resource):
|
|||
|
||||
result = list(result)
|
||||
for item in result:
|
||||
postprocessEpisode(item)
|
||||
postprocess(item)
|
||||
|
||||
return result
|
||||
|
|
|
@ -42,13 +42,14 @@ class EpisodesSubtitles(Resource):
|
|||
args = self.patch_request_parser.parse_args()
|
||||
sonarrSeriesId = args.get('seriesid')
|
||||
sonarrEpisodeId = args.get('episodeid')
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.path,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.audio_language,
|
||||
TableShows.title) \
|
||||
episodeInfo = TableEpisodes.select(
|
||||
TableEpisodes.path,
|
||||
TableEpisodes.sceneName,
|
||||
TableEpisodes.audio_language,
|
||||
TableShows.title) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
|
||||
.dicts()\
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
|
||||
.dicts() \
|
||||
.get_or_none()
|
||||
|
||||
if not episodeInfo:
|
||||
|
@ -56,13 +57,13 @@ class EpisodesSubtitles(Resource):
|
|||
|
||||
title = episodeInfo['title']
|
||||
episodePath = path_mappings.path_replace(episodeInfo['path'])
|
||||
sceneName = episodeInfo['scene_name'] or "None"
|
||||
sceneName = episodeInfo['sceneName'] or "None"
|
||||
|
||||
language = args.get('language')
|
||||
hi = args.get('hi').capitalize()
|
||||
forced = args.get('forced').capitalize()
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
|
||||
audio_language_list = get_audio_profile_languages(episodeInfo["audio_language"])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -73,23 +74,9 @@ class EpisodesSubtitles(Resource):
|
|||
title, 'series', profile_id=get_profile_id(episode_id=sonarrEpisodeId)))
|
||||
if result:
|
||||
result = result[0]
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
|
||||
subs_path)
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
|
||||
store_subtitles(path, episodePath)
|
||||
history_log(1, sonarrSeriesId, sonarrEpisodeId, result)
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
|
||||
store_subtitles(result.path, episodePath)
|
||||
else:
|
||||
event_stream(type='episode', payload=sonarrEpisodeId)
|
||||
|
||||
|
@ -117,21 +104,22 @@ class EpisodesSubtitles(Resource):
|
|||
args = self.post_request_parser.parse_args()
|
||||
sonarrSeriesId = args.get('seriesid')
|
||||
sonarrEpisodeId = args.get('episodeid')
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.title,
|
||||
TableEpisodes.path,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.audio_language)\
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
|
||||
.dicts()\
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.path,
|
||||
TableEpisodes.audio_language) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
|
||||
.dicts() \
|
||||
.get_or_none()
|
||||
|
||||
if not episodeInfo:
|
||||
return 'Episode not found', 404
|
||||
|
||||
title = episodeInfo['title']
|
||||
episodePath = path_mappings.path_replace(episodeInfo['path'])
|
||||
sceneName = episodeInfo['scene_name'] or "None"
|
||||
audio_language = episodeInfo['audio_language']
|
||||
|
||||
audio_language = get_audio_profile_languages(episodeInfo['audio_language'])
|
||||
if len(audio_language) and isinstance(audio_language[0], dict):
|
||||
audio_language = audio_language[0]
|
||||
else:
|
||||
audio_language = {'name': '', 'code2': '', 'code3': ''}
|
||||
|
||||
language = args.get('language')
|
||||
forced = True if args.get('forced') == 'true' else False
|
||||
|
@ -148,8 +136,6 @@ class EpisodesSubtitles(Resource):
|
|||
language=language,
|
||||
forced=forced,
|
||||
hi=hi,
|
||||
title=title,
|
||||
scene_name=sceneName,
|
||||
media_type='series',
|
||||
subtitle=subFile,
|
||||
audio_language=audio_language)
|
||||
|
@ -157,22 +143,12 @@ class EpisodesSubtitles(Resource):
|
|||
if not result:
|
||||
logging.debug(f"BAZARR unable to process subtitles for this episode: {episodePath}")
|
||||
else:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
subs_path = result[2]
|
||||
if hi:
|
||||
language_code = language + ":hi"
|
||||
elif forced:
|
||||
language_code = language + ":forced"
|
||||
else:
|
||||
language_code = language
|
||||
provider = "manual"
|
||||
score = 360
|
||||
history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score,
|
||||
subtitles_path=subs_path)
|
||||
history_log(4, sonarrSeriesId, sonarrEpisodeId, result, fake_provider=provider, fake_score=score)
|
||||
if not settings.general.getboolean('dont_notify_manual_actions'):
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
|
||||
store_subtitles(path, episodePath)
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
|
||||
store_subtitles(result.path, episodePath)
|
||||
|
||||
except OSError:
|
||||
pass
|
||||
|
@ -199,10 +175,10 @@ class EpisodesSubtitles(Resource):
|
|||
sonarrEpisodeId = args.get('episodeid')
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.title,
|
||||
TableEpisodes.path,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.audio_language)\
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
|
||||
.dicts()\
|
||||
TableEpisodes.sceneName,
|
||||
TableEpisodes.audio_language) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
|
||||
.dicts() \
|
||||
.get_or_none()
|
||||
|
||||
if not episodeInfo:
|
||||
|
|
|
@ -1,21 +1,18 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import operator
|
||||
import pretty
|
||||
|
||||
from flask_restx import Resource, Namespace, reqparse, fields
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
from datetime import timedelta
|
||||
|
||||
from app.database import get_exclusion_clause, TableEpisodes, TableShows, TableHistory, TableBlacklist
|
||||
from app.config import settings
|
||||
from app.database import TableEpisodes, TableShows, TableHistory, TableBlacklist
|
||||
from subtitles.upgrade import get_upgradable_episode_subtitles
|
||||
from utilities.path_mappings import path_mappings
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from ..utils import authenticate, postprocess
|
||||
|
||||
api_ns_episodes_history = Namespace('Episodes History', description='List episodes history events')
|
||||
|
||||
|
@ -70,42 +67,15 @@ class EpisodesHistory(Resource):
|
|||
length = args.get('length')
|
||||
episodeid = args.get('episodeid')
|
||||
|
||||
upgradable_episodes_not_perfect = []
|
||||
if settings.general.getboolean('upgrade_subs'):
|
||||
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
|
||||
minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
|
||||
datetime.datetime(1970, 1, 1)).total_seconds()
|
||||
|
||||
if settings.general.getboolean('upgrade_manual'):
|
||||
query_actions = [1, 2, 3, 6]
|
||||
else:
|
||||
query_actions = [1, 3]
|
||||
|
||||
upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)),
|
||||
(TableHistory.timestamp > minimum_timestamp),
|
||||
(TableHistory.score.is_null(False))]
|
||||
upgradable_episodes_conditions += get_exclusion_clause('series')
|
||||
upgradable_episodes = TableHistory.select(TableHistory.video_path,
|
||||
fn.MAX(TableHistory.timestamp).alias('timestamp'),
|
||||
TableHistory.score,
|
||||
TableShows.tags,
|
||||
TableEpisodes.monitored,
|
||||
TableShows.seriesType)\
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
|
||||
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
.where(reduce(operator.and_, upgradable_episodes_conditions))\
|
||||
.group_by(TableHistory.video_path)\
|
||||
.dicts()
|
||||
upgradable_episodes = list(upgradable_episodes)
|
||||
for upgradable_episode in upgradable_episodes:
|
||||
if upgradable_episode['timestamp'] > minimum_timestamp:
|
||||
try:
|
||||
int(upgradable_episode['score'])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
if int(upgradable_episode['score']) < 360:
|
||||
upgradable_episodes_not_perfect.append(upgradable_episode)
|
||||
upgradable_episodes_not_perfect = get_upgradable_episode_subtitles()
|
||||
if len(upgradable_episodes_not_perfect):
|
||||
upgradable_episodes_not_perfect = [{"video_path": x['video_path'],
|
||||
"timestamp": x['timestamp'],
|
||||
"score": x['score'],
|
||||
"tags": x['tags'],
|
||||
"monitored": x['monitored'],
|
||||
"seriesType": x['seriesType']}
|
||||
for x in upgradable_episodes_not_perfect]
|
||||
|
||||
query_conditions = [(TableEpisodes.title.is_null(False))]
|
||||
if episodeid:
|
||||
|
@ -114,7 +84,8 @@ class EpisodesHistory(Resource):
|
|||
episode_history = TableHistory.select(TableHistory.id,
|
||||
TableShows.title.alias('seriesTitle'),
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
|
||||
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias(
|
||||
'episode_number'),
|
||||
TableEpisodes.title.alias('episodeTitle'),
|
||||
TableHistory.timestamp,
|
||||
TableHistory.subs_id,
|
||||
|
@ -129,15 +100,14 @@ class EpisodesHistory(Resource):
|
|||
TableHistory.subtitles_path,
|
||||
TableHistory.sonarrEpisodeId,
|
||||
TableHistory.provider,
|
||||
TableShows.seriesType)\
|
||||
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
|
||||
.where(query_condition)\
|
||||
.order_by(TableHistory.timestamp.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
episode_history = list(episode_history)
|
||||
TableShows.seriesType) \
|
||||
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
|
||||
.where(query_condition) \
|
||||
.order_by(TableHistory.timestamp.desc())
|
||||
if length > 0:
|
||||
episode_history = episode_history.limit(length).offset(start)
|
||||
episode_history = list(episode_history.dicts())
|
||||
|
||||
blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts()
|
||||
blacklist_db = list(blacklist_db)
|
||||
|
@ -145,7 +115,7 @@ class EpisodesHistory(Resource):
|
|||
for item in episode_history:
|
||||
# Mark episode as upgradable or not
|
||||
item.update({"upgradable": False})
|
||||
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
|
||||
if {"video_path": str(item['path']), "timestamp": item['timestamp'], "score": item['score'],
|
||||
"tags": str(item['tags']), "monitored": str(item['monitored']),
|
||||
"seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect: # noqa: E129
|
||||
if os.path.exists(path_mappings.path_replace(item['subtitles_path'])) and \
|
||||
|
@ -154,16 +124,16 @@ class EpisodesHistory(Resource):
|
|||
|
||||
del item['path']
|
||||
|
||||
postprocessEpisode(item)
|
||||
postprocess(item)
|
||||
|
||||
if item['score']:
|
||||
item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"
|
||||
|
||||
# Make timestamp pretty
|
||||
if item['timestamp']:
|
||||
item["raw_timestamp"] = int(item['timestamp'])
|
||||
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
|
||||
item['timestamp'] = pretty.date(item["raw_timestamp"])
|
||||
item["raw_timestamp"] = item['timestamp'].timestamp()
|
||||
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
|
||||
item['timestamp'] = pretty.date(item["timestamp"])
|
||||
|
||||
# Check if subtitles is blacklisted
|
||||
item.update({"blacklisted": False})
|
||||
|
@ -174,8 +144,8 @@ class EpisodesHistory(Resource):
|
|||
item.update({"blacklisted": True})
|
||||
break
|
||||
|
||||
count = TableHistory.select()\
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
|
||||
count = TableHistory.select() \
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
|
||||
.where(TableEpisodes.title.is_null(False)).count()
|
||||
|
||||
return {'data': episode_history, 'total': count}
|
||||
|
|
|
@ -8,7 +8,7 @@ from functools import reduce
|
|||
from app.database import get_exclusion_clause, TableEpisodes, TableShows
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessEpisode
|
||||
from ..utils import authenticate, postprocess
|
||||
|
||||
api_ns_episodes_wanted = Namespace('Episodes Wanted', description='List episodes wanted subtitles')
|
||||
|
||||
|
@ -65,7 +65,7 @@ class EpisodesWanted(Resource):
|
|||
TableEpisodes.missing_subtitles,
|
||||
TableEpisodes.sonarrSeriesId,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
TableEpisodes.scene_name.alias('sceneName'),
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.tags,
|
||||
TableEpisodes.failedAttempts,
|
||||
TableShows.seriesType)\
|
||||
|
@ -82,20 +82,20 @@ class EpisodesWanted(Resource):
|
|||
TableEpisodes.missing_subtitles,
|
||||
TableEpisodes.sonarrSeriesId,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
TableEpisodes.scene_name.alias('sceneName'),
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.tags,
|
||||
TableEpisodes.failedAttempts,
|
||||
TableShows.seriesType)\
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
.where(wanted_condition)\
|
||||
.order_by(TableEpisodes.rowid.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
.order_by(TableEpisodes.rowid.desc())
|
||||
if length > 0:
|
||||
data = data.limit(length).offset(start)
|
||||
data = data.dicts()
|
||||
data = list(data)
|
||||
|
||||
for item in data:
|
||||
postprocessEpisode(item)
|
||||
postprocess(item)
|
||||
|
||||
count_conditions = [(TableEpisodes.missing_subtitles != '[]')]
|
||||
count_conditions += get_exclusion_clause('series')
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
import datetime
|
||||
import operator
|
||||
import itertools
|
||||
|
@ -63,8 +62,8 @@ class HistoryStats(Resource):
|
|||
elif timeframe == 'week':
|
||||
delay = 6 * 24 * 60 * 60
|
||||
|
||||
now = time.time()
|
||||
past = now - delay
|
||||
now = datetime.datetime.now()
|
||||
past = now - datetime.timedelta(seconds=delay)
|
||||
|
||||
history_where_clauses = [(TableHistory.timestamp.between(past, now))]
|
||||
history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]
|
||||
|
@ -92,7 +91,7 @@ class HistoryStats(Resource):
|
|||
.dicts()
|
||||
data_series = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
|
||||
itertools.groupby(list(data_series),
|
||||
key=lambda x: datetime.datetime.fromtimestamp(x['timestamp']).strftime(
|
||||
key=lambda x: x['timestamp'].strftime(
|
||||
'%Y-%m-%d'))]
|
||||
|
||||
data_movies = TableHistoryMovie.select(TableHistoryMovie.timestamp, TableHistoryMovie.id) \
|
||||
|
@ -100,7 +99,7 @@ class HistoryStats(Resource):
|
|||
.dicts()
|
||||
data_movies = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
|
||||
itertools.groupby(list(data_movies),
|
||||
key=lambda x: datetime.datetime.fromtimestamp(x['timestamp']).strftime(
|
||||
key=lambda x: x['timestamp'].strftime(
|
||||
'%Y-%m-%d'))]
|
||||
|
||||
for dt in rrule.rrule(rrule.DAILY,
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import pretty
|
||||
|
||||
from flask_restx import Resource, Namespace, reqparse, fields
|
||||
|
@ -13,7 +12,7 @@ from subtitles.mass_download import movies_download_subtitles
|
|||
from app.event_handler import event_stream
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
from ..utils import authenticate, postprocess
|
||||
|
||||
api_ns_movies_blacklist = Namespace('Movies Blacklist', description='List, add or remove subtitles to or from '
|
||||
'movies blacklist')
|
||||
|
@ -54,18 +53,17 @@ class MoviesBlacklist(Resource):
|
|||
TableBlacklistMovie.language,
|
||||
TableBlacklistMovie.timestamp)\
|
||||
.join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
|
||||
.order_by(TableBlacklistMovie.timestamp.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
data = list(data)
|
||||
.order_by(TableBlacklistMovie.timestamp.desc())
|
||||
if length > 0:
|
||||
data = data.limit(length).offset(start)
|
||||
data = list(data.dicts())
|
||||
|
||||
for item in data:
|
||||
postprocessMovie(item)
|
||||
postprocess(item)
|
||||
|
||||
# Make timestamp pretty
|
||||
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
|
||||
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
|
||||
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
|
||||
item.update({'timestamp': pretty.date(item['timestamp'])})
|
||||
|
||||
return data
|
||||
|
||||
|
|
|
@ -1,21 +1,18 @@
|
|||
# coding=utf-8
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import operator
|
||||
import pretty
|
||||
|
||||
from flask_restx import Resource, Namespace, reqparse, fields
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
from datetime import timedelta
|
||||
|
||||
from app.database import get_exclusion_clause, TableMovies, TableHistoryMovie, TableBlacklistMovie
|
||||
from app.config import settings
|
||||
from app.database import TableMovies, TableHistoryMovie, TableBlacklistMovie
|
||||
from subtitles.upgrade import get_upgradable_movies_subtitles
|
||||
from utilities.path_mappings import path_mappings
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
from api.utils import authenticate, postprocess
|
||||
|
||||
api_ns_movies_history = Namespace('Movies History', description='List movies history events')
|
||||
|
||||
|
@ -66,42 +63,14 @@ class MoviesHistory(Resource):
|
|||
length = args.get('length')
|
||||
radarrid = args.get('radarrid')
|
||||
|
||||
upgradable_movies = []
|
||||
upgradable_movies_not_perfect = []
|
||||
if settings.general.getboolean('upgrade_subs'):
|
||||
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
|
||||
minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
|
||||
datetime.datetime(1970, 1, 1)).total_seconds()
|
||||
|
||||
if settings.general.getboolean('upgrade_manual'):
|
||||
query_actions = [1, 2, 3, 6]
|
||||
else:
|
||||
query_actions = [1, 3]
|
||||
|
||||
upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)),
|
||||
(TableHistoryMovie.timestamp > minimum_timestamp),
|
||||
(TableHistoryMovie.score.is_null(False))]
|
||||
upgradable_movies_conditions += get_exclusion_clause('movie')
|
||||
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
|
||||
fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
|
||||
TableHistoryMovie.score,
|
||||
TableMovies.tags,
|
||||
TableMovies.monitored)\
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
|
||||
.where(reduce(operator.and_, upgradable_movies_conditions))\
|
||||
.group_by(TableHistoryMovie.video_path)\
|
||||
.dicts()
|
||||
upgradable_movies = list(upgradable_movies)
|
||||
|
||||
for upgradable_movie in upgradable_movies:
|
||||
if upgradable_movie['timestamp'] > minimum_timestamp:
|
||||
try:
|
||||
int(upgradable_movie['score'])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
if int(upgradable_movie['score']) < 120:
|
||||
upgradable_movies_not_perfect.append(upgradable_movie)
|
||||
upgradable_movies_not_perfect = get_upgradable_movies_subtitles()
|
||||
if len(upgradable_movies_not_perfect):
|
||||
upgradable_movies_not_perfect = [{"video_path": x['video_path'],
|
||||
"timestamp": x['timestamp'],
|
||||
"score": x['score'],
|
||||
"tags": x['tags'],
|
||||
"monitored": x['monitored']}
|
||||
for x in upgradable_movies_not_perfect]
|
||||
|
||||
query_conditions = [(TableMovies.title.is_null(False))]
|
||||
if radarrid:
|
||||
|
@ -122,14 +91,13 @@ class MoviesHistory(Resource):
|
|||
TableHistoryMovie.subs_id,
|
||||
TableHistoryMovie.provider,
|
||||
TableHistoryMovie.subtitles_path,
|
||||
TableHistoryMovie.video_path)\
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
|
||||
.where(query_condition)\
|
||||
.order_by(TableHistoryMovie.timestamp.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
movie_history = list(movie_history)
|
||||
TableHistoryMovie.video_path) \
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
|
||||
.where(query_condition) \
|
||||
.order_by(TableHistoryMovie.timestamp.desc())
|
||||
if length > 0:
|
||||
movie_history = movie_history.limit(length).offset(start)
|
||||
movie_history = list(movie_history.dicts())
|
||||
|
||||
blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
|
||||
blacklist_db = list(blacklist_db)
|
||||
|
@ -137,24 +105,25 @@ class MoviesHistory(Resource):
|
|||
for item in movie_history:
|
||||
# Mark movies as upgradable or not
|
||||
item.update({"upgradable": False})
|
||||
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
|
||||
"tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect: # noqa: E129
|
||||
if {"video_path": str(item['path']), "timestamp": item['timestamp'], "score": item['score'],
|
||||
"tags": str(item['tags']),
|
||||
"monitored": str(item['monitored'])} in upgradable_movies_not_perfect: # noqa: E129
|
||||
if os.path.exists(path_mappings.path_replace_movie(item['subtitles_path'])) and \
|
||||
os.path.exists(path_mappings.path_replace_movie(item['video_path'])):
|
||||
item.update({"upgradable": True})
|
||||
|
||||
del item['path']
|
||||
|
||||
postprocessMovie(item)
|
||||
postprocess(item)
|
||||
|
||||
if item['score']:
|
||||
item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"
|
||||
|
||||
# Make timestamp pretty
|
||||
if item['timestamp']:
|
||||
item["raw_timestamp"] = int(item['timestamp'])
|
||||
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
|
||||
item['timestamp'] = pretty.date(item["raw_timestamp"])
|
||||
item["raw_timestamp"] = item['timestamp'].timestamp()
|
||||
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
|
||||
item['timestamp'] = pretty.date(item["timestamp"])
|
||||
|
||||
# Check if subtitles is blacklisted
|
||||
item.update({"blacklisted": False})
|
||||
|
@ -165,9 +134,9 @@ class MoviesHistory(Resource):
|
|||
item.update({"blacklisted": True})
|
||||
break
|
||||
|
||||
count = TableHistoryMovie.select()\
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
|
||||
.where(TableMovies.title.is_null(False))\
|
||||
count = TableHistoryMovie.select() \
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
|
||||
.where(TableMovies.title.is_null(False)) \
|
||||
.count()
|
||||
|
||||
return {'data': movie_history, 'total': count}
|
||||
|
|
|
@ -9,8 +9,7 @@ from subtitles.wanted import wanted_search_missing_subtitles_movies
|
|||
from subtitles.mass_download import movies_download_subtitles
|
||||
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessMovie, None_Keys
|
||||
|
||||
from api.utils import authenticate, None_Keys, postprocess
|
||||
|
||||
api_ns_movies = Namespace('Movies', description='List movies metadata, update movie languages profile or run actions '
|
||||
'for specific movies.')
|
||||
|
@ -82,10 +81,13 @@ class Movies(Resource):
|
|||
.order_by(TableMovies.sortTitle)\
|
||||
.dicts()
|
||||
else:
|
||||
result = TableMovies.select().order_by(TableMovies.sortTitle).limit(length).offset(start).dicts()
|
||||
result = TableMovies.select().order_by(TableMovies.sortTitle)
|
||||
if length > 0:
|
||||
result = result.limit(length).offset(start)
|
||||
result = result.dicts()
|
||||
result = list(result)
|
||||
for item in result:
|
||||
postprocessMovie(item)
|
||||
postprocess(item)
|
||||
|
||||
return {'data': result, 'total': count}
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import logging
|
||||
|
||||
|
@ -20,7 +21,6 @@ from app.config import settings
|
|||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
api_ns_movies_subtitles = Namespace('Movies Subtitles', description='Download, upload or delete movies subtitles')
|
||||
|
||||
|
||||
|
@ -42,12 +42,13 @@ class MoviesSubtitles(Resource):
|
|||
args = self.patch_request_parser.parse_args()
|
||||
radarrId = args.get('radarrid')
|
||||
|
||||
movieInfo = TableMovies.select(TableMovies.title,
|
||||
TableMovies.path,
|
||||
TableMovies.sceneName,
|
||||
TableMovies.audio_language)\
|
||||
.where(TableMovies.radarrId == radarrId)\
|
||||
.dicts()\
|
||||
movieInfo = TableMovies.select(
|
||||
TableMovies.title,
|
||||
TableMovies.path,
|
||||
TableMovies.sceneName,
|
||||
TableMovies.audio_language) \
|
||||
.where(TableMovies.radarrId == radarrId) \
|
||||
.dicts() \
|
||||
.get_or_none()
|
||||
|
||||
if not movieInfo:
|
||||
|
@ -57,44 +58,26 @@ class MoviesSubtitles(Resource):
|
|||
sceneName = movieInfo['sceneName'] or 'None'
|
||||
|
||||
title = movieInfo['title']
|
||||
audio_language = movieInfo['audio_language']
|
||||
|
||||
language = args.get('language')
|
||||
hi = args.get('hi').capitalize()
|
||||
forced = args.get('forced').capitalize()
|
||||
|
||||
audio_language_list = get_audio_profile_languages(movie_id=radarrId)
|
||||
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
audio_language = None
|
||||
|
||||
try:
|
||||
with contextlib.suppress(OSError):
|
||||
result = list(generate_subtitles(moviePath, [(language, hi, forced)], audio_language,
|
||||
sceneName, title, 'movie', profile_id=get_profile_id(movie_id=radarrId)))
|
||||
if result:
|
||||
result = result[0]
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
history_log_movie(1, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
|
||||
send_notifications_movie(radarrId, message)
|
||||
store_subtitles_movie(path, moviePath)
|
||||
history_log_movie(1, radarrId, result)
|
||||
store_subtitles_movie(result.path, moviePath)
|
||||
else:
|
||||
event_stream(type='movie', payload=radarrId)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return '', 204
|
||||
|
||||
# POST: Upload Subtitles
|
||||
|
@ -116,9 +99,7 @@ class MoviesSubtitles(Resource):
|
|||
# TODO: Support Multiply Upload
|
||||
args = self.post_request_parser.parse_args()
|
||||
radarrId = args.get('radarrid')
|
||||
movieInfo = TableMovies.select(TableMovies.title,
|
||||
TableMovies.path,
|
||||
TableMovies.sceneName,
|
||||
movieInfo = TableMovies.select(TableMovies.path,
|
||||
TableMovies.audio_language) \
|
||||
.where(TableMovies.radarrId == radarrId) \
|
||||
.dicts() \
|
||||
|
@ -128,14 +109,16 @@ class MoviesSubtitles(Resource):
|
|||
return 'Movie not found', 404
|
||||
|
||||
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
|
||||
sceneName = movieInfo['sceneName'] or 'None'
|
||||
|
||||
title = movieInfo['title']
|
||||
audioLanguage = movieInfo['audio_language']
|
||||
audio_language = get_audio_profile_languages(movieInfo['audio_language'])
|
||||
if len(audio_language) and isinstance(audio_language[0], dict):
|
||||
audio_language = audio_language[0]
|
||||
else:
|
||||
audio_language = {'name': '', 'code2': '', 'code3': ''}
|
||||
|
||||
language = args.get('language')
|
||||
forced = True if args.get('forced') == 'true' else False
|
||||
hi = True if args.get('hi') == 'true' else False
|
||||
forced = args.get('forced') == 'true'
|
||||
hi = args.get('hi') == 'true'
|
||||
subFile = args.get('file')
|
||||
|
||||
_, ext = os.path.splitext(subFile.filename)
|
||||
|
@ -143,38 +126,24 @@ class MoviesSubtitles(Resource):
|
|||
if not isinstance(ext, str) or ext.lower() not in SUBTITLE_EXTENSIONS:
|
||||
raise ValueError('A subtitle of an invalid format was uploaded.')
|
||||
|
||||
try:
|
||||
with contextlib.suppress(OSError):
|
||||
result = manual_upload_subtitle(path=moviePath,
|
||||
language=language,
|
||||
forced=forced,
|
||||
hi=hi,
|
||||
title=title,
|
||||
scene_name=sceneName,
|
||||
media_type='movie',
|
||||
subtitle=subFile,
|
||||
audio_language=audioLanguage)
|
||||
audio_language=audio_language)
|
||||
|
||||
if not result:
|
||||
logging.debug(f"BAZARR unable to process subtitles for this movie: {moviePath}")
|
||||
else:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
subs_path = result[2]
|
||||
if hi:
|
||||
language_code = language + ":hi"
|
||||
elif forced:
|
||||
language_code = language + ":forced"
|
||||
else:
|
||||
language_code = language
|
||||
provider = "manual"
|
||||
score = 120
|
||||
history_log_movie(4, radarrId, message, path, language_code, provider, score, subtitles_path=subs_path)
|
||||
history_log_movie(4, radarrId, result, fake_provider=provider, fake_score=score)
|
||||
if not settings.general.getboolean('dont_notify_manual_actions'):
|
||||
send_notifications_movie(radarrId, message)
|
||||
store_subtitles_movie(path, moviePath)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
send_notifications_movie(radarrId, result.message)
|
||||
store_subtitles_movie(result.path, moviePath)
|
||||
return '', 204
|
||||
|
||||
# DELETE: Delete Subtitles
|
||||
|
|
|
@ -8,7 +8,7 @@ from functools import reduce
|
|||
from app.database import get_exclusion_clause, TableMovies
|
||||
from api.swaggerui import subtitles_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessMovie
|
||||
from api.utils import authenticate, postprocess
|
||||
|
||||
|
||||
api_ns_movies_wanted = Namespace('Movies Wanted', description='List movies wanted subtitles')
|
||||
|
@ -75,14 +75,14 @@ class MoviesWanted(Resource):
|
|||
TableMovies.tags,
|
||||
TableMovies.monitored)\
|
||||
.where(wanted_condition)\
|
||||
.order_by(TableMovies.rowid.desc())\
|
||||
.limit(length)\
|
||||
.offset(start)\
|
||||
.dicts()
|
||||
.order_by(TableMovies.rowid.desc())
|
||||
if length > 0:
|
||||
result = result.limit(length).offset(start)
|
||||
result = result.dicts()
|
||||
result = list(result)
|
||||
|
||||
for item in result:
|
||||
postprocessMovie(item)
|
||||
postprocess(item)
|
||||
|
||||
count_conditions = [(TableMovies.missing_subtitles != '[]')]
|
||||
count_conditions += get_exclusion_clause('movie')
|
||||
|
|
|
@ -13,7 +13,6 @@ from subtitles.indexer.series import store_subtitles
|
|||
|
||||
from ..utils import authenticate
|
||||
|
||||
|
||||
api_ns_providers_episodes = Namespace('Providers Episodes', description='List and download episodes subtitles manually')
|
||||
|
||||
|
||||
|
@ -49,10 +48,10 @@ class ProviderEpisodes(Resource):
|
|||
args = self.get_request_parser.parse_args()
|
||||
sonarrEpisodeId = args.get('episodeid')
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.path,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.title,
|
||||
TableShows.profileId) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
|
||||
.dicts() \
|
||||
.get_or_none()
|
||||
|
@ -62,7 +61,7 @@ class ProviderEpisodes(Resource):
|
|||
|
||||
title = episodeInfo['title']
|
||||
episodePath = path_mappings.path_replace(episodeInfo['path'])
|
||||
sceneName = episodeInfo['scene_name'] or "None"
|
||||
sceneName = episodeInfo['sceneName'] or "None"
|
||||
profileId = episodeInfo['profileId']
|
||||
|
||||
providers_list = get_providers()
|
||||
|
@ -92,9 +91,11 @@ class ProviderEpisodes(Resource):
|
|||
args = self.post_request_parser.parse_args()
|
||||
sonarrSeriesId = args.get('seriesid')
|
||||
sonarrEpisodeId = args.get('episodeid')
|
||||
episodeInfo = TableEpisodes.select(TableEpisodes.path,
|
||||
TableEpisodes.scene_name,
|
||||
TableShows.title) \
|
||||
episodeInfo = TableEpisodes.select(
|
||||
TableEpisodes.audio_language,
|
||||
TableEpisodes.path,
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.title) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
|
||||
.dicts() \
|
||||
|
@ -105,7 +106,7 @@ class ProviderEpisodes(Resource):
|
|||
|
||||
title = episodeInfo['title']
|
||||
episodePath = path_mappings.path_replace(episodeInfo['path'])
|
||||
sceneName = episodeInfo['scene_name'] or "None"
|
||||
sceneName = episodeInfo['sceneName'] or "None"
|
||||
|
||||
hi = args.get('hi').capitalize()
|
||||
forced = args.get('forced').capitalize()
|
||||
|
@ -113,7 +114,7 @@ class ProviderEpisodes(Resource):
|
|||
selected_provider = args.get('provider')
|
||||
subtitle = args.get('subtitle')
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
|
||||
audio_language_list = get_audio_profile_languages(episodeInfo["audio_language"])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -123,26 +124,11 @@ class ProviderEpisodes(Resource):
|
|||
result = manual_download_subtitle(episodePath, audio_language, hi, forced, subtitle, selected_provider,
|
||||
sceneName, title, 'series', use_original_format,
|
||||
profile_id=get_profile_id(episode_id=sonarrEpisodeId))
|
||||
if result is not None:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code, provider, score, subs_id,
|
||||
subs_path)
|
||||
if result:
|
||||
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
|
||||
if not settings.general.getboolean('dont_notify_manual_actions'):
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
|
||||
store_subtitles(path, episodePath)
|
||||
return result, 201
|
||||
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
|
||||
store_subtitles(result.path, episodePath)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
|
|
@ -110,7 +110,7 @@ class ProviderMovies(Resource):
|
|||
selected_provider = args.get('provider')
|
||||
subtitle = args.get('subtitle')
|
||||
|
||||
audio_language_list = get_audio_profile_languages(movie_id=radarrId)
|
||||
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -121,23 +121,10 @@ class ProviderMovies(Resource):
|
|||
sceneName, title, 'movie', use_original_format,
|
||||
profile_id=get_profile_id(movie_id=radarrId))
|
||||
if result is not None:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
history_log_movie(2, radarrId, message, path, language_code, provider, score, subs_id, subs_path)
|
||||
history_log_movie(2, radarrId, result)
|
||||
if not settings.general.getboolean('dont_notify_manual_actions'):
|
||||
send_notifications_movie(radarrId, message)
|
||||
store_subtitles_movie(path, moviePath)
|
||||
send_notifications_movie(radarrId, result.message)
|
||||
store_subtitles_movie(result.path, moviePath)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
|
|
@ -4,6 +4,7 @@ import operator
|
|||
|
||||
from flask_restx import Resource, Namespace, reqparse, fields
|
||||
from functools import reduce
|
||||
from peewee import fn, JOIN
|
||||
|
||||
from app.database import get_exclusion_clause, TableEpisodes, TableShows
|
||||
from subtitles.indexer.series import list_missing_subtitles, series_scan_subtitles
|
||||
|
@ -12,8 +13,7 @@ from subtitles.wanted import wanted_search_missing_subtitles_series
|
|||
from app.event_handler import event_stream
|
||||
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
|
||||
|
||||
from ..utils import authenticate, postprocessSeries, None_Keys
|
||||
|
||||
from api.utils import authenticate, None_Keys, postprocess
|
||||
|
||||
api_ns_series = Namespace('Series', description='List series metadata, update series languages profile or run actions '
|
||||
'for specific series.')
|
||||
|
@ -34,8 +34,8 @@ class Series(Resource):
|
|||
data_model = api_ns_series.model('series_data_model', {
|
||||
'alternativeTitles': fields.List(fields.String),
|
||||
'audio_language': fields.Nested(get_audio_language_model),
|
||||
'episodeFileCount': fields.Integer(),
|
||||
'episodeMissingCount': fields.Integer(),
|
||||
'episodeFileCount': fields.Integer(default=0),
|
||||
'episodeMissingCount': fields.Integer(default=0),
|
||||
'fanart': fields.String(),
|
||||
'imdbId': fields.String(),
|
||||
'monitored': fields.Boolean(),
|
||||
|
@ -70,40 +70,37 @@ class Series(Resource):
|
|||
seriesId = args.get('seriesid[]')
|
||||
|
||||
count = TableShows.select().count()
|
||||
episodeFileCount = TableEpisodes.select(TableShows.sonarrSeriesId,
|
||||
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeFileCount')) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.group_by(TableShows.sonarrSeriesId).alias('episodeFileCount')
|
||||
|
||||
episodes_missing_conditions = [(TableEpisodes.missing_subtitles != '[]')]
|
||||
episodes_missing_conditions += get_exclusion_clause('series')
|
||||
|
||||
episodeMissingCount = (TableEpisodes.select(TableShows.sonarrSeriesId,
|
||||
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeMissingCount'))
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))
|
||||
.where(reduce(operator.and_, episodes_missing_conditions)).group_by(
|
||||
TableShows.sonarrSeriesId).alias('episodeMissingCount'))
|
||||
|
||||
result = TableShows.select(TableShows, episodeFileCount.c.episodeFileCount,
|
||||
episodeMissingCount.c.episodeMissingCount).join(episodeFileCount,
|
||||
join_type=JOIN.LEFT_OUTER, on=(
|
||||
TableShows.sonarrSeriesId ==
|
||||
episodeFileCount.c.sonarrSeriesId)
|
||||
) \
|
||||
.join(episodeMissingCount, join_type=JOIN.LEFT_OUTER,
|
||||
on=(TableShows.sonarrSeriesId == episodeMissingCount.c.sonarrSeriesId)).order_by(TableShows.sortTitle)
|
||||
|
||||
if len(seriesId) != 0:
|
||||
result = TableShows.select() \
|
||||
.where(TableShows.sonarrSeriesId.in_(seriesId)) \
|
||||
.order_by(TableShows.sortTitle).dicts()
|
||||
else:
|
||||
result = TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts()
|
||||
|
||||
result = list(result)
|
||||
result = result.where(TableShows.sonarrSeriesId.in_(seriesId))
|
||||
elif length > 0:
|
||||
result = result.limit(length).offset(start)
|
||||
result = list(result.dicts())
|
||||
|
||||
for item in result:
|
||||
postprocessSeries(item)
|
||||
|
||||
# Add missing subtitles episode count
|
||||
episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']),
|
||||
(TableEpisodes.missing_subtitles != '[]')]
|
||||
episodes_missing_conditions += get_exclusion_clause('series')
|
||||
|
||||
episodeMissingCount = TableEpisodes.select(TableShows.tags,
|
||||
TableEpisodes.monitored,
|
||||
TableShows.seriesType) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(reduce(operator.and_, episodes_missing_conditions)) \
|
||||
.count()
|
||||
item.update({"episodeMissingCount": episodeMissingCount})
|
||||
|
||||
# Add episode count
|
||||
episodeFileCount = TableEpisodes.select(TableShows.tags,
|
||||
TableEpisodes.monitored,
|
||||
TableShows.seriesType) \
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']) \
|
||||
.count()
|
||||
item.update({"episodeFileCount": episodeFileCount})
|
||||
postprocess(item)
|
||||
|
||||
return {'data': result, 'total': count}
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ import gc
|
|||
from flask_restx import Resource, Namespace, reqparse
|
||||
|
||||
from app.database import TableEpisodes, TableMovies
|
||||
from languages.get_languages import alpha3_from_alpha2
|
||||
from utilities.path_mappings import path_mappings
|
||||
from subtitles.tools.subsyncer import SubSyncer
|
||||
from subtitles.tools.translate import translate_subtitles_file
|
||||
|
@ -81,7 +82,7 @@ class Subtitles(Resource):
|
|||
del subsync
|
||||
gc.collect()
|
||||
elif action == 'translate':
|
||||
from_language = os.path.splitext(subtitles_path)[0].rsplit(".", 1)[1].replace('_', '-')
|
||||
from_language = subtitles_lang_from_filename(subtitles_path)
|
||||
dest_language = language
|
||||
forced = True if args.get('forced') == 'true' else False
|
||||
hi = True if args.get('hi') == 'true' else False
|
||||
|
@ -93,7 +94,8 @@ class Subtitles(Resource):
|
|||
radarr_id=id)
|
||||
else:
|
||||
use_original_format = True if args.get('original_format') == 'true' else False
|
||||
subtitles_apply_mods(language, subtitles_path, [action], use_original_format)
|
||||
subtitles_apply_mods(language=language, subtitle_path=subtitles_path, mods=[action],
|
||||
use_original_format=use_original_format, video_path=video_path)
|
||||
|
||||
# apply chmod if required
|
||||
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
|
||||
|
@ -110,3 +112,25 @@ class Subtitles(Resource):
|
|||
event_stream(type='movie', payload=int(id))
|
||||
|
||||
return '', 204
|
||||
|
||||
|
||||
def subtitles_lang_from_filename(path):
|
||||
split_extensionless_path = os.path.splitext(path.lower())[0].rsplit(".", 2)
|
||||
|
||||
if len(split_extensionless_path) < 2:
|
||||
return None
|
||||
elif len(split_extensionless_path) == 2:
|
||||
return_lang = split_extensionless_path[-1]
|
||||
else:
|
||||
first_ext = split_extensionless_path[-1]
|
||||
second_ext = split_extensionless_path[-2]
|
||||
|
||||
if first_ext in ['hi', 'sdh', 'cc']:
|
||||
if alpha3_from_alpha2(second_ext):
|
||||
return_lang = second_ext
|
||||
else:
|
||||
return first_ext
|
||||
else:
|
||||
return_lang = first_ext
|
||||
|
||||
return return_lang.replace('_', '-')
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
from .system import api_ns_system
|
||||
from .searches import api_ns_system_searches
|
||||
from .account import api_ns_system_account
|
||||
from .announcements import api_ns_system_announcements
|
||||
from .backups import api_ns_system_backups
|
||||
from .tasks import api_ns_system_tasks
|
||||
from .logs import api_ns_system_logs
|
||||
|
@ -17,6 +18,7 @@ from .notifications import api_ns_system_notifications
|
|||
api_ns_list_system = [
|
||||
api_ns_system,
|
||||
api_ns_system_account,
|
||||
api_ns_system_announcements,
|
||||
api_ns_system_backups,
|
||||
api_ns_system_health,
|
||||
api_ns_system_languages,
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
# coding=utf-8
|
||||
|
||||
from flask_restx import Resource, Namespace, reqparse
|
||||
|
||||
from app.announcements import get_all_announcements, mark_announcement_as_dismissed
|
||||
|
||||
from ..utils import authenticate
|
||||
|
||||
api_ns_system_announcements = Namespace('System Announcements', description='List announcements relative to Bazarr')
|
||||
|
||||
|
||||
@api_ns_system_announcements.route('system/announcements')
|
||||
class SystemAnnouncements(Resource):
|
||||
@authenticate
|
||||
@api_ns_system_announcements.doc(parser=None)
|
||||
@api_ns_system_announcements.response(200, 'Success')
|
||||
@api_ns_system_announcements.response(401, 'Not Authenticated')
|
||||
def get(self):
|
||||
"""List announcements relative to Bazarr"""
|
||||
return {'data': get_all_announcements()}
|
||||
|
||||
post_request_parser = reqparse.RequestParser()
|
||||
post_request_parser.add_argument('hash', type=str, required=True, help='hash of the announcement to dismiss')
|
||||
|
||||
@authenticate
|
||||
@api_ns_system_announcements.doc(parser=post_request_parser)
|
||||
@api_ns_system_announcements.response(204, 'Success')
|
||||
@api_ns_system_announcements.response(401, 'Not Authenticated')
|
||||
def post(self):
|
||||
"""Mark announcement as dismissed"""
|
||||
args = self.post_request_parser.parse_args()
|
||||
hashed_announcement = args.get('hash')
|
||||
|
||||
mark_announcement_as_dismissed(hashed_announcement=hashed_announcement)
|
||||
return '', 204
|
|
@ -6,7 +6,7 @@ from flask import request, jsonify
|
|||
from flask_restx import Resource, Namespace
|
||||
|
||||
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableShows, TableMovies, \
|
||||
TableSettingsNotifier
|
||||
TableSettingsNotifier, update_profile_id_list
|
||||
from app.event_handler import event_stream
|
||||
from app.config import settings, save_settings, get_settings
|
||||
from app.scheduler import scheduler
|
||||
|
@ -92,6 +92,9 @@ class SystemSettings(Resource):
|
|||
# Remove deleted profiles
|
||||
TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute()
|
||||
|
||||
# invalidate cache
|
||||
update_profile_id_list.invalidate()
|
||||
|
||||
event_stream("languages")
|
||||
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
|
|
|
@ -36,178 +36,61 @@ def authenticate(actual_method):
|
|||
|
||||
def postprocess(item):
|
||||
# Remove ffprobe_cache
|
||||
if 'ffprobe_cache' in item:
|
||||
del (item['ffprobe_cache'])
|
||||
if item.get('movie_file_id'):
|
||||
path_replace = path_mappings.path_replace_movie
|
||||
else:
|
||||
path_replace = path_mappings.path_replace
|
||||
if item.get('ffprobe_cache'):
|
||||
del item['ffprobe_cache']
|
||||
|
||||
# Parse tags
|
||||
if 'tags' in item:
|
||||
if item['tags'] is None:
|
||||
item['tags'] = []
|
||||
else:
|
||||
item['tags'] = ast.literal_eval(item['tags'])
|
||||
|
||||
if 'monitored' in item:
|
||||
if item['monitored'] is None:
|
||||
item['monitored'] = False
|
||||
else:
|
||||
item['monitored'] = item['monitored'] == 'True'
|
||||
|
||||
if 'hearing_impaired' in item and item['hearing_impaired'] is not None:
|
||||
if item['hearing_impaired'] is None:
|
||||
item['hearing_impaired'] = False
|
||||
else:
|
||||
item['hearing_impaired'] = item['hearing_impaired'] == 'True'
|
||||
|
||||
if 'language' in item:
|
||||
if item['language'] == 'None':
|
||||
item['language'] = None
|
||||
elif item['language'] is not None:
|
||||
splitted_language = item['language'].split(':')
|
||||
item['language'] = {"name": language_from_alpha2(splitted_language[0]),
|
||||
"code2": splitted_language[0],
|
||||
"code3": alpha3_from_alpha2(splitted_language[0]),
|
||||
"forced": True if item['language'].endswith(':forced') else False,
|
||||
"hi": True if item['language'].endswith(':hi') else False}
|
||||
|
||||
|
||||
def postprocessSeries(item):
|
||||
postprocess(item)
|
||||
# Parse audio language
|
||||
if 'audio_language' in item and item['audio_language'] is not None:
|
||||
item['audio_language'] = get_audio_profile_languages(series_id=item['sonarrSeriesId'])
|
||||
if item.get('audio_language'):
|
||||
item['audio_language'] = get_audio_profile_languages(item['audio_language'])
|
||||
|
||||
if 'alternateTitles' in item:
|
||||
if item['alternateTitles'] is None:
|
||||
item['alternativeTitles'] = []
|
||||
else:
|
||||
item['alternativeTitles'] = ast.literal_eval(item['alternateTitles'])
|
||||
del item["alternateTitles"]
|
||||
|
||||
# Parse seriesType
|
||||
if 'seriesType' in item and item['seriesType'] is not None:
|
||||
item['seriesType'] = item['seriesType'].capitalize()
|
||||
|
||||
if 'path' in item:
|
||||
item['path'] = path_mappings.path_replace(item['path'])
|
||||
|
||||
# map poster and fanart to server proxy
|
||||
if 'poster' in item:
|
||||
poster = item['poster']
|
||||
item['poster'] = f"{base_url}/images/series{poster}" if poster else None
|
||||
|
||||
if 'fanart' in item:
|
||||
fanart = item['fanart']
|
||||
item['fanart'] = f"{base_url}/images/series{fanart}" if fanart else None
|
||||
|
||||
|
||||
def postprocessEpisode(item):
|
||||
postprocess(item)
|
||||
if 'audio_language' in item and item['audio_language'] is not None:
|
||||
item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId'])
|
||||
|
||||
if 'subtitles' in item:
|
||||
if item['subtitles'] is None:
|
||||
raw_subtitles = []
|
||||
else:
|
||||
raw_subtitles = ast.literal_eval(item['subtitles'])
|
||||
subtitles = []
|
||||
|
||||
for subs in raw_subtitles:
|
||||
subtitle = subs[0].split(':')
|
||||
sub = {"name": language_from_alpha2(subtitle[0]),
|
||||
"code2": subtitle[0],
|
||||
"code3": alpha3_from_alpha2(subtitle[0]),
|
||||
"path": path_mappings.path_replace(subs[1]),
|
||||
"forced": False,
|
||||
"hi": False}
|
||||
if len(subtitle) > 1:
|
||||
sub["forced"] = True if subtitle[1] == 'forced' else False
|
||||
sub["hi"] = True if subtitle[1] == 'hi' else False
|
||||
|
||||
subtitles.append(sub)
|
||||
|
||||
item.update({"subtitles": subtitles})
|
||||
|
||||
# Parse missing subtitles
|
||||
if 'missing_subtitles' in item:
|
||||
if item['missing_subtitles'] is None:
|
||||
item['missing_subtitles'] = []
|
||||
else:
|
||||
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
|
||||
for i, subs in enumerate(item['missing_subtitles']):
|
||||
subtitle = subs.split(':')
|
||||
item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
|
||||
"code2": subtitle[0],
|
||||
"code3": alpha3_from_alpha2(subtitle[0]),
|
||||
"forced": False,
|
||||
"hi": False}
|
||||
if len(subtitle) > 1:
|
||||
item['missing_subtitles'][i].update({
|
||||
"forced": True if subtitle[1] == 'forced' else False,
|
||||
"hi": True if subtitle[1] == 'hi' else False
|
||||
})
|
||||
|
||||
if 'scene_name' in item:
|
||||
item["sceneName"] = item["scene_name"]
|
||||
del item["scene_name"]
|
||||
|
||||
if 'path' in item and item['path']:
|
||||
# Provide mapped path
|
||||
item['path'] = path_mappings.path_replace(item['path'])
|
||||
|
||||
|
||||
# TODO: Move
|
||||
def postprocessMovie(item):
|
||||
postprocess(item)
|
||||
# Parse audio language
|
||||
if 'audio_language' in item and item['audio_language'] is not None:
|
||||
item['audio_language'] = get_audio_profile_languages(movie_id=item['radarrId'])
|
||||
# Make sure profileId is a valid None value
|
||||
if item.get('profileId') in None_Keys:
|
||||
item['profileId'] = None
|
||||
|
||||
# Parse alternate titles
|
||||
if 'alternativeTitles' in item:
|
||||
if item['alternativeTitles'] is None:
|
||||
item['alternativeTitles'] = []
|
||||
else:
|
||||
item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])
|
||||
if item.get('alternativeTitles'):
|
||||
item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])
|
||||
else:
|
||||
item['alternativeTitles'] = []
|
||||
|
||||
# Parse failed attempts
|
||||
if 'failedAttempts' in item:
|
||||
if item['failedAttempts']:
|
||||
item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])
|
||||
if item.get('failedAttempts'):
|
||||
item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])
|
||||
else:
|
||||
item['failedAttempts'] = []
|
||||
|
||||
# Parse subtitles
|
||||
if 'subtitles' in item:
|
||||
if item['subtitles'] is None:
|
||||
item['subtitles'] = []
|
||||
else:
|
||||
item['subtitles'] = ast.literal_eval(item['subtitles'])
|
||||
if item.get('subtitles'):
|
||||
item['subtitles'] = ast.literal_eval(item['subtitles'])
|
||||
for i, subs in enumerate(item['subtitles']):
|
||||
language = subs[0].split(':')
|
||||
item['subtitles'][i] = {"path": path_mappings.path_replace_movie(subs[1]),
|
||||
item['subtitles'][i] = {"path": path_replace(subs[1]),
|
||||
"name": language_from_alpha2(language[0]),
|
||||
"code2": language[0],
|
||||
"code3": alpha3_from_alpha2(language[0]),
|
||||
"forced": False,
|
||||
"hi": False}
|
||||
if len(language) > 1:
|
||||
item['subtitles'][i].update({
|
||||
"forced": True if language[1] == 'forced' else False,
|
||||
"hi": True if language[1] == 'hi' else False
|
||||
})
|
||||
|
||||
if settings.general.getboolean('embedded_subs_show_desired'):
|
||||
item['subtitles'][i].update(
|
||||
{
|
||||
"forced": language[1] == 'forced',
|
||||
"hi": language[1] == 'hi',
|
||||
}
|
||||
)
|
||||
if settings.general.getboolean('embedded_subs_show_desired') and item.get('profileId'):
|
||||
desired_lang_list = get_desired_languages(item['profileId'])
|
||||
item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]
|
||||
|
||||
if item['subtitles']:
|
||||
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))
|
||||
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))
|
||||
else:
|
||||
item['subtitles'] = []
|
||||
|
||||
# Parse missing subtitles
|
||||
if 'missing_subtitles' in item:
|
||||
if item['missing_subtitles'] is None:
|
||||
item['missing_subtitles'] = []
|
||||
else:
|
||||
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
|
||||
if item.get('missing_subtitles'):
|
||||
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
|
||||
for i, subs in enumerate(item['missing_subtitles']):
|
||||
language = subs.split(':')
|
||||
item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]),
|
||||
|
@ -216,25 +99,58 @@ def postprocessMovie(item):
|
|||
"forced": False,
|
||||
"hi": False}
|
||||
if len(language) > 1:
|
||||
item['missing_subtitles'][i].update({
|
||||
"forced": True if language[1] == 'forced' else False,
|
||||
"hi": True if language[1] == 'hi' else False
|
||||
})
|
||||
item['missing_subtitles'][i].update(
|
||||
{
|
||||
"forced": language[1] == 'forced',
|
||||
"hi": language[1] == 'hi',
|
||||
}
|
||||
)
|
||||
else:
|
||||
item['missing_subtitles'] = []
|
||||
|
||||
# Provide mapped path
|
||||
if 'path' in item:
|
||||
if item['path']:
|
||||
item['path'] = path_mappings.path_replace_movie(item['path'])
|
||||
# Parse tags
|
||||
if item.get('tags') is not None:
|
||||
item['tags'] = ast.literal_eval(item.get('tags', '[]'))
|
||||
else:
|
||||
item['tags'] = []
|
||||
if item.get('monitored'):
|
||||
item['monitored'] = item.get('monitored') == 'True'
|
||||
else:
|
||||
item['monitored'] = False
|
||||
if item.get('hearing_impaired'):
|
||||
item['hearing_impaired'] = item.get('hearing_impaired') == 'True'
|
||||
else:
|
||||
item['hearing_impaired'] = False
|
||||
|
||||
if 'subtitles_path' in item:
|
||||
if item.get('language'):
|
||||
if item['language'] == 'None':
|
||||
item['language'] = None
|
||||
if item['language'] is not None:
|
||||
splitted_language = item['language'].split(':')
|
||||
item['language'] = {
|
||||
"name": language_from_alpha2(splitted_language[0]),
|
||||
"code2": splitted_language[0],
|
||||
"code3": alpha3_from_alpha2(splitted_language[0]),
|
||||
"forced": bool(item['language'].endswith(':forced')),
|
||||
"hi": bool(item['language'].endswith(':hi')),
|
||||
}
|
||||
|
||||
# Parse seriesType
|
||||
if item.get('seriesType'):
|
||||
item['seriesType'] = item['seriesType'].capitalize()
|
||||
|
||||
if item.get('path'):
|
||||
item['path'] = path_replace(item['path'])
|
||||
|
||||
if item.get('subtitles_path'):
|
||||
# Provide mapped subtitles path
|
||||
item['subtitles_path'] = path_mappings.path_replace_movie(item['subtitles_path'])
|
||||
item['subtitles_path'] = path_replace(item['subtitles_path'])
|
||||
|
||||
# map poster and fanart to server proxy
|
||||
if 'poster' in item:
|
||||
if item.get('poster') is not None:
|
||||
poster = item['poster']
|
||||
item['poster'] = f"{base_url}/images/movies{poster}" if poster else None
|
||||
item['poster'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{poster}" if poster else None
|
||||
|
||||
if 'fanart' in item:
|
||||
if item.get('fanart') is not None:
|
||||
fanart = item['fanart']
|
||||
item['fanart'] = f"{base_url}/images/movies{fanart}" if fanart else None
|
||||
item['fanart'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{fanart}" if fanart else None
|
||||
|
|
|
@ -0,0 +1,113 @@
|
|||
# coding=utf-8
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
import requests
|
||||
import logging
|
||||
import json
|
||||
import pretty
|
||||
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
|
||||
from app.get_providers import get_enabled_providers
|
||||
from app.database import TableAnnouncements
|
||||
from .get_args import args
|
||||
|
||||
|
||||
# Announcements as receive by browser must be in the form of a list of dicts converted to JSON
|
||||
# [
|
||||
# {
|
||||
# 'text': 'some text',
|
||||
# 'link': 'http://to.somewhere.net',
|
||||
# 'hash': '',
|
||||
# 'dismissible': True,
|
||||
# 'timestamp': 1676236978,
|
||||
# 'enabled': True,
|
||||
# },
|
||||
# ]
|
||||
|
||||
|
||||
def parse_announcement_dict(announcement_dict):
|
||||
announcement_dict['timestamp'] = pretty.date(announcement_dict['timestamp'])
|
||||
announcement_dict['link'] = announcement_dict.get('link', '')
|
||||
announcement_dict['dismissible'] = announcement_dict.get('dismissible', True)
|
||||
announcement_dict['enabled'] = announcement_dict.get('enabled', True)
|
||||
announcement_dict['hash'] = hashlib.sha256(announcement_dict['text'].encode('UTF8')).hexdigest()
|
||||
|
||||
return announcement_dict
|
||||
|
||||
|
||||
def get_announcements_to_file():
    """Download the announcements feed from GitHub and cache it to disk.

    The JSON payload is written verbatim to
    ``<config_dir>/config/announcements.json`` where
    get_online_announcements() will later read it. Any network failure is
    logged and leaves a previously cached file untouched.
    """
    try:
        # A finite timeout is required: without one, requests never raises
        # Timeout, so the except clause below could never fire and a stalled
        # server would hang this call forever.
        r = requests.get("https://raw.githubusercontent.com/morpheus65535/bazarr-binaries/master/announcements.json",
                         timeout=10)
        # Raise HTTPError on 4xx/5xx so an error page is never cached as if
        # it were a valid announcements feed (requests does not raise on
        # status codes by itself).
        r.raise_for_status()
    except requests.exceptions.HTTPError:
        logging.exception("Error trying to get announcements from Github. Http error.")
    except requests.exceptions.ConnectionError:
        logging.exception("Error trying to get announcements from Github. Connection Error.")
    except requests.exceptions.Timeout:
        logging.exception("Error trying to get announcements from Github. Timeout Error.")
    except requests.exceptions.RequestException:
        logging.exception("Error trying to get announcements from Github.")
    else:
        with open(os.path.join(args.config_dir, 'config', 'announcements.json'), 'wb') as f:
            f.write(r.content)
|
||||
|
||||
|
||||
def get_online_announcements():
    """Return the announcements cached on disk by get_announcements_to_file().

    Returns an empty list when the cache file is missing, unreadable or not
    valid JSON. Each announcement dict is normalized so the optional
    'enabled' and 'dismissible' keys are always present (both default True).
    """
    try:
        with open(os.path.join(args.config_dir, 'config', 'announcements.json'), 'r') as f:
            data = json.load(f)
    except (OSError, json.JSONDecodeError):
        return []
    else:
        for announcement in data['data']:
            # Fix: mutate each announcement dict directly. The previous code
            # did data['data'][announcement][...] — indexing the list with the
            # dict itself — which raises TypeError as soon as an announcement
            # is missing one of these keys.
            if 'enabled' not in announcement:
                announcement['enabled'] = True
            if 'dismissible' not in announcement:
                announcement['dismissible'] = True

        return data['data']
|
||||
|
||||
|
||||
def get_local_announcements():
    """Build the announcements generated by Bazarr itself (no network access).

    Currently emits a single non-dismissible warning when the deprecated
    opensubtitles.org provider is still enabled. Every returned dict is
    normalized to carry 'enabled' and 'dismissible' keys (default True).
    """
    local_items = []

    # opensubtitles.org end-of-life
    providers = get_enabled_providers()
    if providers and 'opensubtitles' in providers:
        local_items.append({
            'text': 'Opensubtitles.org will be deprecated soon, migrate to Opensubtitles.com ASAP and disable this '
                    'provider to remove this announcement.',
            'link': 'https://wiki.bazarr.media/Troubleshooting/OpenSubtitles-migration/',
            'dismissible': False,
            'timestamp': 1676236978,
        })

    # Fill in the optional flags without overwriting explicit values.
    for item in local_items:
        item.setdefault('enabled', True)
        item.setdefault('dismissible', True)

    return local_items
|
||||
|
||||
|
||||
def get_all_announcements():
    """Return every enabled, not-yet-dismissed announcement, newest first.

    Merges the online (GitHub-cached) and local announcements, drops disabled
    ones, and filters out dismissible announcements whose text hash has been
    recorded in table_announcements by mark_announcement_as_dismissed().
    """
    pending = []
    for raw in get_online_announcements() + get_local_announcements():
        if not raw['enabled']:
            continue
        if raw['dismissible']:
            # Dismissals are keyed by the sha256 of the announcement text.
            digest = hashlib.sha256(raw['text'].encode('UTF8')).hexdigest()
            dismissed = TableAnnouncements.select()\
                .where(TableAnnouncements.hash == digest).get_or_none()
            if dismissed:
                continue
        pending.append(parse_announcement_dict(raw))

    return sorted(pending, key=itemgetter('timestamp'), reverse=True)
|
||||
|
||||
|
||||
def mark_announcement_as_dismissed(hashed_announcement):
    """Persist a user's dismissal of the announcement identified by its hash.

    Looks the hash up among the currently visible announcements; when found,
    records it in table_announcements so get_all_announcements() filters it
    out from then on. Unknown hashes are silently ignored.
    """
    matching_text = next((x['text'] for x in get_all_announcements()
                          if x['hash'] == hashed_announcement), None)
    if matching_text is not None:
        # on_conflict_ignore keeps this idempotent if the same hash is
        # dismissed twice (hash column is unique).
        TableAnnouncements.insert({TableAnnouncements.hash: hashed_announcement,
                                   TableAnnouncements.timestamp: datetime.now(),
                                   TableAnnouncements.text: matching_text})\
            .on_conflict_ignore(ignore=True)\
            .execute()
|
|
@ -5,6 +5,7 @@ from flask import Flask, redirect
|
|||
from flask_cors import CORS
|
||||
from flask_socketio import SocketIO
|
||||
|
||||
from .database import database
|
||||
from .get_args import args
|
||||
from .config import settings, base_url
|
||||
|
||||
|
@ -37,6 +38,19 @@ def create_app():
|
|||
def page_not_found(_):
|
||||
return redirect(base_url, code=302)
|
||||
|
||||
# This hook ensures that a connection is opened to handle any queries
|
||||
# generated by the request.
|
||||
@app.before_request
|
||||
def _db_connect():
|
||||
database.connect()
|
||||
|
||||
# This hook ensures that the connection is closed when we've finished
|
||||
# processing the request.
|
||||
@app.teardown_request
|
||||
def _db_close(exc):
|
||||
if not database.is_closed():
|
||||
database.close()
|
||||
|
||||
return app
|
||||
|
||||
|
||||
|
|
|
@ -74,12 +74,15 @@ defaults = {
|
|||
'days_to_upgrade_subs': '7',
|
||||
'upgrade_manual': 'True',
|
||||
'anti_captcha_provider': 'None',
|
||||
'wanted_search_frequency': '3',
|
||||
'wanted_search_frequency_movie': '3',
|
||||
'wanted_search_frequency': '6',
|
||||
'wanted_search_frequency_movie': '6',
|
||||
'subzero_mods': '[]',
|
||||
'dont_notify_manual_actions': 'False',
|
||||
'hi_extension': 'hi',
|
||||
'embedded_subtitles_parser': 'ffprobe'
|
||||
'embedded_subtitles_parser': 'ffprobe',
|
||||
'default_und_audio_lang': '',
|
||||
'default_und_embedded_subtitles_lang': '',
|
||||
'parse_embedded_audio_track': 'False'
|
||||
},
|
||||
'auth': {
|
||||
'type': 'None',
|
||||
|
@ -101,6 +104,7 @@ defaults = {
|
|||
'port': '8989',
|
||||
'base_url': '/',
|
||||
'ssl': 'False',
|
||||
'http_timeout': '60',
|
||||
'apikey': '',
|
||||
'full_update': 'Daily',
|
||||
'full_update_day': '6',
|
||||
|
@ -119,6 +123,7 @@ defaults = {
|
|||
'port': '7878',
|
||||
'base_url': '/',
|
||||
'ssl': 'False',
|
||||
'http_timeout': '60',
|
||||
'apikey': '',
|
||||
'full_update': 'Daily',
|
||||
'full_update_day': '6',
|
||||
|
@ -161,6 +166,9 @@ defaults = {
|
|||
'podnapisi': {
|
||||
'verify_ssl': 'True'
|
||||
},
|
||||
'subf2m': {
|
||||
'verify_ssl': 'True'
|
||||
},
|
||||
'legendasdivx': {
|
||||
'username': '',
|
||||
'password': '',
|
||||
|
@ -259,6 +267,14 @@ defaults = {
|
|||
"streaming_service": 1,
|
||||
"edition": 1,
|
||||
"hearing_impaired": 1,
|
||||
},
|
||||
'postgresql': {
|
||||
'enabled': 'False',
|
||||
'host': 'localhost',
|
||||
'port': '5432',
|
||||
'database': '',
|
||||
'username': '',
|
||||
'password': '',
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -302,6 +318,12 @@ settings.radarr.base_url = base_url_slash_cleaner(uri=settings.radarr.base_url)
|
|||
if settings.general.page_size not in ['25', '50', '100', '250', '500', '1000']:
|
||||
settings.general.page_size = defaults['general']['page_size']
|
||||
|
||||
# increase delay between searches to reduce impact on providers
|
||||
if settings.general.wanted_search_frequency == '3':
|
||||
settings.general.wanted_search_frequency = '6'
|
||||
if settings.general.wanted_search_frequency_movie == '3':
|
||||
settings.general.wanted_search_frequency_movie = '6'
|
||||
|
||||
# save updated settings to file
|
||||
if os.path.exists(os.path.join(args.config_dir, 'config', 'config.ini')):
|
||||
with open(os.path.join(args.config_dir, 'config', 'config.ini'), 'w+') as handle:
|
||||
|
@ -362,6 +384,9 @@ def save_settings(settings_items):
|
|||
sonarr_exclusion_updated = False
|
||||
radarr_exclusion_updated = False
|
||||
use_embedded_subs_changed = False
|
||||
undefined_audio_track_default_changed = False
|
||||
undefined_subtitles_track_default_changed = False
|
||||
audio_tracks_parsing_changed = False
|
||||
|
||||
# Subzero Mods
|
||||
update_subzero = False
|
||||
|
@ -397,6 +422,15 @@ def save_settings(settings_items):
|
|||
'settings-general-ignore_vobsub_subs', 'settings-general-ignore_ass_subs']:
|
||||
use_embedded_subs_changed = True
|
||||
|
||||
if key == 'settings-general-default_und_audio_lang':
|
||||
undefined_audio_track_default_changed = True
|
||||
|
||||
if key == 'settings-general-parse_embedded_audio_track':
|
||||
audio_tracks_parsing_changed = True
|
||||
|
||||
if key == 'settings-general-default_und_embedded_subtitles_lang':
|
||||
undefined_subtitles_track_default_changed = True
|
||||
|
||||
if key in ['settings-general-base_url', 'settings-sonarr-base_url', 'settings-radarr-base_url']:
|
||||
value = base_url_slash_cleaner(value)
|
||||
|
||||
|
@ -518,7 +552,7 @@ def save_settings(settings_items):
|
|||
|
||||
update_subzero = True
|
||||
|
||||
if use_embedded_subs_changed:
|
||||
if use_embedded_subs_changed or undefined_audio_track_default_changed:
|
||||
from .scheduler import scheduler
|
||||
from subtitles.indexer.series import list_missing_subtitles
|
||||
from subtitles.indexer.movies import list_missing_subtitles_movies
|
||||
|
@ -527,6 +561,26 @@ def save_settings(settings_items):
|
|||
if settings.general.getboolean('use_radarr'):
|
||||
scheduler.add_job(list_missing_subtitles_movies, kwargs={'send_event': True})
|
||||
|
||||
if undefined_subtitles_track_default_changed:
|
||||
from .scheduler import scheduler
|
||||
from subtitles.indexer.series import series_full_scan_subtitles
|
||||
from subtitles.indexer.movies import movies_full_scan_subtitles
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
scheduler.add_job(series_full_scan_subtitles, kwargs={'use_cache': True})
|
||||
if settings.general.getboolean('use_radarr'):
|
||||
scheduler.add_job(movies_full_scan_subtitles, kwargs={'use_cache': True})
|
||||
|
||||
if audio_tracks_parsing_changed:
|
||||
from .scheduler import scheduler
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
from sonarr.sync.episodes import sync_episodes
|
||||
from sonarr.sync.series import update_series
|
||||
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
|
||||
scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1)
|
||||
if settings.general.getboolean('use_radarr'):
|
||||
from radarr.sync.movies import update_movies
|
||||
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)
|
||||
|
||||
if update_subzero:
|
||||
settings.set('general', 'subzero_mods', ','.join(subzero_mods))
|
||||
|
||||
|
|
|
@ -1,31 +1,64 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import ast
|
||||
import atexit
|
||||
import json
|
||||
import ast
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField
|
||||
from playhouse.sqliteq import SqliteQueueDatabase
|
||||
from dogpile.cache import make_region
|
||||
from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField, BigIntegerField, \
|
||||
DateTimeField, OperationalError, PostgresqlDatabase
|
||||
from playhouse.migrate import PostgresqlMigrator
|
||||
from playhouse.migrate import SqliteMigrator, migrate
|
||||
from playhouse.shortcuts import ThreadSafeDatabaseMetadata, ReconnectMixin
|
||||
from playhouse.sqlite_ext import RowIDField
|
||||
from playhouse.sqliteq import SqliteQueueDatabase
|
||||
|
||||
from utilities.path_mappings import path_mappings
|
||||
|
||||
from .config import settings, get_array_from
|
||||
from .get_args import args
|
||||
|
||||
logger = logging.getLogger(__name__)

# Whether to use PostgreSQL instead of the default embedded SQLite database.
postgresql = settings.postgresql.getboolean('enabled')

# In-memory dogpile cache region used to memoize inexpensive lookups
# (see the @region.cache_on_arguments() decorators further down).
region = make_region().configure('dogpile.cache.memory')

if postgresql:
    # ReconnectMixin transparently re-establishes the connection when the
    # server drops it (e.g. after an idle timeout) for the listed errors.
    class ReconnectPostgresqlDatabase(ReconnectMixin, PostgresqlDatabase):
        reconnect_errors = (
            (OperationalError, 'server closed the connection unexpectedly'),
        )

    logger.debug(
        f"Connecting to PostgreSQL database: {settings.postgresql.host}:{settings.postgresql.port}/{settings.postgresql.database}")
    database = ReconnectPostgresqlDatabase(settings.postgresql.database,
                                           user=settings.postgresql.username,
                                           password=settings.postgresql.password,
                                           host=settings.postgresql.host,
                                           port=settings.postgresql.port,
                                           autocommit=True,
                                           autorollback=True,
                                           autoconnect=True,
                                           )
    migrator = PostgresqlMigrator(database)
else:
    db_path = os.path.join(args.config_dir, 'db', 'bazarr.db')
    logger.debug(f"Connecting to SQLite database: {db_path}")
    # SqliteQueueDatabase funnels writes through a single background worker
    # thread so multiple application threads can share one SQLite connection.
    database = SqliteQueueDatabase(db_path,
                                   use_gevent=False,
                                   autostart=True,
                                   queue_max_size=256)
    migrator = SqliteMigrator(database)
|
||||
@atexit.register
def _stop_worker_threads():
    """Stop the SQLite write-queue worker thread at interpreter exit.

    The PostgreSQL backend has no background worker, so there is nothing
    to stop in that case.
    """
    if postgresql:
        return
    database.stop()
|
||||
|
||||
|
||||
class UnknownField(object):
|
||||
|
@ -35,6 +68,7 @@ class UnknownField(object):
|
|||
class BaseModel(Model):
|
||||
class Meta:
|
||||
database = database
|
||||
model_metadata_class = ThreadSafeDatabaseMetadata
|
||||
|
||||
|
||||
class System(BaseModel):
|
||||
|
@ -52,7 +86,7 @@ class TableBlacklist(BaseModel):
|
|||
sonarr_episode_id = IntegerField(null=True)
|
||||
sonarr_series_id = IntegerField(null=True)
|
||||
subs_id = TextField(null=True)
|
||||
timestamp = IntegerField(null=True)
|
||||
timestamp = DateTimeField(null=True)
|
||||
|
||||
class Meta:
|
||||
table_name = 'table_blacklist'
|
||||
|
@ -64,7 +98,7 @@ class TableBlacklistMovie(BaseModel):
|
|||
provider = TextField(null=True)
|
||||
radarr_id = IntegerField(null=True)
|
||||
subs_id = TextField(null=True)
|
||||
timestamp = IntegerField(null=True)
|
||||
timestamp = DateTimeField(null=True)
|
||||
|
||||
class Meta:
|
||||
table_name = 'table_blacklist_movie'
|
||||
|
@ -79,13 +113,13 @@ class TableEpisodes(BaseModel):
|
|||
episode_file_id = IntegerField(null=True)
|
||||
failedAttempts = TextField(null=True)
|
||||
ffprobe_cache = BlobField(null=True)
|
||||
file_size = IntegerField(default=0, null=True)
|
||||
file_size = BigIntegerField(default=0, null=True)
|
||||
format = TextField(null=True)
|
||||
missing_subtitles = TextField(null=True)
|
||||
monitored = TextField(null=True)
|
||||
path = TextField()
|
||||
resolution = TextField(null=True)
|
||||
scene_name = TextField(null=True)
|
||||
sceneName = TextField(null=True)
|
||||
season = IntegerField()
|
||||
sonarrEpisodeId = IntegerField(unique=True)
|
||||
sonarrSeriesId = IntegerField()
|
||||
|
@ -104,12 +138,12 @@ class TableHistory(BaseModel):
|
|||
id = AutoField()
|
||||
language = TextField(null=True)
|
||||
provider = TextField(null=True)
|
||||
score = TextField(null=True)
|
||||
score = IntegerField(null=True)
|
||||
sonarrEpisodeId = IntegerField()
|
||||
sonarrSeriesId = IntegerField()
|
||||
subs_id = TextField(null=True)
|
||||
subtitles_path = TextField(null=True)
|
||||
timestamp = IntegerField()
|
||||
timestamp = DateTimeField()
|
||||
video_path = TextField(null=True)
|
||||
|
||||
class Meta:
|
||||
|
@ -123,10 +157,10 @@ class TableHistoryMovie(BaseModel):
|
|||
language = TextField(null=True)
|
||||
provider = TextField(null=True)
|
||||
radarrId = IntegerField()
|
||||
score = TextField(null=True)
|
||||
score = IntegerField(null=True)
|
||||
subs_id = TextField(null=True)
|
||||
subtitles_path = TextField(null=True)
|
||||
timestamp = IntegerField()
|
||||
timestamp = DateTimeField()
|
||||
video_path = TextField(null=True)
|
||||
|
||||
class Meta:
|
||||
|
@ -154,7 +188,7 @@ class TableMovies(BaseModel):
|
|||
failedAttempts = TextField(null=True)
|
||||
fanart = TextField(null=True)
|
||||
ffprobe_cache = BlobField(null=True)
|
||||
file_size = IntegerField(default=0, null=True)
|
||||
file_size = BigIntegerField(default=0, null=True)
|
||||
format = TextField(null=True)
|
||||
imdbId = TextField(null=True)
|
||||
missing_subtitles = TextField(null=True)
|
||||
|
@ -211,7 +245,7 @@ class TableSettingsNotifier(BaseModel):
|
|||
|
||||
|
||||
class TableShows(BaseModel):
|
||||
alternateTitles = TextField(null=True)
|
||||
alternativeTitles = TextField(null=True)
|
||||
audio_language = TextField(null=True)
|
||||
fanart = TextField(null=True)
|
||||
imdbId = TextField(default='""', null=True)
|
||||
|
@ -264,6 +298,15 @@ class TableCustomScoreProfileConditions(BaseModel):
|
|||
table_name = 'table_custom_score_profile_conditions'
|
||||
|
||||
|
||||
class TableAnnouncements(BaseModel):
    # Records announcements the user has dismissed so they are not shown again.
    timestamp = DateTimeField()
    # sha256 hex digest of the announcement text; unique so dismissing the
    # same announcement twice is a no-op (insert uses on_conflict_ignore).
    hash = TextField(null=True, unique=True)
    text = TextField(null=True)

    class Meta:
        table_name = 'table_announcements'
|
||||
|
||||
|
||||
def init_db():
|
||||
# Create tables if they don't exists.
|
||||
database.create_tables([System,
|
||||
|
@ -280,7 +323,8 @@ def init_db():
|
|||
TableShows,
|
||||
TableShowsRootfolder,
|
||||
TableCustomScoreProfiles,
|
||||
TableCustomScoreProfileConditions])
|
||||
TableCustomScoreProfileConditions,
|
||||
TableAnnouncements])
|
||||
|
||||
# add the system table single row if it's not existing
|
||||
# we must retry until the tables are created
|
||||
|
@ -296,51 +340,185 @@ def init_db():
|
|||
|
||||
|
||||
def migrate_db():
    """Apply in-place schema migrations to an existing database.

    Every change is guarded by an inspection of the live schema (column
    lists, column types), so the function is idempotent and safe to run at
    each startup, for both the SQLite and PostgreSQL backends.
    """
    # Snapshot the column names currently present in each table we may alter.
    table_shows = [t.name for t in database.get_columns('table_shows')]
    table_episodes = [t.name for t in database.get_columns('table_episodes')]
    table_movies = [t.name for t in database.get_columns('table_movies')]
    table_history = [t.name for t in database.get_columns('table_history')]
    table_history_movie = [t.name for t in database.get_columns('table_history_movie')]
    table_languages_profiles = [t.name for t in database.get_columns('table_languages_profiles')]

    # table_shows: add columns introduced after the table was first created.
    if "year" not in table_shows:
        migrate(migrator.add_column('table_shows', 'year', TextField(null=True)))
    if "alternativeTitle" not in table_shows:
        # NOTE(review): column name is singular here while the model field is
        # 'alternativeTitles' (plural) — confirm this is intentional.
        migrate(migrator.add_column('table_shows', 'alternativeTitle', TextField(null=True)))
    if "tags" not in table_shows:
        migrate(migrator.add_column('table_shows', 'tags', TextField(default='[]', null=True)))
    if "seriesType" not in table_shows:
        migrate(migrator.add_column('table_shows', 'seriesType', TextField(default='""', null=True)))
    if "imdbId" not in table_shows:
        migrate(migrator.add_column('table_shows', 'imdbId', TextField(default='""', null=True)))
    if "profileId" not in table_shows:
        migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
    if "profileId" not in table_shows:
        # NOTE(review): exact duplicate of the check just above — redundant
        # but harmless (the guard makes the second pass a no-op).
        migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
    if "monitored" not in table_shows:
        migrate(migrator.add_column('table_shows', 'monitored', TextField(null=True)))

    # table_episodes
    if "format" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'format', TextField(null=True)))
    if "resolution" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'resolution', TextField(null=True)))
    if "video_codec" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'video_codec', TextField(null=True)))
    if "audio_codec" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'audio_codec', TextField(null=True)))
    if "episode_file_id" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'episode_file_id', IntegerField(null=True)))
    if "audio_language" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'audio_language', TextField(null=True)))
    if "file_size" not in table_episodes:
        # 64-bit column so very large file sizes fit (matches the model's
        # BigIntegerField).
        migrate(migrator.add_column('table_episodes', 'file_size', BigIntegerField(default=0, null=True)))
    if "ffprobe_cache" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'ffprobe_cache', BlobField(null=True)))

    # table_movies
    if "sortTitle" not in table_movies:
        migrate(migrator.add_column('table_movies', 'sortTitle', TextField(null=True)))
    if "year" not in table_movies:
        migrate(migrator.add_column('table_movies', 'year', TextField(null=True)))
    if "alternativeTitles" not in table_movies:
        migrate(migrator.add_column('table_movies', 'alternativeTitles', TextField(null=True)))
    if "format" not in table_movies:
        migrate(migrator.add_column('table_movies', 'format', TextField(null=True)))
    if "resolution" not in table_movies:
        migrate(migrator.add_column('table_movies', 'resolution', TextField(null=True)))
    if "video_codec" not in table_movies:
        migrate(migrator.add_column('table_movies', 'video_codec', TextField(null=True)))
    if "audio_codec" not in table_movies:
        migrate(migrator.add_column('table_movies', 'audio_codec', TextField(null=True)))
    if "imdbId" not in table_movies:
        migrate(migrator.add_column('table_movies', 'imdbId', TextField(null=True)))
    if "movie_file_id" not in table_movies:
        migrate(migrator.add_column('table_movies', 'movie_file_id', IntegerField(null=True)))
    if "tags" not in table_movies:
        migrate(migrator.add_column('table_movies', 'tags', TextField(default='[]', null=True)))
    if "profileId" not in table_movies:
        migrate(migrator.add_column('table_movies', 'profileId', IntegerField(null=True)))
    if "file_size" not in table_movies:
        migrate(migrator.add_column('table_movies', 'file_size', BigIntegerField(default=0, null=True)))
    if "ffprobe_cache" not in table_movies:
        migrate(migrator.add_column('table_movies', 'ffprobe_cache', BlobField(null=True)))

    # table_history
    if "video_path" not in table_history:
        migrate(migrator.add_column('table_history', 'video_path', TextField(null=True)))
    if "language" not in table_history:
        migrate(migrator.add_column('table_history', 'language', TextField(null=True)))
    if "provider" not in table_history:
        migrate(migrator.add_column('table_history', 'provider', TextField(null=True)))
    if "score" not in table_history:
        migrate(migrator.add_column('table_history', 'score', TextField(null=True)))
    if "subs_id" not in table_history:
        migrate(migrator.add_column('table_history', 'subs_id', TextField(null=True)))
    if "subtitles_path" not in table_history:
        migrate(migrator.add_column('table_history', 'subtitles_path', TextField(null=True)))

    # table_history_movie
    if "video_path" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'video_path', TextField(null=True)))
    if "language" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'language', TextField(null=True)))
    if "provider" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'provider', TextField(null=True)))
    if "score" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'score', TextField(null=True)))
    if "subs_id" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)))
    if "subtitles_path" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)))

    # table_languages_profiles
    if "mustContain" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)))
    if "mustNotContain" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)))
    if "originalFormat" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'originalFormat', BooleanField(null=True)))

    # Drop columns no longer present in the models.
    if "languages" in table_shows:
        migrate(migrator.drop_column('table_shows', 'languages'))
    if "hearing_impaired" in table_shows:
        migrate(migrator.drop_column('table_shows', 'hearing_impaired'))

    if "languages" in table_movies:
        migrate(migrator.drop_column('table_movies', 'languages'))
    if "hearing_impaired" in table_movies:
        migrate(migrator.drop_column('table_movies', 'hearing_impaired'))

    # Convert integer epoch timestamps to native datetime columns. The type
    # check accepts both the SQLite ("DATETIME") and PostgreSQL
    # ("timestamp without time zone") spellings of an already-migrated column.
    if not any(
            x
            for x in database.get_columns('table_blacklist')
            if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_blacklist', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableBlacklist.select()
        for item in update:
            # NOTE(review): Model.update() acts as the classmethod even when
            # called on an instance, and no .where() clause is given — this
            # looks like it rewrites every row on each iteration; confirm
            # this is the intended behavior.
            item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()

    if not any(
            x
            for x in database.get_columns('table_blacklist_movie')
            if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_blacklist_movie', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableBlacklistMovie.select()
        for item in update:
            # NOTE(review): same no-where update pattern as above — verify.
            item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()

    # table_history: score used to be stored as text; convert to integer.
    if not any(
            x for x in database.get_columns('table_history') if x.name == "score" and x.data_type.lower() == "integer"):
        migrate(migrator.alter_column_type('table_history', 'score', IntegerField(null=True)))
    if not any(
            x
            for x in database.get_columns('table_history')
            if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_history', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableHistory.select()
        list_to_update = []
        for i, item in enumerate(update):
            item.timestamp = datetime.fromtimestamp(int(item.timestamp))
            list_to_update.append(item)
            # Flush periodically to keep the bulk UPDATE statements small.
            if i % 100 == 0:
                TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])
                list_to_update = []
        if list_to_update:
            TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])

    if not any(x for x in database.get_columns('table_history_movie') if
               x.name == "score" and x.data_type.lower() == "integer"):
        migrate(migrator.alter_column_type('table_history_movie', 'score', IntegerField(null=True)))
    if not any(
            x
            for x in database.get_columns('table_history_movie')
            if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_history_movie', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableHistoryMovie.select()
        list_to_update = []
        for i, item in enumerate(update):
            item.timestamp = datetime.fromtimestamp(int(item.timestamp))
            list_to_update.append(item)
            if i % 100 == 0:
                TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])
                list_to_update = []
        if list_to_update:
            TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])
    # if not any(x for x in database.get_columns('table_movies') if x.name == "monitored" and x.data_type == "BOOLEAN"):
    #     migrate(migrator.alter_column_type('table_movies', 'monitored', BooleanField(null=True)))

    # Provider configuration no longer lives in the database; drop the table.
    if database.get_columns('table_settings_providers'):
        database.execute_sql('drop table if exists table_settings_providers;')

    # Renames bringing historical column names in line with the current models.
    if "alternateTitles" in table_shows:
        migrate(migrator.rename_column('table_shows', 'alternateTitles', "alternativeTitles"))

    if "scene_name" in table_episodes:
        migrate(migrator.rename_column('table_episodes', 'scene_name', "sceneName"))
||||
|
||||
class SqliteDictPathMapper:
|
||||
|
@ -376,21 +554,21 @@ def get_exclusion_clause(exclusion_type):
|
|||
if exclusion_type == 'series':
|
||||
tagsList = ast.literal_eval(settings.sonarr.excluded_tags)
|
||||
for tag in tagsList:
|
||||
where_clause.append(~(TableShows.tags.contains("\'"+tag+"\'")))
|
||||
where_clause.append(~(TableShows.tags.contains("\'" + tag + "\'")))
|
||||
else:
|
||||
tagsList = ast.literal_eval(settings.radarr.excluded_tags)
|
||||
for tag in tagsList:
|
||||
where_clause.append(~(TableMovies.tags.contains("\'"+tag+"\'")))
|
||||
where_clause.append(~(TableMovies.tags.contains("\'" + tag + "\'")))
|
||||
|
||||
if exclusion_type == 'series':
|
||||
monitoredOnly = settings.sonarr.getboolean('only_monitored')
|
||||
if monitoredOnly:
|
||||
where_clause.append((TableEpisodes.monitored == 'True'))
|
||||
where_clause.append((TableShows.monitored == 'True'))
|
||||
where_clause.append((TableEpisodes.monitored == True)) # noqa E712
|
||||
where_clause.append((TableShows.monitored == True)) # noqa E712
|
||||
else:
|
||||
monitoredOnly = settings.radarr.getboolean('only_monitored')
|
||||
if monitoredOnly:
|
||||
where_clause.append((TableMovies.monitored == 'True'))
|
||||
where_clause.append((TableMovies.monitored == True)) # noqa E712
|
||||
|
||||
if exclusion_type == 'series':
|
||||
typesList = get_array_from(settings.sonarr.excluded_series_types)
|
||||
|
@ -404,6 +582,7 @@ def get_exclusion_clause(exclusion_type):
|
|||
return where_clause
|
||||
|
||||
|
||||
@region.cache_on_arguments()
|
||||
def update_profile_id_list():
|
||||
profile_id_list = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId,
|
||||
TableLanguagesProfiles.name,
|
||||
|
@ -487,52 +666,54 @@ def get_profile_cutoff(profile_id):
|
|||
return cutoff_language
|
||||
|
||||
|
||||
def get_audio_profile_languages(series_id=None, episode_id=None, movie_id=None):
|
||||
from languages.get_languages import alpha2_from_language, alpha3_from_language
|
||||
def get_audio_profile_languages(audio_languages_list_str):
|
||||
from languages.get_languages import alpha2_from_language, alpha3_from_language, language_from_alpha2
|
||||
audio_languages = []
|
||||
|
||||
if series_id:
|
||||
audio_languages_list_str = TableShows.get(TableShows.sonarrSeriesId == series_id).audio_language
|
||||
elif episode_id:
|
||||
audio_languages_list_str = TableEpisodes.get(TableEpisodes.sonarrEpisodeId == episode_id).audio_language
|
||||
elif movie_id:
|
||||
audio_languages_list_str = TableMovies.get(TableMovies.radarrId == movie_id).audio_language
|
||||
else:
|
||||
return audio_languages
|
||||
und_default_language = language_from_alpha2(settings.general.default_und_audio_lang)
|
||||
|
||||
try:
|
||||
audio_languages_list = ast.literal_eval(audio_languages_list_str)
|
||||
audio_languages_list = ast.literal_eval(audio_languages_list_str or '[]')
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
for language in audio_languages_list:
|
||||
audio_languages.append(
|
||||
{"name": language,
|
||||
"code2": alpha2_from_language(language) or None,
|
||||
"code3": alpha3_from_language(language) or None}
|
||||
)
|
||||
if language:
|
||||
audio_languages.append(
|
||||
{"name": language,
|
||||
"code2": alpha2_from_language(language) or None,
|
||||
"code3": alpha3_from_language(language) or None}
|
||||
)
|
||||
else:
|
||||
if und_default_language:
|
||||
logging.debug(f"Undefined language audio track treated as {und_default_language}")
|
||||
audio_languages.append(
|
||||
{"name": und_default_language,
|
||||
"code2": alpha2_from_language(und_default_language) or None,
|
||||
"code3": alpha3_from_language(und_default_language) or None}
|
||||
)
|
||||
|
||||
return audio_languages
|
||||
|
||||
|
||||
def get_profile_id(series_id=None, episode_id=None, movie_id=None):
|
||||
if series_id:
|
||||
data = TableShows.select(TableShows.profileId)\
|
||||
.where(TableShows.sonarrSeriesId == series_id)\
|
||||
data = TableShows.select(TableShows.profileId) \
|
||||
.where(TableShows.sonarrSeriesId == series_id) \
|
||||
.get_or_none()
|
||||
if data:
|
||||
return data.profileId
|
||||
elif episode_id:
|
||||
data = TableShows.select(TableShows.profileId)\
|
||||
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId))\
|
||||
.where(TableEpisodes.sonarrEpisodeId == episode_id)\
|
||||
data = TableShows.select(TableShows.profileId) \
|
||||
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId)) \
|
||||
.where(TableEpisodes.sonarrEpisodeId == episode_id) \
|
||||
.get_or_none()
|
||||
if data:
|
||||
return data.profileId
|
||||
|
||||
elif movie_id:
|
||||
data = TableMovies.select(TableMovies.profileId)\
|
||||
.where(TableMovies.radarrId == movie_id)\
|
||||
data = TableMovies.select(TableMovies.profileId) \
|
||||
.where(TableMovies.radarrId == movie_id) \
|
||||
.get_or_none()
|
||||
if data:
|
||||
return data.profileId
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
import ast
|
||||
import os
|
||||
import datetime
|
||||
import pytz
|
||||
|
@ -143,6 +144,14 @@ def get_providers():
|
|||
return providers_list
|
||||
|
||||
|
||||
def get_enabled_providers():
|
||||
# return enabled provider including those who can be throttled
|
||||
try:
|
||||
return ast.literal_eval(settings.general.enabled_providers)
|
||||
except (ValueError, TypeError, SyntaxError, MemoryError, RecursionError):
|
||||
return []
|
||||
|
||||
|
||||
_FFPROBE_BINARY = get_binary("ffprobe")
|
||||
_FFMPEG_BINARY = get_binary("ffmpeg")
|
||||
|
||||
|
@ -240,6 +249,9 @@ def get_providers_auth():
|
|||
'f_username': settings.karagarga.f_username,
|
||||
'f_password': settings.karagarga.f_password,
|
||||
},
|
||||
'subf2m': {
|
||||
'verify_ssl': settings.subf2m.getboolean('verify_ssl')
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -12,7 +12,12 @@ from apscheduler.jobstores.base import JobLookupError
|
|||
from datetime import datetime, timedelta
|
||||
from calendar import day_name
|
||||
from random import randrange
|
||||
from tzlocal import get_localzone
|
||||
from tzlocal.utils import ZoneInfoNotFoundError
|
||||
from dateutil import tz
|
||||
import logging
|
||||
|
||||
from app.announcements import get_announcements_to_file
|
||||
from sonarr.sync.series import update_series
|
||||
from sonarr.sync.episodes import sync_episodes, update_all_episodes
|
||||
from radarr.sync.movies import update_movies, update_all_movies
|
||||
|
@ -37,7 +42,13 @@ class Scheduler:
|
|||
def __init__(self):
|
||||
self.__running_tasks = []
|
||||
|
||||
self.aps_scheduler = BackgroundScheduler()
|
||||
try:
|
||||
self.timezone = get_localzone()
|
||||
except ZoneInfoNotFoundError as e:
|
||||
logging.error(f"BAZARR cannot use specified timezone: {e}")
|
||||
self.timezone = tz.gettz("UTC")
|
||||
|
||||
self.aps_scheduler = BackgroundScheduler({'apscheduler.timezone': self.timezone})
|
||||
|
||||
# task listener
|
||||
def task_listener_add(event):
|
||||
|
@ -252,16 +263,22 @@ class Scheduler:
|
|||
check_releases, IntervalTrigger(hours=3), max_instances=1, coalesce=True, misfire_grace_time=15,
|
||||
id='update_release', name='Update Release Info', replace_existing=True)
|
||||
|
||||
self.aps_scheduler.add_job(
|
||||
get_announcements_to_file, IntervalTrigger(hours=6), max_instances=1, coalesce=True, misfire_grace_time=15,
|
||||
id='update_announcements', name='Update Announcements File', replace_existing=True)
|
||||
|
||||
def __search_wanted_subtitles_task(self):
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
self.aps_scheduler.add_job(
|
||||
wanted_search_missing_subtitles_series, IntervalTrigger(hours=int(settings.general.wanted_search_frequency)),
|
||||
max_instances=1, coalesce=True, misfire_grace_time=15, id='wanted_search_missing_subtitles_series',
|
||||
name='Search for wanted Series Subtitles', replace_existing=True)
|
||||
wanted_search_missing_subtitles_series,
|
||||
IntervalTrigger(hours=int(settings.general.wanted_search_frequency)), max_instances=1, coalesce=True,
|
||||
misfire_grace_time=15, id='wanted_search_missing_subtitles_series', replace_existing=True,
|
||||
name='Search for wanted Series Subtitles')
|
||||
if settings.general.getboolean('use_radarr'):
|
||||
self.aps_scheduler.add_job(
|
||||
wanted_search_missing_subtitles_movies, IntervalTrigger(hours=int(settings.general.wanted_search_frequency_movie)),
|
||||
max_instances=1, coalesce=True, misfire_grace_time=15, id='wanted_search_missing_subtitles_movies',
|
||||
wanted_search_missing_subtitles_movies,
|
||||
IntervalTrigger(hours=int(settings.general.wanted_search_frequency_movie)), max_instances=1,
|
||||
coalesce=True, misfire_grace_time=15, id='wanted_search_missing_subtitles_movies',
|
||||
name='Search for wanted Movies Subtitles', replace_existing=True)
|
||||
|
||||
def __upgrade_subtitles_task(self):
|
||||
|
@ -275,7 +292,11 @@ class Scheduler:
|
|||
def __randomize_interval_task(self):
|
||||
for job in self.aps_scheduler.get_jobs():
|
||||
if isinstance(job.trigger, IntervalTrigger):
|
||||
self.aps_scheduler.modify_job(job.id, next_run_time=datetime.now() + timedelta(seconds=randrange(job.trigger.interval.total_seconds()*0.75, job.trigger.interval.total_seconds())))
|
||||
self.aps_scheduler.modify_job(job.id,
|
||||
next_run_time=datetime.now(tz=self.timezone) +
|
||||
timedelta(seconds=randrange(
|
||||
job.trigger.interval.total_seconds() * 0.75,
|
||||
job.trigger.interval.total_seconds())))
|
||||
|
||||
def __no_task(self):
|
||||
for job in self.aps_scheduler.get_jobs():
|
||||
|
|
|
@ -20,7 +20,6 @@ from radarr.sync.movies import update_movies, update_one_movie
|
|||
from sonarr.info import get_sonarr_info, url_sonarr
|
||||
from radarr.info import url_radarr
|
||||
from .database import TableShows
|
||||
from .event_handler import event_stream
|
||||
|
||||
from .config import settings
|
||||
from .scheduler import scheduler
|
||||
|
@ -285,10 +284,10 @@ def dispatcher(data):
|
|||
|
||||
if topic == 'series':
|
||||
logging.debug(f'Event received from Sonarr for series: {series_title} ({series_year})')
|
||||
update_one_series(series_id=media_id, action=action)
|
||||
update_one_series(series_id=media_id, action=action, send_event=False)
|
||||
if episodesChanged:
|
||||
# this will happen if a season monitored status is changed.
|
||||
sync_episodes(series_id=media_id, send_event=True)
|
||||
sync_episodes(series_id=media_id, send_event=False)
|
||||
elif topic == 'episode':
|
||||
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
|
||||
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')
|
||||
|
|
|
@ -74,12 +74,14 @@ def catch_all(path):
|
|||
updated = '0'
|
||||
|
||||
inject = dict()
|
||||
inject["baseUrl"] = base_url
|
||||
inject["canUpdate"] = not args.no_update
|
||||
inject["hasUpdate"] = updated != '0'
|
||||
|
||||
if auth:
|
||||
inject["apiKey"] = settings.auth.apikey
|
||||
if not path.startswith('api/'):
|
||||
inject["baseUrl"] = base_url
|
||||
inject["canUpdate"] = not args.no_update
|
||||
inject["hasUpdate"] = updated != '0'
|
||||
|
||||
if auth:
|
||||
inject["apiKey"] = settings.auth.apikey
|
||||
|
||||
template_url = base_url
|
||||
if not template_url.endswith("/"):
|
||||
|
|
|
@ -65,7 +65,7 @@ import logging # noqa E402
|
|||
def is_virtualenv():
|
||||
# return True if Bazarr have been start from within a virtualenv or venv
|
||||
base_prefix = getattr(sys, "base_prefix", None)
|
||||
# real_prefix will return None if not in a virtualenv enviroment or the default python path
|
||||
# real_prefix will return None if not in a virtualenv environment or the default python path
|
||||
real_prefix = getattr(sys, "real_prefix", None) or sys.prefix
|
||||
return base_prefix != real_prefix
|
||||
|
||||
|
@ -177,6 +177,11 @@ if not os.path.exists(os.path.join(args.config_dir, 'config', 'releases.txt')):
|
|||
check_releases()
|
||||
logging.debug("BAZARR Created releases file")
|
||||
|
||||
if not os.path.exists(os.path.join(args.config_dir, 'config', 'announcements.txt')):
|
||||
from app.announcements import get_announcements_to_file
|
||||
get_announcements_to_file()
|
||||
logging.debug("BAZARR Created announcements file")
|
||||
|
||||
config_file = os.path.normpath(os.path.join(args.config_dir, 'config', 'config.ini'))
|
||||
|
||||
# Move GA visitor from config.ini to dedicated file
|
||||
|
|
|
@ -39,9 +39,12 @@ from app.notifier import update_notifier # noqa E402
|
|||
from languages.get_languages import load_language_in_db # noqa E402
|
||||
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
|
||||
from app.server import webserver # noqa E402
|
||||
from app.announcements import get_announcements_to_file # noqa E402
|
||||
|
||||
configure_proxy_func()
|
||||
|
||||
get_announcements_to_file()
|
||||
|
||||
# Reset the updated once Bazarr have been restarted after an update
|
||||
System.update({System.updated: '0'}).execute()
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from app.database import TableBlacklistMovie
|
||||
from app.event_handler import event_stream
|
||||
|
@ -19,7 +19,7 @@ def get_blacklist_movie():
|
|||
def blacklist_log_movie(radarr_id, provider, subs_id, language):
|
||||
TableBlacklistMovie.insert({
|
||||
TableBlacklistMovie.radarr_id: radarr_id,
|
||||
TableBlacklistMovie.timestamp: time.time(),
|
||||
TableBlacklistMovie.timestamp: datetime.now(),
|
||||
TableBlacklistMovie.provider: provider,
|
||||
TableBlacklistMovie.subs_id: subs_id,
|
||||
TableBlacklistMovie.language: language
|
||||
|
|
|
@ -21,7 +21,7 @@ def browse_radarr_filesystem(path='#'):
|
|||
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
|
||||
settings.radarr.apikey
|
||||
try:
|
||||
r = requests.get(url_radarr_api_filesystem, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_radarr_api_filesystem, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.HTTPError:
|
||||
logging.exception("BAZARR Error trying to get series from Radarr. Http error.")
|
||||
|
|
|
@ -1,17 +1,24 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from app.database import TableHistoryMovie
|
||||
from app.event_handler import event_stream
|
||||
|
||||
|
||||
def history_log_movie(action, radarr_id, description, video_path=None, language=None, provider=None, score=None,
|
||||
subs_id=None, subtitles_path=None):
|
||||
def history_log_movie(action, radarr_id, result, fake_provider=None, fake_score=None):
|
||||
description = result.message
|
||||
video_path = result.path
|
||||
language = result.language_code
|
||||
provider = fake_provider or result.provider
|
||||
score = fake_score or result.score
|
||||
subs_id = result.subs_id
|
||||
subtitles_path = result.subs_path
|
||||
|
||||
TableHistoryMovie.insert({
|
||||
TableHistoryMovie.action: action,
|
||||
TableHistoryMovie.radarrId: radarr_id,
|
||||
TableHistoryMovie.timestamp: time.time(),
|
||||
TableHistoryMovie.timestamp: datetime.now(),
|
||||
TableHistoryMovie.description: description,
|
||||
TableHistoryMovie.video_path: video_path,
|
||||
TableHistoryMovie.language: language,
|
||||
|
|
|
@ -29,7 +29,7 @@ class GetRadarrInfo:
|
|||
if settings.general.getboolean('use_radarr'):
|
||||
try:
|
||||
rv = url_radarr() + "/api/system/status?apikey=" + settings.radarr.apikey
|
||||
radarr_json = requests.get(rv, timeout=60, verify=False, headers=headers).json()
|
||||
radarr_json = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()
|
||||
if 'version' in radarr_json:
|
||||
radarr_version = radarr_json['version']
|
||||
else:
|
||||
|
@ -37,7 +37,7 @@ class GetRadarrInfo:
|
|||
except json.decoder.JSONDecodeError:
|
||||
try:
|
||||
rv = url_radarr() + "/api/v3/system/status?apikey=" + settings.radarr.apikey
|
||||
radarr_version = requests.get(rv, timeout=60, verify=False, headers=headers).json()['version']
|
||||
radarr_version = requests.get(rv, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers).json()['version']
|
||||
except json.decoder.JSONDecodeError:
|
||||
logging.debug('BAZARR cannot get Radarr version')
|
||||
radarr_version = 'unknown'
|
||||
|
|
|
@ -18,6 +18,6 @@ def notify_radarr(radarr_id):
|
|||
'name': 'RescanMovie',
|
||||
'movieId': int(radarr_id)
|
||||
}
|
||||
requests.post(url, json=data, timeout=60, verify=False, headers=headers)
|
||||
requests.post(url, json=data, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
except Exception:
|
||||
logging.exception('BAZARR cannot notify Radarr')
|
||||
|
|
|
@ -22,7 +22,7 @@ def get_radarr_rootfolder():
|
|||
url_radarr_api_rootfolder = url_radarr() + "/api/v3/rootfolder?apikey=" + apikey_radarr
|
||||
|
||||
try:
|
||||
rootfolder = requests.get(url_radarr_api_rootfolder, timeout=60, verify=False, headers=headers)
|
||||
rootfolder = requests.get(url_radarr_api_rootfolder, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get rootfolder from Radarr. Connection Error.")
|
||||
return []
|
||||
|
|
|
@ -147,12 +147,12 @@ def update_movies(send_event=True):
|
|||
# Insert new movies in DB
|
||||
for added_movie in movies_to_add:
|
||||
try:
|
||||
result = TableMovies.insert(added_movie).on_conflict(action='IGNORE').execute()
|
||||
result = TableMovies.insert(added_movie).on_conflict_ignore().execute()
|
||||
except IntegrityError as e:
|
||||
logging.error(f"BAZARR cannot insert movie {added_movie['path']} because of {e}")
|
||||
continue
|
||||
else:
|
||||
if result > 0:
|
||||
if result and result > 0:
|
||||
altered_movies.append([added_movie['tmdbId'],
|
||||
added_movie['path'],
|
||||
added_movie['radarrId'],
|
||||
|
|
|
@ -2,8 +2,11 @@
|
|||
|
||||
import os
|
||||
|
||||
from radarr.info import get_radarr_info
|
||||
from app.config import settings
|
||||
from languages.get_languages import language_from_alpha2
|
||||
from radarr.info import get_radarr_info
|
||||
from utilities.video_analyzer import embedded_audio_reader
|
||||
from utilities.path_mappings import path_mappings
|
||||
|
||||
from .converter import RadarrFormatAudioCodec, RadarrFormatVideoCodec
|
||||
|
||||
|
@ -89,25 +92,31 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
|
|||
videoCodec = None
|
||||
audioCodec = None
|
||||
|
||||
audio_language = []
|
||||
if get_radarr_info.is_legacy():
|
||||
if 'mediaInfo' in movie['movieFile']:
|
||||
if 'audioLanguages' in movie['movieFile']['mediaInfo']:
|
||||
audio_languages_list = movie['movieFile']['mediaInfo']['audioLanguages'].split('/')
|
||||
if len(audio_languages_list):
|
||||
for audio_language_list in audio_languages_list:
|
||||
audio_language.append(audio_language_list.strip())
|
||||
if not audio_language:
|
||||
audio_language = profile_id_to_language(movie['qualityProfileId'], audio_profiles)
|
||||
if settings.general.getboolean('parse_embedded_audio_track'):
|
||||
audio_language = embedded_audio_reader(path_mappings.path_replace_movie(movie['movieFile']['path']),
|
||||
file_size=movie['movieFile']['size'],
|
||||
movie_file_id=movie['movieFile']['id'],
|
||||
use_cache=True)
|
||||
else:
|
||||
if 'languages' in movie['movieFile'] and len(movie['movieFile']['languages']):
|
||||
for item in movie['movieFile']['languages']:
|
||||
if isinstance(item, dict):
|
||||
if 'name' in item:
|
||||
language = item['name']
|
||||
if item['name'] == 'Portuguese (Brazil)':
|
||||
language = language_from_alpha2('pb')
|
||||
audio_language.append(language)
|
||||
audio_language = []
|
||||
if get_radarr_info.is_legacy():
|
||||
if 'mediaInfo' in movie['movieFile']:
|
||||
if 'audioLanguages' in movie['movieFile']['mediaInfo']:
|
||||
audio_languages_list = movie['movieFile']['mediaInfo']['audioLanguages'].split('/')
|
||||
if len(audio_languages_list):
|
||||
for audio_language_list in audio_languages_list:
|
||||
audio_language.append(audio_language_list.strip())
|
||||
if not audio_language:
|
||||
audio_language = profile_id_to_language(movie['qualityProfileId'], audio_profiles)
|
||||
else:
|
||||
if 'languages' in movie['movieFile'] and len(movie['movieFile']['languages']):
|
||||
for item in movie['movieFile']['languages']:
|
||||
if isinstance(item, dict):
|
||||
if 'name' in item:
|
||||
language = item['name']
|
||||
if item['name'] == 'Portuguese (Brazil)':
|
||||
language = language_from_alpha2('pb')
|
||||
audio_language.append(language)
|
||||
|
||||
tags = [d['label'] for d in tags_dict if d['id'] in movie['tags']]
|
||||
|
||||
|
@ -160,8 +169,8 @@ def movieParser(movie, action, tags_dict, movie_default_profile, audio_profiles)
|
|||
|
||||
|
||||
def profile_id_to_language(id, profiles):
|
||||
profiles_to_return = []
|
||||
for profile in profiles:
|
||||
profiles_to_return = []
|
||||
if id == profile[0]:
|
||||
profiles_to_return.append(profile[1])
|
||||
return profiles_to_return
|
||||
|
|
|
@ -18,7 +18,7 @@ def get_profile_list():
|
|||
url_radarr_api_movies = url_radarr() + "/api/v3/qualityprofile?apikey=" + apikey_radarr
|
||||
|
||||
try:
|
||||
profiles_json = requests.get(url_radarr_api_movies, timeout=60, verify=False, headers=headers)
|
||||
profiles_json = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get profiles from Radarr. Connection Error.")
|
||||
except requests.exceptions.Timeout:
|
||||
|
@ -50,7 +50,7 @@ def get_tags():
|
|||
url_radarr_api_series = url_radarr() + "/api/v3/tag?apikey=" + apikey_radarr
|
||||
|
||||
try:
|
||||
tagsDict = requests.get(url_radarr_api_series, timeout=60, verify=False, headers=headers)
|
||||
tagsDict = requests.get(url_radarr_api_series, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get tags from Radarr. Connection Error.")
|
||||
return []
|
||||
|
@ -79,7 +79,7 @@ def get_movies_from_radarr_api(url, apikey_radarr, radarr_id=None):
|
|||
apikey_radarr
|
||||
|
||||
try:
|
||||
r = requests.get(url_radarr_api_movies, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_radarr_api_movies, timeout=int(settings.radarr.http_timeout), verify=False, headers=headers)
|
||||
if r.status_code == 404:
|
||||
return
|
||||
r.raise_for_status()
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from app.database import TableBlacklist
|
||||
from app.event_handler import event_stream
|
||||
|
@ -20,7 +20,7 @@ def blacklist_log(sonarr_series_id, sonarr_episode_id, provider, subs_id, langua
|
|||
TableBlacklist.insert({
|
||||
TableBlacklist.sonarr_series_id: sonarr_series_id,
|
||||
TableBlacklist.sonarr_episode_id: sonarr_episode_id,
|
||||
TableBlacklist.timestamp: time.time(),
|
||||
TableBlacklist.timestamp: datetime.now(),
|
||||
TableBlacklist.provider: provider,
|
||||
TableBlacklist.subs_id: subs_id,
|
||||
TableBlacklist.language: language
|
||||
|
|
|
@ -20,7 +20,7 @@ def browse_sonarr_filesystem(path='#'):
|
|||
"&allowFoldersWithoutTrailingSlashes=true&includeFiles=false&apikey=" + \
|
||||
settings.sonarr.apikey
|
||||
try:
|
||||
r = requests.get(url_sonarr_api_filesystem, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_sonarr_api_filesystem, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.HTTPError:
|
||||
logging.exception("BAZARR Error trying to get series from Sonarr. Http error.")
|
||||
|
|
|
@ -1,18 +1,25 @@
|
|||
# coding=utf-8
|
||||
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from app.database import TableHistory
|
||||
from app.event_handler import event_stream
|
||||
|
||||
|
||||
def history_log(action, sonarr_series_id, sonarr_episode_id, description, video_path=None, language=None, provider=None,
|
||||
score=None, subs_id=None, subtitles_path=None):
|
||||
def history_log(action, sonarr_series_id, sonarr_episode_id, result, fake_provider=None, fake_score=None):
|
||||
description = result.message
|
||||
video_path = result.path
|
||||
language = result.language_code
|
||||
provider = fake_provider or result.provider
|
||||
score = fake_score or result.score
|
||||
subs_id = result.subs_id
|
||||
subtitles_path = result.subs_path
|
||||
|
||||
TableHistory.insert({
|
||||
TableHistory.action: action,
|
||||
TableHistory.sonarrSeriesId: sonarr_series_id,
|
||||
TableHistory.sonarrEpisodeId: sonarr_episode_id,
|
||||
TableHistory.timestamp: time.time(),
|
||||
TableHistory.timestamp: datetime.now(),
|
||||
TableHistory.description: description,
|
||||
TableHistory.video_path: video_path,
|
||||
TableHistory.language: language,
|
||||
|
|
|
@ -29,7 +29,7 @@ class GetSonarrInfo:
|
|||
if settings.general.getboolean('use_sonarr'):
|
||||
try:
|
||||
sv = url_sonarr() + "/api/system/status?apikey=" + settings.sonarr.apikey
|
||||
sonarr_json = requests.get(sv, timeout=60, verify=False, headers=headers).json()
|
||||
sonarr_json = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()
|
||||
if 'version' in sonarr_json:
|
||||
sonarr_version = sonarr_json['version']
|
||||
else:
|
||||
|
@ -37,7 +37,7 @@ class GetSonarrInfo:
|
|||
except json.decoder.JSONDecodeError:
|
||||
try:
|
||||
sv = url_sonarr() + "/api/v3/system/status?apikey=" + settings.sonarr.apikey
|
||||
sonarr_version = requests.get(sv, timeout=60, verify=False, headers=headers).json()['version']
|
||||
sonarr_version = requests.get(sv, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers).json()['version']
|
||||
except json.decoder.JSONDecodeError:
|
||||
logging.debug('BAZARR cannot get Sonarr version')
|
||||
sonarr_version = 'unknown'
|
||||
|
|
|
@ -18,6 +18,6 @@ def notify_sonarr(sonarr_series_id):
|
|||
'name': 'RescanSeries',
|
||||
'seriesId': int(sonarr_series_id)
|
||||
}
|
||||
requests.post(url, json=data, timeout=60, verify=False, headers=headers)
|
||||
requests.post(url, json=data, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
except Exception:
|
||||
logging.exception('BAZARR cannot notify Sonarr')
|
||||
|
|
|
@ -22,7 +22,7 @@ def get_sonarr_rootfolder():
|
|||
url_sonarr_api_rootfolder = url_sonarr() + "/api/v3/rootfolder?apikey=" + apikey_sonarr
|
||||
|
||||
try:
|
||||
rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=60, verify=False, headers=headers)
|
||||
rootfolder = requests.get(url_sonarr_api_rootfolder, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get rootfolder from Sonarr. Connection Error.")
|
||||
return []
|
||||
|
|
|
@ -119,7 +119,7 @@ def sync_episodes(series_id=None, send_event=True):
|
|||
TableEpisodes.path,
|
||||
TableEpisodes.season,
|
||||
TableEpisodes.episode,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.sceneName,
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.format,
|
||||
TableEpisodes.resolution,
|
||||
|
@ -149,12 +149,12 @@ def sync_episodes(series_id=None, send_event=True):
|
|||
# Insert new episodes in DB
|
||||
for added_episode in episodes_to_add:
|
||||
try:
|
||||
result = TableEpisodes.insert(added_episode).on_conflict(action='IGNORE').execute()
|
||||
result = TableEpisodes.insert(added_episode).on_conflict_ignore().execute()
|
||||
except IntegrityError as e:
|
||||
logging.error(f"BAZARR cannot insert episode {added_episode['path']} because of {e}")
|
||||
continue
|
||||
else:
|
||||
if result > 0:
|
||||
if result and result > 0:
|
||||
altered_episodes.append([added_episode['sonarrEpisodeId'],
|
||||
added_episode['path'],
|
||||
added_episode['monitored']])
|
||||
|
|
|
@ -2,9 +2,11 @@
|
|||
|
||||
import os
|
||||
|
||||
from app.config import settings
|
||||
from app.database import TableShows
|
||||
from sonarr.info import get_sonarr_info
|
||||
from utilities.path_mappings import path_mappings
|
||||
from utilities.video_analyzer import embedded_audio_reader
|
||||
from sonarr.info import get_sonarr_info
|
||||
|
||||
from .converter import SonarrFormatVideoCodec, SonarrFormatAudioCodec
|
||||
|
||||
|
@ -25,19 +27,20 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
|
|||
if show['alternateTitles'] is not None:
|
||||
alternate_titles = str([item['title'] for item in show['alternateTitles']])
|
||||
|
||||
audio_language = []
|
||||
if get_sonarr_info.is_legacy():
|
||||
audio_language = profile_id_to_language(show['qualityProfileId'], audio_profiles)
|
||||
else:
|
||||
if 'languageProfileId' in show:
|
||||
audio_language = profile_id_to_language(show['languageProfileId'], audio_profiles)
|
||||
else:
|
||||
audio_language = []
|
||||
|
||||
tags = [d['label'] for d in tags_dict if d['id'] in show['tags']]
|
||||
|
||||
imdbId = show['imdbId'] if 'imdbId' in show else None
|
||||
|
||||
audio_language = []
|
||||
if not settings.general.getboolean('parse_embedded_audio_track'):
|
||||
if get_sonarr_info.is_legacy():
|
||||
audio_language = profile_id_to_language(show['qualityProfileId'], audio_profiles)
|
||||
else:
|
||||
if 'languageProfileId' in show:
|
||||
audio_language = profile_id_to_language(show['languageProfileId'], audio_profiles)
|
||||
else:
|
||||
audio_language = []
|
||||
|
||||
if action == 'update':
|
||||
return {'title': show["title"],
|
||||
'path': show["path"],
|
||||
|
@ -49,7 +52,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
|
|||
'audio_language': str(audio_language),
|
||||
'sortTitle': show['sortTitle'],
|
||||
'year': str(show['year']),
|
||||
'alternateTitles': alternate_titles,
|
||||
'alternativeTitles': alternate_titles,
|
||||
'tags': str(tags),
|
||||
'seriesType': show['seriesType'],
|
||||
'imdbId': imdbId,
|
||||
|
@ -65,7 +68,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
|
|||
'audio_language': str(audio_language),
|
||||
'sortTitle': show['sortTitle'],
|
||||
'year': str(show['year']),
|
||||
'alternateTitles': alternate_titles,
|
||||
'alternativeTitles': alternate_titles,
|
||||
'tags': str(tags),
|
||||
'seriesType': show['seriesType'],
|
||||
'imdbId': imdbId,
|
||||
|
@ -95,20 +98,28 @@ def episodeParser(episode):
|
|||
else:
|
||||
sceneName = None
|
||||
|
||||
audio_language = []
|
||||
if 'language' in episode['episodeFile'] and len(episode['episodeFile']['language']):
|
||||
item = episode['episodeFile']['language']
|
||||
if isinstance(item, dict):
|
||||
if 'name' in item:
|
||||
audio_language.append(item['name'])
|
||||
elif 'languages' in episode['episodeFile'] and len(episode['episodeFile']['languages']):
|
||||
items = episode['episodeFile']['languages']
|
||||
if isinstance(items, list):
|
||||
for item in items:
|
||||
if settings.general.getboolean('parse_embedded_audio_track'):
|
||||
audio_language = embedded_audio_reader(path_mappings.path_replace(episode['episodeFile']
|
||||
['path']),
|
||||
file_size=episode['episodeFile']['size'],
|
||||
episode_file_id=episode['episodeFile']['id'],
|
||||
use_cache=True)
|
||||
else:
|
||||
audio_language = []
|
||||
if 'language' in episode['episodeFile'] and len(episode['episodeFile']['language']):
|
||||
item = episode['episodeFile']['language']
|
||||
if isinstance(item, dict):
|
||||
if 'name' in item:
|
||||
audio_language.append(item['name'])
|
||||
else:
|
||||
audio_language = TableShows.get(TableShows.sonarrSeriesId == episode['seriesId']).audio_language
|
||||
elif 'languages' in episode['episodeFile'] and len(episode['episodeFile']['languages']):
|
||||
items = episode['episodeFile']['languages']
|
||||
if isinstance(items, list):
|
||||
for item in items:
|
||||
if 'name' in item:
|
||||
audio_language.append(item['name'])
|
||||
else:
|
||||
audio_language = TableShows.get(
|
||||
TableShows.sonarrSeriesId == episode['seriesId']).audio_language
|
||||
|
||||
if 'mediaInfo' in episode['episodeFile']:
|
||||
if 'videoCodec' in episode['episodeFile']['mediaInfo']:
|
||||
|
@ -141,7 +152,7 @@ def episodeParser(episode):
|
|||
'path': episode['episodeFile']['path'],
|
||||
'season': episode['seasonNumber'],
|
||||
'episode': episode['episodeNumber'],
|
||||
'scene_name': sceneName,
|
||||
'sceneName': sceneName,
|
||||
'monitored': str(bool(episode['monitored'])),
|
||||
'format': video_format,
|
||||
'resolution': video_resolution,
|
||||
|
|
|
@ -97,7 +97,7 @@ def update_series(send_event=True):
|
|||
TableShows.audio_language,
|
||||
TableShows.sortTitle,
|
||||
TableShows.year,
|
||||
TableShows.alternateTitles,
|
||||
TableShows.alternativeTitles,
|
||||
TableShows.tags,
|
||||
TableShows.seriesType,
|
||||
TableShows.imdbId,
|
||||
|
@ -200,7 +200,7 @@ def update_one_series(series_id, action):
|
|||
except IntegrityError as e:
|
||||
logging.error(f"BAZARR cannot update series {series['path']} because of {e}")
|
||||
else:
|
||||
sync_episodes(series_id=int(series_id), send_event=True)
|
||||
sync_episodes(series_id=int(series_id), send_event=False)
|
||||
event_stream(type='series', action='update', payload=int(series_id))
|
||||
logging.debug('BAZARR updated this series into the database:{}'.format(path_mappings.path_replace(
|
||||
series['path'])))
|
||||
|
|
|
@ -22,7 +22,7 @@ def get_profile_list():
|
|||
url_sonarr_api_series = url_sonarr() + "/api/v3/languageprofile?apikey=" + apikey_sonarr
|
||||
|
||||
try:
|
||||
profiles_json = requests.get(url_sonarr_api_series, timeout=60, verify=False, headers=headers)
|
||||
profiles_json = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get profiles from Sonarr. Connection Error.")
|
||||
return None
|
||||
|
@ -55,7 +55,7 @@ def get_tags():
|
|||
url_sonarr_api_series = url_sonarr() + "/api/v3/tag?apikey=" + apikey_sonarr
|
||||
|
||||
try:
|
||||
tagsDict = requests.get(url_sonarr_api_series, timeout=60, verify=False, headers=headers)
|
||||
tagsDict = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
except requests.exceptions.ConnectionError:
|
||||
logging.exception("BAZARR Error trying to get tags from Sonarr. Connection Error.")
|
||||
return []
|
||||
|
@ -73,7 +73,7 @@ def get_series_from_sonarr_api(url, apikey_sonarr, sonarr_series_id=None):
|
|||
url_sonarr_api_series = url + "/api/{0}series/{1}?apikey={2}".format(
|
||||
'' if get_sonarr_info.is_legacy() else 'v3/', sonarr_series_id if sonarr_series_id else "", apikey_sonarr)
|
||||
try:
|
||||
r = requests.get(url_sonarr_api_series, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_sonarr_api_series, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.HTTPError as e:
|
||||
if e.response.status_code:
|
||||
|
@ -108,7 +108,7 @@ def get_episodes_from_sonarr_api(url, apikey_sonarr, series_id=None, episode_id=
|
|||
return
|
||||
|
||||
try:
|
||||
r = requests.get(url_sonarr_api_episode, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_sonarr_api_episode, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.HTTPError:
|
||||
logging.exception("BAZARR Error trying to get episodes from Sonarr. Http error.")
|
||||
|
@ -136,7 +136,7 @@ def get_episodesFiles_from_sonarr_api(url, apikey_sonarr, series_id=None, episod
|
|||
return
|
||||
|
||||
try:
|
||||
r = requests.get(url_sonarr_api_episodeFiles, timeout=60, verify=False, headers=headers)
|
||||
r = requests.get(url_sonarr_api_episodeFiles, timeout=int(settings.sonarr.http_timeout), verify=False, headers=headers)
|
||||
r.raise_for_status()
|
||||
except requests.exceptions.HTTPError:
|
||||
logging.exception("BAZARR Error trying to get episodeFiles from Sonarr. Http error.")
|
||||
|
|
|
@ -8,12 +8,12 @@ import ast
|
|||
from subliminal_patch import core, search_external_subtitles
|
||||
|
||||
from languages.custom_lang import CustomLanguage
|
||||
from app.database import get_profiles_list, get_profile_cutoff, TableMovies
|
||||
from languages.get_languages import alpha2_from_alpha3, language_from_alpha2, get_language_set
|
||||
from app.database import get_profiles_list, get_profile_cutoff, TableMovies, get_audio_profile_languages
|
||||
from languages.get_languages import alpha2_from_alpha3, get_language_set
|
||||
from app.config import settings
|
||||
from utilities.helper import get_subtitle_destination_folder
|
||||
from utilities.path_mappings import path_mappings
|
||||
from subtitles.tools.embedded_subs_reader import embedded_subs_reader
|
||||
from utilities.video_analyzer import embedded_subs_reader
|
||||
from app.event_handler import event_stream, show_progress, hide_progress
|
||||
from subtitles.indexer.utils import guess_external_subtitles, get_external_subtitles_path
|
||||
|
||||
|
@ -168,8 +168,8 @@ def list_missing_subtitles_movies(no=None, send_event=True):
|
|||
if desired_subtitles_temp:
|
||||
for language in desired_subtitles_temp['items']:
|
||||
if language['audio_exclude'] == "True":
|
||||
if language_from_alpha2(language['language']) in ast.literal_eval(
|
||||
movie_subtitles['audio_language']):
|
||||
if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
|
||||
movie_subtitles['audio_language'])):
|
||||
continue
|
||||
desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
|
||||
|
||||
|
@ -202,8 +202,9 @@ def list_missing_subtitles_movies(no=None, send_event=True):
|
|||
if cutoff_temp_list:
|
||||
for cutoff_temp in cutoff_temp_list:
|
||||
cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
|
||||
if cutoff_temp['audio_exclude'] == 'True' and language_from_alpha2(cutoff_temp['language']) in \
|
||||
ast.literal_eval(movie_subtitles['audio_language']):
|
||||
if cutoff_temp['audio_exclude'] == 'True' and \
|
||||
any(x['code2'] == cutoff_temp['language'] for x in
|
||||
get_audio_profile_languages(movie_subtitles['audio_language'])):
|
||||
cutoff_met = True
|
||||
elif cutoff_language in actual_subtitles_list:
|
||||
cutoff_met = True
|
||||
|
@ -251,9 +252,7 @@ def list_missing_subtitles_movies(no=None, send_event=True):
|
|||
event_stream(type='badges')
|
||||
|
||||
|
||||
def movies_full_scan_subtitles():
|
||||
use_ffprobe_cache = settings.radarr.getboolean('use_ffprobe_cache')
|
||||
|
||||
def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe_cache')):
|
||||
movies = TableMovies.select(TableMovies.path).dicts()
|
||||
|
||||
count_movies = len(movies)
|
||||
|
@ -263,8 +262,7 @@ def movies_full_scan_subtitles():
|
|||
name='Movies subtitles',
|
||||
value=i,
|
||||
count=count_movies)
|
||||
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']),
|
||||
use_cache=use_ffprobe_cache)
|
||||
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']), use_cache=use_cache)
|
||||
|
||||
hide_progress(id='movies_disk_scan')
|
||||
|
||||
|
|
|
@ -8,12 +8,12 @@ import ast
|
|||
from subliminal_patch import core, search_external_subtitles
|
||||
|
||||
from languages.custom_lang import CustomLanguage
|
||||
from app.database import get_profiles_list, get_profile_cutoff, TableEpisodes, TableShows
|
||||
from languages.get_languages import alpha2_from_alpha3, language_from_alpha2, get_language_set
|
||||
from app.database import get_profiles_list, get_profile_cutoff, TableEpisodes, TableShows, get_audio_profile_languages
|
||||
from languages.get_languages import alpha2_from_alpha3, get_language_set
|
||||
from app.config import settings
|
||||
from utilities.helper import get_subtitle_destination_folder
|
||||
from utilities.path_mappings import path_mappings
|
||||
from subtitles.tools.embedded_subs_reader import embedded_subs_reader
|
||||
from utilities.video_analyzer import embedded_subs_reader
|
||||
from app.event_handler import event_stream, show_progress, hide_progress
|
||||
from subtitles.indexer.utils import guess_external_subtitles, get_external_subtitles_path
|
||||
|
||||
|
@ -176,8 +176,8 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
|
|||
if desired_subtitles_temp:
|
||||
for language in desired_subtitles_temp['items']:
|
||||
if language['audio_exclude'] == "True":
|
||||
if language_from_alpha2(language['language']) in ast.literal_eval(
|
||||
episode_subtitles['audio_language']):
|
||||
if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
|
||||
episode_subtitles['audio_language'])):
|
||||
continue
|
||||
desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
|
||||
|
||||
|
@ -210,8 +210,9 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
|
|||
if cutoff_temp_list:
|
||||
for cutoff_temp in cutoff_temp_list:
|
||||
cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
|
||||
if cutoff_temp['audio_exclude'] == 'True' and language_from_alpha2(cutoff_temp['language']) in \
|
||||
ast.literal_eval(episode_subtitles['audio_language']):
|
||||
if cutoff_temp['audio_exclude'] == 'True' and \
|
||||
any(x['code2'] == cutoff_temp['language'] for x in
|
||||
get_audio_profile_languages(episode_subtitles['audio_language'])):
|
||||
cutoff_met = True
|
||||
elif cutoff_language in actual_subtitles_list:
|
||||
cutoff_met = True
|
||||
|
@ -261,9 +262,7 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
|
|||
event_stream(type='badges')
|
||||
|
||||
|
||||
def series_full_scan_subtitles():
|
||||
use_ffprobe_cache = settings.sonarr.getboolean('use_ffprobe_cache')
|
||||
|
||||
def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe_cache')):
|
||||
episodes = TableEpisodes.select(TableEpisodes.path).dicts()
|
||||
|
||||
count_episodes = len(episodes)
|
||||
|
@ -273,7 +272,7 @@ def series_full_scan_subtitles():
|
|||
name='Episodes subtitles',
|
||||
value=i,
|
||||
count=count_episodes)
|
||||
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=use_ffprobe_cache)
|
||||
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=use_cache)
|
||||
|
||||
hide_progress(id='episodes_disk_scan')
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ from subliminal_patch.core_persistent import list_all_subtitles, download_subtit
|
|||
from subliminal_patch.score import ComputeScore
|
||||
|
||||
from languages.get_languages import alpha3_from_alpha2
|
||||
from app.config import get_scores, settings, get_array_from, get_settings
|
||||
from app.config import get_scores, settings, get_array_from
|
||||
from utilities.helper import get_target_folder, force_unicode
|
||||
from app.database import get_profiles_list
|
||||
|
||||
|
|
|
@ -42,7 +42,7 @@ def movies_download_subtitles(no):
|
|||
else:
|
||||
count_movie = 0
|
||||
|
||||
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
|
||||
audio_language_list = get_audio_profile_languages(movie['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -77,21 +77,8 @@ def movies_download_subtitles(no):
|
|||
check_if_still_required=True):
|
||||
|
||||
if result:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
|
||||
history_log_movie(1, no, message, path, language_code, provider, score, subs_id, subs_path)
|
||||
send_notifications_movie(no, message)
|
||||
history_log_movie(1, no, result)
|
||||
send_notifications_movie(no, result.message)
|
||||
|
||||
hide_progress(id='movie_search_progress_{}'.format(no))
|
||||
|
|
|
@ -26,7 +26,7 @@ def series_download_subtitles(no):
|
|||
TableEpisodes.missing_subtitles,
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.tags,
|
||||
TableShows.seriesType,
|
||||
TableEpisodes.audio_language,
|
||||
|
@ -57,7 +57,7 @@ def series_download_subtitles(no):
|
|||
value=i,
|
||||
count=count_episodes_details)
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
|
||||
audio_language_list = get_audio_profile_languages(episode['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -76,28 +76,14 @@ def series_download_subtitles(no):
|
|||
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
|
||||
languages,
|
||||
audio_language,
|
||||
str(episode['scene_name']),
|
||||
str(episode['sceneName']),
|
||||
episode['title'],
|
||||
'series',
|
||||
check_if_still_required=True):
|
||||
if result:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
|
||||
history_log(1, no, episode['sonarrEpisodeId'], message, path, language_code, provider, score,
|
||||
subs_id, subs_path)
|
||||
send_notifications(no, episode['sonarrEpisodeId'], message)
|
||||
history_log(1, no, episode['sonarrEpisodeId'], result)
|
||||
send_notifications(no, episode['sonarrEpisodeId'], result.message)
|
||||
else:
|
||||
logging.info("BAZARR All providers are throttled")
|
||||
break
|
||||
|
@ -112,7 +98,7 @@ def episode_download_subtitles(no, send_progress=False):
|
|||
TableEpisodes.missing_subtitles,
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.sceneName,
|
||||
TableShows.tags,
|
||||
TableShows.title,
|
||||
TableShows.sonarrSeriesId,
|
||||
|
@ -142,7 +128,7 @@ def episode_download_subtitles(no, send_progress=False):
|
|||
value=0,
|
||||
count=1)
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
|
||||
audio_language_list = get_audio_profile_languages(episode['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -161,28 +147,14 @@ def episode_download_subtitles(no, send_progress=False):
|
|||
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
|
||||
languages,
|
||||
audio_language,
|
||||
str(episode['scene_name']),
|
||||
str(episode['sceneName']),
|
||||
episode['title'],
|
||||
'series',
|
||||
check_if_still_required=True):
|
||||
if result:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
|
||||
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
|
||||
language_code, provider, score, subs_id, subs_path)
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
|
||||
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
|
||||
|
||||
if send_progress:
|
||||
hide_progress(id='episode_search_progress_{}'.format(no))
|
||||
|
|
|
@ -28,11 +28,11 @@ def postprocessing(command, path):
|
|||
except Exception as e:
|
||||
logging.error('BAZARR Post-processing failed for file ' + path + ' : ' + repr(e))
|
||||
else:
|
||||
if out == "":
|
||||
logging.info(
|
||||
'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
|
||||
elif err:
|
||||
if err:
|
||||
logging.error(
|
||||
'BAZARR Post-processing result for file ' + path + ' : ' + err.replace('\n', ' ').replace('\r', ' '))
|
||||
elif out == "":
|
||||
logging.info(
|
||||
'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
|
||||
else:
|
||||
logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
|
||||
|
|
|
@ -5,7 +5,7 @@ import logging
|
|||
|
||||
from app.config import settings
|
||||
from utilities.path_mappings import path_mappings
|
||||
from utilities.post_processing import pp_replace
|
||||
from utilities.post_processing import pp_replace, set_chmod
|
||||
from languages.get_languages import alpha2_from_alpha3, alpha2_from_language, alpha3_from_language, language_from_alpha3
|
||||
from app.database import TableEpisodes, TableMovies
|
||||
from utilities.analytics import track_event
|
||||
|
@ -14,10 +14,27 @@ from sonarr.notify import notify_sonarr
|
|||
from app.event_handler import event_stream
|
||||
|
||||
from .utils import _get_download_code3
|
||||
from .sync import sync_subtitles
|
||||
from .post_processing import postprocessing
|
||||
|
||||
|
||||
class ProcessSubtitlesResult:
|
||||
def __init__(self, message, reversed_path, downloaded_language_code2, downloaded_provider, score, forced,
|
||||
subtitle_id, reversed_subtitles_path, hearing_impaired):
|
||||
self.message = message
|
||||
self.path = reversed_path
|
||||
self.provider = downloaded_provider
|
||||
self.score = score
|
||||
self.subs_id = subtitle_id
|
||||
self.subs_path = reversed_subtitles_path
|
||||
|
||||
if hearing_impaired:
|
||||
self.language_code = downloaded_language_code2 + ":hi"
|
||||
elif forced:
|
||||
self.language_code = downloaded_language_code2 + ":forced"
|
||||
else:
|
||||
self.language_code = downloaded_language_code2
|
||||
|
||||
|
||||
def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_upgrade=False, is_manual=False):
|
||||
use_postprocessing = settings.general.getboolean('use_postprocessing')
|
||||
postprocessing_cmd = settings.general.postprocessing_cmd
|
||||
|
@ -59,6 +76,8 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
|
|||
return
|
||||
series_id = episode_metadata['sonarrSeriesId']
|
||||
episode_id = episode_metadata['sonarrEpisodeId']
|
||||
|
||||
from .sync import sync_subtitles
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
|
@ -74,6 +93,8 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
|
|||
return
|
||||
series_id = ""
|
||||
episode_id = movie_metadata['radarrId']
|
||||
|
||||
from .sync import sync_subtitles
|
||||
sync_subtitles(video_path=path, srt_path=downloaded_path,
|
||||
forced=subtitle.language.forced,
|
||||
srt_lang=downloaded_language_code2, media_type=media_type,
|
||||
|
@ -95,6 +116,7 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
|
|||
if not use_pp_threshold or (use_pp_threshold and percent_score < pp_threshold):
|
||||
logging.debug("BAZARR Using post-processing command: {}".format(command))
|
||||
postprocessing(command, path)
|
||||
set_chmod(subtitles_path=downloaded_path)
|
||||
else:
|
||||
logging.debug("BAZARR post-processing skipped because subtitles score isn't below this "
|
||||
"threshold value: " + str(pp_threshold) + "%")
|
||||
|
@ -115,5 +137,12 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
|
|||
|
||||
track_event(category=downloaded_provider, action=action, label=downloaded_language)
|
||||
|
||||
return message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score, \
|
||||
subtitle.language.forced, subtitle.id, reversed_subtitles_path, subtitle.language.hi
|
||||
return ProcessSubtitlesResult(message=message,
|
||||
reversed_path=reversed_path,
|
||||
downloaded_language_code2=downloaded_language_code2,
|
||||
downloaded_provider=downloaded_provider,
|
||||
score=subtitle.score,
|
||||
forced=subtitle.language.forced,
|
||||
subtitle_id=subtitle.id,
|
||||
reversed_subtitles_path=reversed_subtitles_path,
|
||||
hearing_impaired=subtitle.language.hi)
|
||||
|
|
|
@ -23,7 +23,7 @@ def refine_from_db(path, video):
|
|||
TableEpisodes.title.alias('episodeTitle'),
|
||||
TableShows.year,
|
||||
TableShows.tvdbId,
|
||||
TableShows.alternateTitles,
|
||||
TableShows.alternativeTitles,
|
||||
TableEpisodes.format,
|
||||
TableEpisodes.resolution,
|
||||
TableEpisodes.video_codec,
|
||||
|
@ -43,10 +43,11 @@ def refine_from_db(path, video):
|
|||
|
||||
# Only refine year as a fallback
|
||||
if not video.year and data['year']:
|
||||
if int(data['year']) > 0: video.year = int(data['year'])
|
||||
if int(data['year']) > 0:
|
||||
video.year = int(data['year'])
|
||||
|
||||
video.series_tvdb_id = int(data['tvdbId'])
|
||||
video.alternative_series = ast.literal_eval(data['alternateTitles'])
|
||||
video.alternative_series = ast.literal_eval(data['alternativeTitles'])
|
||||
if data['imdbId'] and not video.series_imdb_id:
|
||||
video.series_imdb_id = data['imdbId']
|
||||
if not video.source:
|
||||
|
@ -77,7 +78,8 @@ def refine_from_db(path, video):
|
|||
|
||||
# Only refine year as a fallback
|
||||
if not video.year and data['year']:
|
||||
if int(data['year']) > 0: video.year = int(data['year'])
|
||||
if int(data['year']) > 0:
|
||||
video.year = int(data['year'])
|
||||
|
||||
if data['imdbId'] and not video.imdb_id:
|
||||
video.imdb_id = data['imdbId']
|
||||
|
|
|
@ -7,7 +7,7 @@ from subliminal import Movie
|
|||
|
||||
from utilities.path_mappings import path_mappings
|
||||
from app.database import TableEpisodes, TableMovies
|
||||
from subtitles.tools.embedded_subs_reader import parse_video_metadata
|
||||
from utilities.video_analyzer import parse_video_metadata
|
||||
|
||||
|
||||
def refine_from_ffprobe(path, video):
|
||||
|
@ -32,7 +32,7 @@ def refine_from_ffprobe(path, video):
|
|||
data = parse_video_metadata(file=path, file_size=file_id['file_size'],
|
||||
episode_file_id=file_id['episode_file_id'])
|
||||
|
||||
if 'ffprobe' not in data and 'mediainfo' not in data:
|
||||
if not data or ('ffprobe' not in data and 'mediainfo' not in data):
|
||||
logging.debug("No cache available for this file: {}".format(path))
|
||||
return video
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@ from languages.get_languages import language_from_alpha2
|
|||
from utilities.path_mappings import path_mappings
|
||||
from subtitles.indexer.series import store_subtitles
|
||||
from subtitles.indexer.movies import store_subtitles_movie
|
||||
from subtitles.processing import ProcessSubtitlesResult
|
||||
from sonarr.history import history_log
|
||||
from radarr.history import history_log_movie
|
||||
from sonarr.notify import notify_sonarr
|
||||
|
@ -35,7 +36,15 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
|
|||
language_log += ':forced'
|
||||
language_string += ' forced'
|
||||
|
||||
result = language_string + " subtitles deleted from disk."
|
||||
result = ProcessSubtitlesResult(message=language_string + " subtitles deleted from disk.",
|
||||
reversed_path=path_mappings.path_replace_reverse(media_path),
|
||||
downloaded_language_code2=language_log,
|
||||
downloaded_provider=None,
|
||||
score=None,
|
||||
forced=None,
|
||||
subtitle_id=None,
|
||||
reversed_subtitles_path=path_mappings.path_replace_reverse(subtitles_path),
|
||||
hearing_impaired=None)
|
||||
|
||||
if media_type == 'series':
|
||||
try:
|
||||
|
@ -45,9 +54,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
|
|||
store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
|
||||
return False
|
||||
else:
|
||||
history_log(0, sonarr_series_id, sonarr_episode_id, result, language=language_log,
|
||||
video_path=path_mappings.path_replace_reverse(media_path),
|
||||
subtitles_path=path_mappings.path_replace_reverse(subtitles_path))
|
||||
history_log(0, sonarr_series_id, sonarr_episode_id, result)
|
||||
store_subtitles(path_mappings.path_replace_reverse(media_path), media_path)
|
||||
notify_sonarr(sonarr_series_id)
|
||||
event_stream(type='series', action='update', payload=sonarr_series_id)
|
||||
|
@ -61,9 +68,7 @@ def delete_subtitles(media_type, language, forced, hi, media_path, subtitles_pat
|
|||
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
|
||||
return False
|
||||
else:
|
||||
history_log_movie(0, radarr_id, result, language=language_log,
|
||||
video_path=path_mappings.path_replace_reverse_movie(media_path),
|
||||
subtitles_path=path_mappings.path_replace_reverse_movie(subtitles_path))
|
||||
history_log_movie(0, radarr_id, result)
|
||||
store_subtitles_movie(path_mappings.path_replace_reverse_movie(media_path), media_path)
|
||||
notify_radarr(radarr_id)
|
||||
event_stream(type='movie-wanted', action='update', payload=radarr_id)
|
||||
|
|
|
@ -4,19 +4,22 @@ import os
|
|||
import logging
|
||||
|
||||
from subliminal_patch.subtitle import Subtitle
|
||||
from subliminal_patch.core import get_subtitle_path
|
||||
from subzero.language import Language
|
||||
|
||||
from app.config import settings
|
||||
from languages.custom_lang import CustomLanguage
|
||||
from languages.get_languages import alpha3_from_alpha2
|
||||
|
||||
|
||||
def subtitles_apply_mods(language, subtitle_path, mods, use_original_format):
|
||||
def subtitles_apply_mods(language, subtitle_path, mods, use_original_format, video_path):
|
||||
language = alpha3_from_alpha2(language)
|
||||
custom = CustomLanguage.from_value(language, "alpha3")
|
||||
if custom is None:
|
||||
lang_obj = Language(language)
|
||||
else:
|
||||
lang_obj = custom.subzero_language()
|
||||
single = settings.general.getboolean('single_language')
|
||||
|
||||
sub = Subtitle(lang_obj, mods=mods, original_format=use_original_format)
|
||||
with open(subtitle_path, 'rb') as f:
|
||||
|
@ -31,8 +34,17 @@ def subtitles_apply_mods(language, subtitle_path, mods, use_original_format):
|
|||
|
||||
content = sub.get_modified_content()
|
||||
if content:
|
||||
if hasattr(sub, 'mods') and isinstance(sub.mods, list) and 'remove_HI' in sub.mods:
|
||||
modded_subtitles_path = get_subtitle_path(video_path, None if single else sub.language,
|
||||
forced_tag=sub.language.forced, hi_tag=False, tags=[])
|
||||
else:
|
||||
modded_subtitles_path = subtitle_path
|
||||
|
||||
if os.path.exists(subtitle_path):
|
||||
os.remove(subtitle_path)
|
||||
|
||||
with open(subtitle_path, 'wb') as f:
|
||||
if os.path.exists(modded_subtitles_path):
|
||||
os.remove(modded_subtitles_path)
|
||||
|
||||
with open(modded_subtitles_path, 'wb') as f:
|
||||
f.write(content)
|
||||
|
|
|
@ -8,6 +8,7 @@ from ffsubsync.ffsubsync import run, make_parser
|
|||
from utilities.binaries import get_binary
|
||||
from radarr.history import history_log_movie
|
||||
from sonarr.history import history_log
|
||||
from subtitles.processing import ProcessSubtitlesResult
|
||||
from languages.get_languages import language_from_alpha2
|
||||
from utilities.path_mappings import path_mappings
|
||||
from app.config import settings
|
||||
|
@ -83,14 +84,21 @@ class SubSyncer:
|
|||
"scale factor of {2}.".format(language_from_alpha2(srt_lang), offset_seconds,
|
||||
"{:.2f}".format(framerate_scale_factor))
|
||||
|
||||
result = ProcessSubtitlesResult(message=message,
|
||||
reversed_path=path_mappings.path_replace_reverse(self.reference),
|
||||
downloaded_language_code2=srt_lang,
|
||||
downloaded_provider=None,
|
||||
score=None,
|
||||
forced=None,
|
||||
subtitle_id=None,
|
||||
reversed_subtitles_path=srt_path,
|
||||
hearing_impaired=None)
|
||||
|
||||
if media_type == 'series':
|
||||
history_log(action=5, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,
|
||||
description=message, video_path=path_mappings.path_replace_reverse(self.reference),
|
||||
language=srt_lang, subtitles_path=srt_path)
|
||||
result=result)
|
||||
else:
|
||||
history_log_movie(action=5, radarr_id=radarr_id, description=message,
|
||||
video_path=path_mappings.path_replace_reverse_movie(self.reference),
|
||||
language=srt_lang, subtitles_path=srt_path)
|
||||
history_log_movie(action=5, radarr_id=radarr_id, result=result)
|
||||
else:
|
||||
logging.error('BAZARR unable to sync subtitles: {0}'.format(self.srtin))
|
||||
|
||||
|
|
|
@ -11,6 +11,7 @@ from languages.custom_lang import CustomLanguage
|
|||
from languages.get_languages import alpha3_from_alpha2, language_from_alpha2, language_from_alpha3
|
||||
from radarr.history import history_log_movie
|
||||
from sonarr.history import history_log
|
||||
from subtitles.processing import ProcessSubtitlesResult
|
||||
|
||||
|
||||
def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, forced, hi, media_type, sonarr_series_id,
|
||||
|
@ -84,11 +85,19 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
|
|||
|
||||
message = f"{language_from_alpha2(from_lang)} subtitles translated to {language_from_alpha3(to_lang)}."
|
||||
|
||||
result = ProcessSubtitlesResult(message=message,
|
||||
reversed_path=video_path,
|
||||
downloaded_language_code2=to_lang,
|
||||
downloaded_provider=None,
|
||||
score=None,
|
||||
forced=None,
|
||||
subtitle_id=None,
|
||||
reversed_subtitles_path=dest_srt_file,
|
||||
hearing_impaired=None)
|
||||
|
||||
if media_type == 'series':
|
||||
history_log(action=6, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id,
|
||||
description=message, video_path=video_path, language=to_lang, subtitles_path=dest_srt_file)
|
||||
history_log(action=6, sonarr_series_id=sonarr_series_id, sonarr_episode_id=sonarr_episode_id, result=result)
|
||||
else:
|
||||
history_log_movie(action=6, radarr_id=radarr_id, description=message,
|
||||
video_path=video_path, language=to_lang, subtitles_path=dest_srt_file)
|
||||
history_log_movie(action=6, radarr_id=radarr_id, result=result)
|
||||
|
||||
return dest_srt_file
|
||||
|
|
|
@ -1,132 +1,34 @@
|
|||
# coding=utf-8
|
||||
# fmt: off
|
||||
|
||||
import os
|
||||
import logging
|
||||
import operator
|
||||
|
||||
from functools import reduce
|
||||
from peewee import fn
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from functools import reduce
|
||||
|
||||
from app.config import settings
|
||||
from utilities.path_mappings import path_mappings
|
||||
from subtitles.indexer.series import store_subtitles
|
||||
from subtitles.indexer.movies import store_subtitles_movie
|
||||
from radarr.history import history_log_movie
|
||||
from sonarr.history import history_log
|
||||
from app.notifier import send_notifications, send_notifications_movie
|
||||
from app.get_providers import get_providers
|
||||
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes, TableMovies, \
|
||||
TableHistory, TableHistoryMovie
|
||||
from app.event_handler import show_progress, hide_progress
|
||||
|
||||
from app.get_providers import get_providers
|
||||
from app.notifier import send_notifications, send_notifications_movie
|
||||
from radarr.history import history_log_movie
|
||||
from sonarr.history import history_log
|
||||
from subtitles.indexer.movies import store_subtitles_movie
|
||||
from subtitles.indexer.series import store_subtitles
|
||||
from utilities.path_mappings import path_mappings
|
||||
from .download import generate_subtitles
|
||||
|
||||
|
||||
def upgrade_subtitles():
|
||||
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
|
||||
minimum_timestamp = ((datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
|
||||
datetime(1970, 1, 1)).total_seconds()
|
||||
|
||||
if settings.general.getboolean('upgrade_manual'):
|
||||
query_actions = [1, 2, 3, 4, 6]
|
||||
else:
|
||||
query_actions = [1, 3]
|
||||
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
upgradable_episodes_conditions = [(TableHistory.action << query_actions),
|
||||
(TableHistory.timestamp > minimum_timestamp),
|
||||
(TableHistory.score.is_null(False))]
|
||||
upgradable_episodes_conditions += get_exclusion_clause('series')
|
||||
upgradable_episodes = TableHistory.select(TableHistory.video_path,
|
||||
TableHistory.language,
|
||||
TableHistory.score,
|
||||
TableShows.tags,
|
||||
TableShows.profileId,
|
||||
TableEpisodes.audio_language,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.title,
|
||||
TableEpisodes.sonarrSeriesId,
|
||||
TableHistory.action,
|
||||
TableHistory.subtitles_path,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
fn.MAX(TableHistory.timestamp).alias('timestamp'),
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.season,
|
||||
TableEpisodes.episode,
|
||||
TableShows.title.alias('seriesTitle'),
|
||||
TableShows.seriesType)\
|
||||
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
|
||||
.where(reduce(operator.and_, upgradable_episodes_conditions))\
|
||||
.group_by(TableHistory.video_path, TableHistory.language)\
|
||||
.dicts()
|
||||
upgradable_episodes_not_perfect = []
|
||||
for upgradable_episode in upgradable_episodes:
|
||||
if upgradable_episode['timestamp'] > minimum_timestamp:
|
||||
try:
|
||||
int(upgradable_episode['score'])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
if int(upgradable_episode['score']) < 360 or (settings.general.getboolean('upgrade_manual') and
|
||||
upgradable_episode['action'] in [2, 4, 6]):
|
||||
upgradable_episodes_not_perfect.append(upgradable_episode)
|
||||
|
||||
episodes_to_upgrade = []
|
||||
for episode in upgradable_episodes_not_perfect:
|
||||
if os.path.exists(path_mappings.path_replace(episode['subtitles_path'])) and \
|
||||
os.path.exists(path_mappings.path_replace(episode['video_path'])) and \
|
||||
int(episode['score']) < 357:
|
||||
episodes_to_upgrade.append(episode)
|
||||
use_sonarr = settings.general.getboolean('use_sonarr')
|
||||
use_radarr = settings.general.getboolean('use_radarr')
|
||||
|
||||
if use_sonarr:
|
||||
episodes_to_upgrade = get_upgradable_episode_subtitles()
|
||||
count_episode_to_upgrade = len(episodes_to_upgrade)
|
||||
|
||||
if settings.general.getboolean('use_radarr'):
|
||||
upgradable_movies_conditions = [(TableHistoryMovie.action << query_actions),
|
||||
(TableHistoryMovie.timestamp > minimum_timestamp),
|
||||
(TableHistoryMovie.score.is_null(False))]
|
||||
upgradable_movies_conditions += get_exclusion_clause('movie')
|
||||
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
|
||||
TableHistoryMovie.language,
|
||||
TableHistoryMovie.score,
|
||||
TableMovies.profileId,
|
||||
TableHistoryMovie.action,
|
||||
TableHistoryMovie.subtitles_path,
|
||||
TableMovies.audio_language,
|
||||
TableMovies.sceneName,
|
||||
fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
|
||||
TableMovies.monitored,
|
||||
TableMovies.tags,
|
||||
TableMovies.radarrId,
|
||||
TableMovies.title)\
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
|
||||
.where(reduce(operator.and_, upgradable_movies_conditions))\
|
||||
.group_by(TableHistoryMovie.video_path, TableHistoryMovie.language)\
|
||||
.dicts()
|
||||
upgradable_movies_not_perfect = []
|
||||
for upgradable_movie in upgradable_movies:
|
||||
if upgradable_movie['timestamp'] > minimum_timestamp:
|
||||
try:
|
||||
int(upgradable_movie['score'])
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
if int(upgradable_movie['score']) < 120 or (settings.general.getboolean('upgrade_manual') and
|
||||
upgradable_movie['action'] in [2, 4, 6]):
|
||||
upgradable_movies_not_perfect.append(upgradable_movie)
|
||||
|
||||
movies_to_upgrade = []
|
||||
for movie in upgradable_movies_not_perfect:
|
||||
if os.path.exists(path_mappings.path_replace_movie(movie['subtitles_path'])) and \
|
||||
os.path.exists(path_mappings.path_replace_movie(movie['video_path'])) and \
|
||||
int(movie['score']) < 117:
|
||||
movies_to_upgrade.append(movie)
|
||||
|
||||
count_movie_to_upgrade = len(movies_to_upgrade)
|
||||
|
||||
if settings.general.getboolean('use_sonarr'):
|
||||
for i, episode in enumerate(episodes_to_upgrade):
|
||||
providers_list = get_providers()
|
||||
|
||||
|
@ -142,20 +44,10 @@ def upgrade_subtitles():
|
|||
if not providers_list:
|
||||
logging.info("BAZARR All providers are throttled")
|
||||
return
|
||||
if episode['language'].endswith('forced'):
|
||||
language = episode['language'].split(':')[0]
|
||||
is_forced = "True"
|
||||
is_hi = "False"
|
||||
elif episode['language'].endswith('hi'):
|
||||
language = episode['language'].split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "True"
|
||||
else:
|
||||
language = episode['language'].split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "False"
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
|
||||
language, is_forced, is_hi = parse_language_string(episode['language'])
|
||||
|
||||
audio_language_list = get_audio_profile_languages(episode['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -164,7 +56,7 @@ def upgrade_subtitles():
|
|||
result = list(generate_subtitles(path_mappings.path_replace(episode['video_path']),
|
||||
[(language, is_hi, is_forced)],
|
||||
audio_language,
|
||||
str(episode['scene_name']),
|
||||
str(episode['sceneName']),
|
||||
episode['seriesTitle'],
|
||||
'series',
|
||||
forced_minimum_score=int(episode['score']),
|
||||
|
@ -172,27 +64,16 @@ def upgrade_subtitles():
|
|||
|
||||
if result:
|
||||
result = result[0]
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles(episode['video_path'], path_mappings.path_replace(episode['video_path']))
|
||||
history_log(3, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
|
||||
language_code, provider, score, subs_id, subs_path)
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
|
||||
history_log(3, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
|
||||
|
||||
hide_progress(id='upgrade_episodes_progress')
|
||||
|
||||
if settings.general.getboolean('use_radarr'):
|
||||
if use_radarr:
|
||||
movies_to_upgrade = get_upgradable_movies_subtitles()
|
||||
count_movie_to_upgrade = len(movies_to_upgrade)
|
||||
|
||||
for i, movie in enumerate(movies_to_upgrade):
|
||||
providers_list = get_providers()
|
||||
|
||||
|
@ -205,20 +86,10 @@ def upgrade_subtitles():
|
|||
if not providers_list:
|
||||
logging.info("BAZARR All providers are throttled")
|
||||
return
|
||||
if movie['language'].endswith('forced'):
|
||||
language = movie['language'].split(':')[0]
|
||||
is_forced = "True"
|
||||
is_hi = "False"
|
||||
elif movie['language'].endswith('hi'):
|
||||
language = movie['language'].split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "True"
|
||||
else:
|
||||
language = movie['language'].split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "False"
|
||||
|
||||
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
|
||||
language, is_forced, is_hi = parse_language_string(movie['language'])
|
||||
|
||||
audio_language_list = get_audio_profile_languages(movie['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -234,24 +105,152 @@ def upgrade_subtitles():
|
|||
is_upgrade=True))
|
||||
if result:
|
||||
result = result[0]
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles_movie(movie['video_path'],
|
||||
path_mappings.path_replace_movie(movie['video_path']))
|
||||
history_log_movie(3, movie['radarrId'], message, path, language_code, provider, score, subs_id, subs_path)
|
||||
send_notifications_movie(movie['radarrId'], message)
|
||||
history_log_movie(3, movie['radarrId'], result)
|
||||
send_notifications_movie(movie['radarrId'], result.message)
|
||||
|
||||
hide_progress(id='upgrade_movies_progress')
|
||||
|
||||
logging.info('BAZARR Finished searching for Subtitles to upgrade. Check History for more information.')
|
||||
|
||||
|
||||
def get_queries_condition_parameters():
|
||||
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
|
||||
minimum_timestamp = (datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
|
||||
|
||||
if settings.general.getboolean('upgrade_manual'):
|
||||
query_actions = [1, 2, 3, 4, 6]
|
||||
else:
|
||||
query_actions = [1, 3]
|
||||
|
||||
return [minimum_timestamp, query_actions]
|
||||
|
||||
|
||||
def parse_upgradable_list(upgradable_list, perfect_score, media_type):
|
||||
if media_type == 'series':
|
||||
path_replace_method = path_mappings.path_replace
|
||||
else:
|
||||
path_replace_method = path_mappings.path_replace_movie
|
||||
|
||||
items_to_upgrade = []
|
||||
|
||||
for item in upgradable_list:
|
||||
logging.debug(f"Trying to validate eligibility to upgrade for this subtitles: "
|
||||
f"{item['subtitles_path']}")
|
||||
if (item['video_path'], item['language']) in \
|
||||
[(x['video_path'], x['language']) for x in items_to_upgrade]:
|
||||
logging.debug("Newer video path and subtitles language combination already in list of subtitles to "
|
||||
"upgrade, we skip this one.")
|
||||
continue
|
||||
|
||||
if os.path.exists(path_replace_method(item['subtitles_path'])) and \
|
||||
os.path.exists(path_replace_method(item['video_path'])):
|
||||
logging.debug("Video and subtitles file are still there, we continue the eligibility validation.")
|
||||
pass
|
||||
|
||||
items_to_upgrade.append(item)
|
||||
|
||||
if not settings.general.getboolean('upgrade_manual'):
|
||||
logging.debug("Removing history items for manually downloaded or translated subtitles.")
|
||||
items_to_upgrade = [x for x in items_to_upgrade if x['action'] in [2, 4, 6]]
|
||||
|
||||
logging.debug("Removing history items for already perfectly scored subtitles.")
|
||||
items_to_upgrade = [x for x in items_to_upgrade if x['score'] < perfect_score]
|
||||
|
||||
logging.debug(f"Bazarr will try to upgrade {len(items_to_upgrade)} subtitles.")
|
||||
|
||||
return items_to_upgrade
|
||||
|
||||
|
||||
def parse_language_string(language_string):
|
||||
if language_string.endswith('forced'):
|
||||
language = language_string.split(':')[0]
|
||||
is_forced = "True"
|
||||
is_hi = "False"
|
||||
elif language_string.endswith('hi'):
|
||||
language = language_string.split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "True"
|
||||
else:
|
||||
language = language_string.split(':')[0]
|
||||
is_forced = "False"
|
||||
is_hi = "False"
|
||||
|
||||
return [language, is_forced, is_hi]
|
||||
|
||||
|
||||
def get_upgradable_episode_subtitles():
|
||||
minimum_timestamp, query_actions = get_queries_condition_parameters()
|
||||
|
||||
upgradable_episodes_conditions = [(TableHistory.action << query_actions),
|
||||
(TableHistory.timestamp > minimum_timestamp),
|
||||
(TableHistory.score.is_null(False))]
|
||||
upgradable_episodes_conditions += get_exclusion_clause('series')
|
||||
upgradable_episodes = TableHistory.select(TableHistory.video_path,
|
||||
TableHistory.language,
|
||||
TableHistory.score,
|
||||
TableShows.tags,
|
||||
TableShows.profileId,
|
||||
TableEpisodes.audio_language,
|
||||
TableEpisodes.sceneName,
|
||||
TableEpisodes.title,
|
||||
TableEpisodes.sonarrSeriesId,
|
||||
TableHistory.action,
|
||||
TableHistory.subtitles_path,
|
||||
TableEpisodes.sonarrEpisodeId,
|
||||
TableHistory.timestamp.alias('timestamp'),
|
||||
TableEpisodes.monitored,
|
||||
TableEpisodes.season,
|
||||
TableEpisodes.episode,
|
||||
TableShows.title.alias('seriesTitle'),
|
||||
TableShows.seriesType) \
|
||||
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
|
||||
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
|
||||
.where(reduce(operator.and_, upgradable_episodes_conditions)) \
|
||||
.order_by(TableHistory.timestamp.desc()) \
|
||||
.dicts()
|
||||
|
||||
if not upgradable_episodes:
|
||||
return []
|
||||
else:
|
||||
upgradable_episodes = list(upgradable_episodes)
|
||||
logging.debug(f"{len(upgradable_episodes)} potentially upgradable episode subtitles have been found, let's "
|
||||
f"filter them...")
|
||||
|
||||
return parse_upgradable_list(upgradable_list=upgradable_episodes, perfect_score=357, media_type='series')
|
||||
|
||||
|
||||
def get_upgradable_movies_subtitles():
|
||||
minimum_timestamp, query_actions = get_queries_condition_parameters()
|
||||
|
||||
upgradable_movies_conditions = [(TableHistoryMovie.action << query_actions),
|
||||
(TableHistoryMovie.timestamp > minimum_timestamp),
|
||||
(TableHistoryMovie.score.is_null(False))]
|
||||
upgradable_movies_conditions += get_exclusion_clause('movie')
|
||||
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
|
||||
TableHistoryMovie.language,
|
||||
TableHistoryMovie.score,
|
||||
TableMovies.profileId,
|
||||
TableHistoryMovie.action,
|
||||
TableHistoryMovie.subtitles_path,
|
||||
TableMovies.audio_language,
|
||||
TableMovies.sceneName,
|
||||
TableHistoryMovie.timestamp.alias('timestamp'),
|
||||
TableMovies.monitored,
|
||||
TableMovies.tags,
|
||||
TableMovies.radarrId,
|
||||
TableMovies.title) \
|
||||
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
|
||||
.where(reduce(operator.and_, upgradable_movies_conditions)) \
|
||||
.order_by(TableHistoryMovie.timestamp.desc()) \
|
||||
.dicts()
|
||||
|
||||
if not upgradable_movies:
|
||||
return []
|
||||
else:
|
||||
upgradable_movies = list(upgradable_movies)
|
||||
logging.debug(f"{len(upgradable_movies)} potentially upgradable movie subtitles have been found, let's filter "
|
||||
f"them...")
|
||||
|
||||
return parse_upgradable_list(upgradable_list=upgradable_movies, perfect_score=117, media_type='movie')
|
||||
|
|
|
@ -10,24 +10,24 @@ from subliminal_patch.core import save_subtitles
|
|||
from subliminal_patch.subtitle import Subtitle
|
||||
from pysubs2.formats import get_format_identifier
|
||||
|
||||
from languages.get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2, \
|
||||
alpha2_from_language, alpha3_from_language
|
||||
from languages.get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2
|
||||
from app.config import settings, get_array_from
|
||||
from utilities.helper import get_target_folder, force_unicode
|
||||
from utilities.post_processing import pp_replace
|
||||
from utilities.post_processing import pp_replace, set_chmod
|
||||
from utilities.path_mappings import path_mappings
|
||||
from radarr.notify import notify_radarr
|
||||
from sonarr.notify import notify_sonarr
|
||||
from languages.custom_lang import CustomLanguage
|
||||
from app.database import TableEpisodes, TableMovies, TableShows, get_profiles_list
|
||||
from app.event_handler import event_stream
|
||||
from subtitles.processing import ProcessSubtitlesResult
|
||||
|
||||
from .sync import sync_subtitles
|
||||
from .post_processing import postprocessing
|
||||
|
||||
|
||||
def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_type, subtitle, audio_language):
|
||||
logging.debug('BAZARR Manually uploading subtitles for this file: ' + path)
|
||||
def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, audio_language):
|
||||
logging.debug(f'BAZARR Manually uploading subtitles for this file: {path}')
|
||||
|
||||
single = settings.general.getboolean('single_language')
|
||||
|
||||
|
@ -120,7 +120,6 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
|
|||
modifier_string = " forced"
|
||||
else:
|
||||
modifier_string = ""
|
||||
message = language_from_alpha3(language) + modifier_string + " Subtitles manually uploaded."
|
||||
|
||||
if hi:
|
||||
modifier_code = ":hi"
|
||||
|
@ -131,8 +130,6 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
|
|||
uploaded_language_code3 = language + modifier_code
|
||||
uploaded_language = language_from_alpha3(language) + modifier_string
|
||||
uploaded_language_code2 = alpha2_from_alpha3(language) + modifier_code
|
||||
audio_language_code2 = alpha2_from_language(audio_language)
|
||||
audio_language_code3 = alpha3_from_language(audio_language)
|
||||
|
||||
if media_type == 'series':
|
||||
if not episode_metadata:
|
||||
|
@ -152,9 +149,10 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
|
|||
|
||||
if use_postprocessing:
|
||||
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, uploaded_language_code2,
|
||||
uploaded_language_code3, audio_language, audio_language_code2, audio_language_code3, 100,
|
||||
"1", "manual", series_id, episode_id)
|
||||
uploaded_language_code3, audio_language['name'], audio_language['code2'],
|
||||
audio_language['code3'], 100, "1", "manual", series_id, episode_id)
|
||||
postprocessing(command, path)
|
||||
set_chmod(subtitles_path=subtitle_path)
|
||||
|
||||
if media_type == 'series':
|
||||
reversed_path = path_mappings.path_replace_reverse(path)
|
||||
|
@ -169,4 +167,15 @@ def manual_upload_subtitle(path, language, forced, hi, title, scene_name, media_
|
|||
event_stream(type='movie', action='update', payload=movie_metadata['radarrId'])
|
||||
event_stream(type='movie-wanted', action='delete', payload=movie_metadata['radarrId'])
|
||||
|
||||
return message, reversed_path, reversed_subtitles_path
|
||||
result = ProcessSubtitlesResult(message=language_from_alpha3(language) + modifier_string + " Subtitles manually "
|
||||
"uploaded.",
|
||||
reversed_path=reversed_path,
|
||||
downloaded_language_code2=uploaded_language_code2,
|
||||
downloaded_provider=None,
|
||||
score=None,
|
||||
forced=None,
|
||||
subtitle_id=None,
|
||||
reversed_subtitles_path=reversed_subtitles_path,
|
||||
hearing_impaired=None)
|
||||
|
||||
return result
|
||||
|
|
|
@ -20,7 +20,7 @@ from ..download import generate_subtitles
|
|||
|
||||
|
||||
def _wanted_movie(movie):
|
||||
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
|
||||
audio_language_list = get_audio_profile_languages(movie['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -53,24 +53,10 @@ def _wanted_movie(movie):
|
|||
check_if_still_required=True):
|
||||
|
||||
if result:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
|
||||
history_log_movie(1, movie['radarrId'], message, path, language_code, provider, score,
|
||||
subs_id, subs_path)
|
||||
history_log_movie(1, movie['radarrId'], result)
|
||||
event_stream(type='movie-wanted', action='delete', payload=movie['radarrId'])
|
||||
send_notifications_movie(movie['radarrId'], message)
|
||||
send_notifications_movie(movie['radarrId'], result.message)
|
||||
|
||||
|
||||
def wanted_download_subtitles_movie(radarr_id):
|
||||
|
|
|
@ -20,7 +20,7 @@ from ..download import generate_subtitles
|
|||
|
||||
|
||||
def _wanted_episode(episode):
|
||||
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
|
||||
audio_language_list = get_audio_profile_languages(episode['audio_language'])
|
||||
if len(audio_language_list) > 0:
|
||||
audio_language = audio_language_list[0]['name']
|
||||
else:
|
||||
|
@ -47,30 +47,16 @@ def _wanted_episode(episode):
|
|||
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
|
||||
languages,
|
||||
audio_language,
|
||||
str(episode['scene_name']),
|
||||
str(episode['sceneName']),
|
||||
episode['title'],
|
||||
'series',
|
||||
check_if_still_required=True):
|
||||
if result:
|
||||
message = result[0]
|
||||
path = result[1]
|
||||
forced = result[5]
|
||||
if result[8]:
|
||||
language_code = result[2] + ":hi"
|
||||
elif forced:
|
||||
language_code = result[2] + ":forced"
|
||||
else:
|
||||
language_code = result[2]
|
||||
provider = result[3]
|
||||
score = result[4]
|
||||
subs_id = result[6]
|
||||
subs_path = result[7]
|
||||
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
|
||||
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message, path,
|
||||
language_code, provider, score, subs_id, subs_path)
|
||||
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
|
||||
event_stream(type='series', action='update', payload=episode['sonarrSeriesId'])
|
||||
event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId'])
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], message)
|
||||
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
|
||||
|
||||
|
||||
def wanted_download_subtitles(sonarr_episode_id):
|
||||
|
@ -79,7 +65,7 @@ def wanted_download_subtitles(sonarr_episode_id):
|
|||
TableEpisodes.sonarrEpisodeId,
|
||||
TableEpisodes.sonarrSeriesId,
|
||||
TableEpisodes.audio_language,
|
||||
TableEpisodes.scene_name,
|
||||
TableEpisodes.sceneName,
|
||||
TableEpisodes.failedAttempts,
|
||||
TableShows.title)\
|
||||
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
|
||||
|
|
|
@ -47,27 +47,29 @@ def get_backup_files(fullpath=True):
|
|||
|
||||
def backup_to_zip():
|
||||
now = datetime.now()
|
||||
database_backup_file = None
|
||||
now_string = now.strftime("%Y.%m.%d_%H.%M.%S")
|
||||
backup_filename = f"bazarr_backup_v{os.environ['BAZARR_VERSION']}_{now_string}.zip"
|
||||
logging.debug(f'Backup filename will be: {backup_filename}')
|
||||
|
||||
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
|
||||
logging.debug(f'Database file path to backup is: {database_src_file}')
|
||||
if not settings.postgresql.getboolean('enabled'):
|
||||
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
|
||||
logging.debug(f'Database file path to backup is: {database_src_file}')
|
||||
|
||||
try:
|
||||
database_src_con = sqlite3.connect(database_src_file)
|
||||
try:
|
||||
database_src_con = sqlite3.connect(database_src_file)
|
||||
|
||||
database_backup_file = os.path.join(get_backup_path(), 'bazarr_temp.db')
|
||||
database_backup_con = sqlite3.connect(database_backup_file)
|
||||
database_backup_file = os.path.join(get_backup_path(), 'bazarr_temp.db')
|
||||
database_backup_con = sqlite3.connect(database_backup_file)
|
||||
|
||||
with database_backup_con:
|
||||
database_src_con.backup(database_backup_con)
|
||||
with database_backup_con:
|
||||
database_src_con.backup(database_backup_con)
|
||||
|
||||
database_backup_con.close()
|
||||
database_src_con.close()
|
||||
except Exception:
|
||||
database_backup_file = None
|
||||
logging.exception('Unable to backup database file.')
|
||||
database_backup_con.close()
|
||||
database_src_con.close()
|
||||
except Exception:
|
||||
database_backup_file = None
|
||||
logging.exception('Unable to backup database file.')
|
||||
|
||||
config_file = os.path.join(args.config_dir, 'config', 'config.ini')
|
||||
logging.debug(f'Config file path to backup is: {config_file}')
|
||||
|
@ -75,15 +77,14 @@ def backup_to_zip():
|
|||
with ZipFile(os.path.join(get_backup_path(), backup_filename), 'w') as backupZip:
|
||||
if database_backup_file:
|
||||
backupZip.write(database_backup_file, 'bazarr.db')
|
||||
try:
|
||||
os.remove(database_backup_file)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
|
||||
else:
|
||||
logging.debug('Database file is not included in backup. See previous exception')
|
||||
backupZip.write(config_file, 'config.ini')
|
||||
|
||||
try:
|
||||
os.remove(database_backup_file)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
|
||||
|
||||
|
||||
def restore_from_backup():
|
||||
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
|
||||
|
@ -97,30 +98,34 @@ def restore_from_backup():
|
|||
os.remove(restore_config_path)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to restore or delete config.ini to {dest_config_path}')
|
||||
|
||||
try:
|
||||
shutil.copy(restore_database_path, dest_database_path)
|
||||
os.remove(restore_database_path)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to restore or delete db to {dest_database_path}')
|
||||
else:
|
||||
if not settings.postgresql.getboolean('enabled'):
|
||||
try:
|
||||
if os.path.isfile(dest_database_path + '-shm'):
|
||||
os.remove(dest_database_path + '-shm')
|
||||
if os.path.isfile(dest_database_path + '-wal'):
|
||||
os.remove(dest_database_path + '-wal')
|
||||
shutil.copy(restore_database_path, dest_database_path)
|
||||
os.remove(restore_database_path)
|
||||
except OSError:
|
||||
logging.exception('Unable to delete SHM and WAL file.')
|
||||
logging.exception(f'Unable to restore or delete db to {dest_database_path}')
|
||||
else:
|
||||
try:
|
||||
if os.path.isfile(f'{dest_database_path}-shm'):
|
||||
os.remove(f'{dest_database_path}-shm')
|
||||
if os.path.isfile(f'{dest_database_path}-wal'):
|
||||
os.remove(f'{dest_database_path}-wal')
|
||||
except OSError:
|
||||
logging.exception('Unable to delete SHM and WAL file.')
|
||||
try:
|
||||
os.remove(restore_database_path)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to delete {dest_database_path}')
|
||||
|
||||
logging.info('Backup restored successfully. Bazarr will restart.')
|
||||
|
||||
try:
|
||||
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
|
||||
except Exception as e:
|
||||
logging.error('BAZARR Cannot create restart file: ' + repr(e))
|
||||
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
|
||||
else:
|
||||
logging.info('Bazarr is being restarted...')
|
||||
restart_file.write(str(''))
|
||||
restart_file.write('')
|
||||
restart_file.close()
|
||||
os._exit(0)
|
||||
elif os.path.isfile(restore_config_path) or os.path.isfile(restore_database_path):
|
||||
|
@ -134,11 +139,6 @@ def restore_from_backup():
|
|||
except OSError:
|
||||
logging.exception(f'Unable to delete {dest_config_path}')
|
||||
|
||||
try:
|
||||
os.remove(restore_database_path)
|
||||
except OSError:
|
||||
logging.exception(f'Unable to delete {dest_database_path}')
|
||||
|
||||
|
||||
def prepare_restore(filename):
|
||||
src_zip_file_path = os.path.join(get_backup_path(), filename)
|
||||
|
|
|
@ -2,6 +2,10 @@
|
|||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import logging
|
||||
|
||||
from app.config import settings
|
||||
|
||||
|
||||
# Wraps the input string within quotes & escapes the string
|
||||
|
@ -34,3 +38,12 @@ def pp_replace(pp_command, episode, subtitles, language, language_code2, languag
|
|||
pp_command = re.sub(r'[\'"]?{{series_id}}[\'"]?', _escape(str(series_id)), pp_command)
|
||||
pp_command = re.sub(r'[\'"]?{{episode_id}}[\'"]?', _escape(str(episode_id)), pp_command)
|
||||
return pp_command
|
||||
|
||||
|
||||
def set_chmod(subtitles_path):
|
||||
# apply chmod if required
|
||||
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
|
||||
'win') and settings.general.getboolean('chmod_enabled') else None
|
||||
if chmod:
|
||||
logging.debug(f"BAZARR setting permission to {chmod} on {subtitles_path} after custom post-processing.")
|
||||
os.chmod(subtitles_path, chmod)
|
||||
|
|
|
@ -3,9 +3,10 @@
|
|||
import logging
|
||||
import pickle
|
||||
|
||||
from knowit.api import know
|
||||
from knowit.api import know, KnowitException
|
||||
|
||||
from languages.custom_lang import CustomLanguage
|
||||
from languages.get_languages import language_from_alpha3, alpha3_from_alpha2
|
||||
from app.database import TableEpisodes, TableMovies
|
||||
from utilities.path_mappings import path_mappings
|
||||
from app.config import settings
|
||||
|
@ -24,38 +25,38 @@ def _handle_alpha3(detected_language: dict):
|
|||
|
||||
def embedded_subs_reader(file, file_size, episode_file_id=None, movie_file_id=None, use_cache=True):
|
||||
data = parse_video_metadata(file, file_size, episode_file_id, movie_file_id, use_cache=use_cache)
|
||||
und_default_language = alpha3_from_alpha2(settings.general.default_und_embedded_subtitles_lang)
|
||||
|
||||
subtitles_list = []
|
||||
|
||||
if not data:
|
||||
return subtitles_list
|
||||
|
||||
cache_provider = None
|
||||
if data["ffprobe"] and "subtitle" in data["ffprobe"]:
|
||||
for detected_language in data["ffprobe"]["subtitle"]:
|
||||
if "language" not in detected_language:
|
||||
continue
|
||||
|
||||
# Avoid commentary subtitles
|
||||
name = detected_language.get("name", "").lower()
|
||||
if "commentary" in name:
|
||||
logging.debug("Ignoring commentary subtitle: %s", name)
|
||||
continue
|
||||
|
||||
language = _handle_alpha3(detected_language)
|
||||
|
||||
forced = detected_language.get("forced", False)
|
||||
hearing_impaired = detected_language.get("hearing_impaired", False)
|
||||
codec = detected_language.get("format") # or None
|
||||
subtitles_list.append([language, forced, hearing_impaired, codec])
|
||||
|
||||
cache_provider = 'ffprobe'
|
||||
elif 'mediainfo' in data and data["mediainfo"] and "subtitle" in data["mediainfo"]:
|
||||
for detected_language in data["mediainfo"]["subtitle"]:
|
||||
if "language" not in detected_language:
|
||||
continue
|
||||
cache_provider = 'mediainfo'
|
||||
|
||||
if cache_provider:
|
||||
for detected_language in data[cache_provider]["subtitle"]:
|
||||
# Avoid commentary subtitles
|
||||
name = detected_language.get("name", "").lower()
|
||||
if "commentary" in name:
|
||||
logging.debug("Ignoring commentary subtitle: %s", name)
|
||||
logging.debug(f"Ignoring commentary subtitle: {name}")
|
||||
continue
|
||||
|
||||
language = _handle_alpha3(detected_language)
|
||||
if "language" not in detected_language:
|
||||
language = None
|
||||
else:
|
||||
language = _handle_alpha3(detected_language)
|
||||
|
||||
if not language and und_default_language:
|
||||
logging.debug(f"Undefined language embedded subtitles track treated as {language}")
|
||||
language = und_default_language
|
||||
|
||||
if not language:
|
||||
continue
|
||||
|
||||
forced = detected_language.get("forced", False)
|
||||
hearing_impaired = detected_language.get("hearing_impaired", False)
|
||||
|
@ -65,6 +66,34 @@ def embedded_subs_reader(file, file_size, episode_file_id=None, movie_file_id=No
|
|||
return subtitles_list
|
||||
|
||||
|
||||
def embedded_audio_reader(file, file_size, episode_file_id=None, movie_file_id=None, use_cache=True):
|
||||
data = parse_video_metadata(file, file_size, episode_file_id, movie_file_id, use_cache=use_cache)
|
||||
|
||||
audio_list = []
|
||||
|
||||
if not data:
|
||||
return audio_list
|
||||
|
||||
cache_provider = None
|
||||
if data["ffprobe"] and "audio" in data["ffprobe"]:
|
||||
cache_provider = 'ffprobe'
|
||||
elif 'mediainfo' in data and data["mediainfo"] and "audio" in data["mediainfo"]:
|
||||
cache_provider = 'mediainfo'
|
||||
|
||||
if cache_provider:
|
||||
for detected_language in data[cache_provider]["audio"]:
|
||||
if "language" not in detected_language:
|
||||
audio_list.append(None)
|
||||
continue
|
||||
|
||||
language = language_from_alpha3(detected_language["language"].alpha3)
|
||||
|
||||
if language not in audio_list:
|
||||
audio_list.append(language)
|
||||
|
||||
return audio_list
|
||||
|
||||
|
||||
def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=None, use_cache=True):
|
||||
# Define default data keys value
|
||||
data = {
|
||||
|
@ -121,10 +150,18 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
|
|||
|
||||
# if we have ffprobe available
|
||||
if ffprobe_path:
|
||||
data["ffprobe"] = know(video_path=file, context={"provider": "ffmpeg", "ffmpeg": ffprobe_path})
|
||||
try:
|
||||
data["ffprobe"] = know(video_path=file, context={"provider": "ffmpeg", "ffmpeg": ffprobe_path})
|
||||
except KnowitException as e:
|
||||
logging.error(f"BAZARR ffprobe cannot analyze this video file {file}. Could it be corrupted? {e}")
|
||||
return None
|
||||
# or if we have mediainfo available
|
||||
elif mediainfo_path:
|
||||
data["mediainfo"] = know(video_path=file, context={"provider": "mediainfo", "mediainfo": mediainfo_path})
|
||||
try:
|
||||
data["mediainfo"] = know(video_path=file, context={"provider": "mediainfo", "mediainfo": mediainfo_path})
|
||||
except KnowitException as e:
|
||||
logging.error(f"BAZARR mediainfo cannot analyze this video file {file}. Could it be corrupted? {e}")
|
||||
return None
|
||||
# else, we warn user of missing binary
|
||||
else:
|
||||
logging.error("BAZARR require ffmpeg/ffprobe or mediainfo, please install it and make sure to choose it in "
|
|
@ -1,6 +1,7 @@
|
|||
{
|
||||
"rules": {
|
||||
"no-console": "error",
|
||||
"camelcase": "warn",
|
||||
"@typescript-eslint/explicit-module-boundary-types": "off",
|
||||
"@typescript-eslint/no-empty-function": "warn",
|
||||
"@typescript-eslint/no-empty-interface": "off",
|
||||
|
@ -11,5 +12,15 @@
|
|||
"plugin:react-hooks/recommended",
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended"
|
||||
],
|
||||
"plugins": ["testing-library"],
|
||||
"overrides": [
|
||||
{
|
||||
"files": [
|
||||
"**/__tests__/**/*.[jt]s?(x)",
|
||||
"**/?(*.)+(spec|test).[jt]s?(x)"
|
||||
],
|
||||
"extends": ["plugin:testing-library/react"]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
build
|
||||
dist
|
||||
converage
|
||||
coverage
|
||||
public
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -14,11 +14,11 @@
|
|||
"private": true,
|
||||
"dependencies": {
|
||||
"@mantine/core": "^5.6.0",
|
||||
"@mantine/dropzone": "^5.6.0",
|
||||
"@mantine/form": "^5.6.0",
|
||||
"@mantine/hooks": "^5.6.0",
|
||||
"@mantine/modals": "^5.6.0",
|
||||
"@mantine/notifications": "^5.6.0",
|
||||
"@mantine/dropzone": "^5.6.0",
|
||||
"axios": "^0.27.2",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
|
@ -34,7 +34,7 @@
|
|||
"@fortawesome/free-solid-svg-icons": "^6.2.0",
|
||||
"@fortawesome/react-fontawesome": "^0.2.0",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@testing-library/react": "^12.1.5",
|
||||
"@testing-library/react": "^12.1.0",
|
||||
"@testing-library/react-hooks": "^8.0.1",
|
||||
"@testing-library/user-event": "^14.4.3",
|
||||
"@types/lodash": "^4.14.0",
|
||||
|
@ -43,10 +43,13 @@
|
|||
"@types/react-dom": "^17.0.0",
|
||||
"@types/react-table": "^7.7.0",
|
||||
"@vitejs/plugin-react": "^2.2.0",
|
||||
"@vitest/coverage-c8": "^0.25.0",
|
||||
"@vitest/ui": "^0.25.0",
|
||||
"clsx": "^1.2.0",
|
||||
"eslint": "^8.26.0",
|
||||
"eslint-config-react-app": "^7.0.1",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-testing-library": "^5.9.0",
|
||||
"husky": "^8.0.2",
|
||||
"jsdom": "^20.0.1",
|
||||
"lodash": "^4.17.0",
|
||||
|
@ -59,8 +62,8 @@
|
|||
"sass": "^1.55.0",
|
||||
"typescript": "^4",
|
||||
"vite": "^3.2.1",
|
||||
"vite-plugin-checker": "^0.5.1",
|
||||
"vitest": "^0.24.3"
|
||||
"vite-plugin-checker": "^0.5.5",
|
||||
"vitest": "^0.25.0"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "vite",
|
||||
|
@ -70,6 +73,8 @@
|
|||
"check:ts": "tsc --noEmit --incremental false",
|
||||
"check:fmt": "prettier -c .",
|
||||
"test": "vitest",
|
||||
"test:ui": "vitest --ui",
|
||||
"coverage": "vitest run --coverage",
|
||||
"format": "prettier -w .",
|
||||
"prepare": "cd .. && husky install frontend/.husky"
|
||||
},
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
import { render } from "@/tests";
|
||||
import { describe, it } from "vitest";
|
||||
import App from ".";
|
||||
|
||||
describe("App", () => {
|
||||
it("should render without crash", () => {
|
||||
render(<App />);
|
||||
});
|
||||
});
|
|
@ -4,7 +4,7 @@ import { Layout } from "@/constants";
|
|||
import NavbarProvider from "@/contexts/Navbar";
|
||||
import OnlineProvider from "@/contexts/Online";
|
||||
import { notification } from "@/modules/task";
|
||||
import CriticalError from "@/pages/CriticalError";
|
||||
import CriticalError from "@/pages/errors/CriticalError";
|
||||
import { RouterNames } from "@/Router/RouterNames";
|
||||
import { Environment } from "@/utilities";
|
||||
import { AppShell } from "@mantine/core";
|
||||
|
|
|
@ -10,10 +10,10 @@ const Redirector: FunctionComponent = () => {
|
|||
|
||||
useEffect(() => {
|
||||
if (data) {
|
||||
const { use_sonarr, use_radarr } = data.general;
|
||||
if (use_sonarr) {
|
||||
const { use_sonarr: useSonarr, use_radarr: useRadarr } = data.general;
|
||||
if (useSonarr) {
|
||||
navigate("/series");
|
||||
} else if (use_radarr) {
|
||||
} else if (useRadarr) {
|
||||
navigate("/movies");
|
||||
} else {
|
||||
navigate("/settings/general");
|
||||
|
|
|
@ -6,12 +6,12 @@ import Authentication from "@/pages/Authentication";
|
|||
import BlacklistMoviesView from "@/pages/Blacklist/Movies";
|
||||
import BlacklistSeriesView from "@/pages/Blacklist/Series";
|
||||
import Episodes from "@/pages/Episodes";
|
||||
import NotFound from "@/pages/errors/NotFound";
|
||||
import MoviesHistoryView from "@/pages/History/Movies";
|
||||
import SeriesHistoryView from "@/pages/History/Series";
|
||||
import MovieView from "@/pages/Movies";
|
||||
import MovieDetailView from "@/pages/Movies/Details";
|
||||
import MovieMassEditor from "@/pages/Movies/Editor";
|
||||
import NotFound from "@/pages/NotFound";
|
||||
import SeriesView from "@/pages/Series";
|
||||
import SeriesMassEditor from "@/pages/Series/Editor";
|
||||
import SettingsGeneralView from "@/pages/Settings/General";
|
||||
|
@ -23,6 +23,7 @@ import SettingsSchedulerView from "@/pages/Settings/Scheduler";
|
|||
import SettingsSonarrView from "@/pages/Settings/Sonarr";
|
||||
import SettingsSubtitlesView from "@/pages/Settings/Subtitles";
|
||||
import SettingsUIView from "@/pages/Settings/UI";
|
||||
import SystemAnnouncementsView from "@/pages/System/Announcements";
|
||||
import SystemBackupsView from "@/pages/System/Backups";
|
||||
import SystemLogsView from "@/pages/System/Logs";
|
||||
import SystemProvidersView from "@/pages/System/Providers";
|
||||
|
@ -278,6 +279,12 @@ function useRoutes(): CustomRouteObject[] {
|
|||
name: "Releases",
|
||||
element: <SystemReleasesView></SystemReleasesView>,
|
||||
},
|
||||
{
|
||||
path: "announcements",
|
||||
name: "Announcements",
|
||||
badge: data?.announcements,
|
||||
element: <SystemAnnouncementsView></SystemAnnouncementsView>,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
@ -299,6 +306,7 @@ function useRoutes(): CustomRouteObject[] {
|
|||
data?.providers,
|
||||
data?.sonarr_signalr,
|
||||
data?.radarr_signalr,
|
||||
data?.announcements,
|
||||
radarr,
|
||||
sonarr,
|
||||
]
|
||||
|
|
|
@ -6,7 +6,15 @@ import { QueryKeys } from "../queries/keys";
|
|||
import api from "../raw";
|
||||
|
||||
export function useBadges() {
|
||||
return useQuery([QueryKeys.System, QueryKeys.Badges], () => api.badges.all());
|
||||
return useQuery(
|
||||
[QueryKeys.System, QueryKeys.Badges],
|
||||
() => api.badges.all(),
|
||||
{
|
||||
refetchOnWindowFocus: "always",
|
||||
refetchInterval: 1000 * 60,
|
||||
staleTime: 1000 * 10,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export function useFileSystem(
|
||||
|
@ -49,6 +57,11 @@ export function useSettingsMutation() {
|
|||
{
|
||||
onSuccess: () => {
|
||||
client.invalidateQueries([QueryKeys.System]);
|
||||
client.invalidateQueries([QueryKeys.Series]);
|
||||
client.invalidateQueries([QueryKeys.Episodes]);
|
||||
client.invalidateQueries([QueryKeys.Movies]);
|
||||
client.invalidateQueries([QueryKeys.Wanted]);
|
||||
client.invalidateQueries([QueryKeys.Badges]);
|
||||
},
|
||||
}
|
||||
);
|
||||
|
@ -68,7 +81,7 @@ export function useSystemLogs() {
|
|||
return useQuery([QueryKeys.System, QueryKeys.Logs], () => api.system.logs(), {
|
||||
refetchOnWindowFocus: "always",
|
||||
refetchInterval: 1000 * 60,
|
||||
staleTime: 1000,
|
||||
staleTime: 1000 * 10,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -85,6 +98,35 @@ export function useDeleteLogs() {
|
|||
);
|
||||
}
|
||||
|
||||
export function useSystemAnnouncements() {
|
||||
return useQuery(
|
||||
[QueryKeys.System, QueryKeys.Announcements],
|
||||
() => api.system.announcements(),
|
||||
{
|
||||
refetchOnWindowFocus: "always",
|
||||
refetchInterval: 1000 * 60,
|
||||
staleTime: 1000 * 10,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export function useSystemAnnouncementsAddDismiss() {
|
||||
const client = useQueryClient();
|
||||
return useMutation(
|
||||
[QueryKeys.System, QueryKeys.Announcements],
|
||||
(param: { hash: string }) => {
|
||||
const { hash } = param;
|
||||
return api.system.addAnnouncementsDismiss(hash);
|
||||
},
|
||||
{
|
||||
onSuccess: (_, { hash }) => {
|
||||
client.invalidateQueries([QueryKeys.System, QueryKeys.Announcements]);
|
||||
client.invalidateQueries([QueryKeys.System, QueryKeys.Badges]);
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export function useSystemTasks() {
|
||||
return useQuery(
|
||||
[QueryKeys.System, QueryKeys.Tasks],
|
||||
|
|
|
@ -13,6 +13,7 @@ export enum QueryKeys {
|
|||
Blacklist = "blacklist",
|
||||
Search = "search",
|
||||
Actions = "actions",
|
||||
Announcements = "announcements",
|
||||
Tasks = "tasks",
|
||||
Backups = "backups",
|
||||
Logs = "logs",
|
||||
|
|
|
@ -87,6 +87,19 @@ class SystemApi extends BaseApi {
|
|||
await this.delete("/logs");
|
||||
}
|
||||
|
||||
async announcements() {
|
||||
const response = await this.get<DataWrapper<System.Announcements[]>>(
|
||||
"/announcements"
|
||||
);
|
||||
return response.data;
|
||||
}
|
||||
|
||||
async addAnnouncementsDismiss(hash: string) {
|
||||
await this.post<DataWrapper<System.Announcements[]>>("/announcements", {
|
||||
hash,
|
||||
});
|
||||
}
|
||||
|
||||
async tasks() {
|
||||
const response = await this.get<DataWrapper<System.Task[]>>("/tasks");
|
||||
return response.data;
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import UIError from "@/pages/UIError";
|
||||
import UIError from "@/pages/errors/UIError";
|
||||
import { Component } from "react";
|
||||
|
||||
interface State {
|
||||
|
|
|
@ -0,0 +1,84 @@
|
|||
import { rawRender, screen } from "@/tests";
|
||||
import { describe, it } from "vitest";
|
||||
import { Language } from ".";
|
||||
|
||||
describe("Language text", () => {
|
||||
const testLanguage: Language.Info = {
|
||||
code2: "en",
|
||||
name: "English",
|
||||
};
|
||||
|
||||
it("should show short text", () => {
|
||||
rawRender(<Language.Text value={testLanguage}></Language.Text>);
|
||||
|
||||
expect(screen.getByText(testLanguage.code2)).toBeDefined();
|
||||
});
|
||||
|
||||
it("should show long text", () => {
|
||||
rawRender(<Language.Text value={testLanguage} long></Language.Text>);
|
||||
|
||||
expect(screen.getByText(testLanguage.name)).toBeDefined();
|
||||
});
|
||||
|
||||
const testLanguageWithHi: Language.Info = { ...testLanguage, hi: true };
|
||||
|
||||
it("should show short text with HI", () => {
|
||||
rawRender(<Language.Text value={testLanguageWithHi}></Language.Text>);
|
||||
|
||||
const expectedText = `${testLanguageWithHi.code2}:HI`;
|
||||
|
||||
expect(screen.getByText(expectedText)).toBeDefined();
|
||||
});
|
||||
|
||||
it("should show long text with HI", () => {
|
||||
rawRender(<Language.Text value={testLanguageWithHi} long></Language.Text>);
|
||||
|
||||
const expectedText = `${testLanguageWithHi.name} HI`;
|
||||
|
||||
expect(screen.getByText(expectedText)).toBeDefined();
|
||||
});
|
||||
|
||||
const testLanguageWithForced: Language.Info = {
|
||||
...testLanguage,
|
||||
forced: true,
|
||||
};
|
||||
|
||||
it("should show short text with Forced", () => {
|
||||
rawRender(<Language.Text value={testLanguageWithForced}></Language.Text>);
|
||||
|
||||
const expectedText = `${testLanguageWithHi.code2}:Forced`;
|
||||
|
||||
expect(screen.getByText(expectedText)).toBeDefined();
|
||||
});
|
||||
|
||||
it("should show long text with Forced", () => {
|
||||
rawRender(
|
||||
<Language.Text value={testLanguageWithForced} long></Language.Text>
|
||||
);
|
||||
|
||||
const expectedText = `${testLanguageWithHi.name} Forced`;
|
||||
|
||||
expect(screen.getByText(expectedText)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Language list", () => {
|
||||
const elements: Language.Info[] = [
|
||||
{
|
||||
code2: "en",
|
||||
name: "English",
|
||||
},
|
||||
{
|
||||
code2: "zh",
|
||||
name: "Chinese",
|
||||
},
|
||||
];
|
||||
|
||||
it("should show all languages", () => {
|
||||
rawRender(<Language.List value={elements}></Language.List>);
|
||||
|
||||
elements.forEach((value) => {
|
||||
expect(screen.getByText(value.name)).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -9,6 +9,7 @@ import {
|
|||
Accordion,
|
||||
Button,
|
||||
Checkbox,
|
||||
Select,
|
||||
Stack,
|
||||
Switch,
|
||||
Text,
|
||||
|
@ -26,6 +27,7 @@ const defaultCutoffOptions: SelectorOption<Language.ProfileItem>[] = [
|
|||
label: "Any",
|
||||
value: {
|
||||
id: anyCutoff,
|
||||
// eslint-disable-next-line camelcase
|
||||
audio_exclude: "False",
|
||||
forced: "False",
|
||||
hi: "False",
|
||||
|
@ -34,6 +36,21 @@ const defaultCutoffOptions: SelectorOption<Language.ProfileItem>[] = [
|
|||
},
|
||||
];
|
||||
|
||||
const subtitlesTypeOptions: SelectorOption<string>[] = [
|
||||
{
|
||||
label: "Normal or hearing-impaired",
|
||||
value: "normal",
|
||||
},
|
||||
{
|
||||
label: "Hearing-impaired required",
|
||||
value: "hi",
|
||||
},
|
||||
{
|
||||
label: "Forced (foreign part only)",
|
||||
value: "forced",
|
||||
},
|
||||
];
|
||||
|
||||
interface Props {
|
||||
onComplete?: (profile: Language.Profile) => void;
|
||||
languages: readonly Language.Info[];
|
||||
|
@ -112,6 +129,7 @@ const ProfileEditForm: FunctionComponent<Props> = ({
|
|||
const item: Language.ProfileItem = {
|
||||
id,
|
||||
language,
|
||||
// eslint-disable-next-line camelcase
|
||||
audio_exclude: "False",
|
||||
hi: "False",
|
||||
forced: "False",
|
||||
|
@ -157,43 +175,38 @@ const ProfileEditForm: FunctionComponent<Props> = ({
|
|||
},
|
||||
},
|
||||
{
|
||||
Header: "Forced",
|
||||
Header: "Subtitles Type",
|
||||
accessor: "forced",
|
||||
Cell: ({ row: { original: item, index }, value }) => {
|
||||
const selectValue = useMemo(() => {
|
||||
if (item.forced === "True") {
|
||||
return "forced";
|
||||
} else if (item.hi === "True") {
|
||||
return "hi";
|
||||
} else {
|
||||
return "normal";
|
||||
}
|
||||
}, [item.forced, item.hi]);
|
||||
|
||||
return (
|
||||
<Checkbox
|
||||
checked={value === "True"}
|
||||
onChange={({ currentTarget: { checked } }) => {
|
||||
action.mutate(index, {
|
||||
...item,
|
||||
forced: checked ? "True" : "False",
|
||||
hi: checked ? "False" : item.hi,
|
||||
});
|
||||
<Select
|
||||
value={selectValue}
|
||||
data={subtitlesTypeOptions}
|
||||
onChange={(value) => {
|
||||
if (value) {
|
||||
action.mutate(index, {
|
||||
...item,
|
||||
hi: value === "hi" ? "True" : "False",
|
||||
forced: value === "forced" ? "True" : "False",
|
||||
});
|
||||
}
|
||||
}}
|
||||
></Checkbox>
|
||||
></Select>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
Header: "HI",
|
||||
accessor: "hi",
|
||||
Cell: ({ row: { original: item, index }, value }) => {
|
||||
return (
|
||||
<Checkbox
|
||||
checked={value === "True"}
|
||||
onChange={({ currentTarget: { checked } }) => {
|
||||
action.mutate(index, {
|
||||
...item,
|
||||
hi: checked ? "True" : "False",
|
||||
forced: checked ? "False" : item.forced,
|
||||
});
|
||||
}}
|
||||
></Checkbox>
|
||||
);
|
||||
},
|
||||
},
|
||||
{
|
||||
Header: "Exclude Audio",
|
||||
Header: "Exclude If Matching Audio",
|
||||
accessor: "audio_exclude",
|
||||
Cell: ({ row: { original: item, index }, value }) => {
|
||||
return (
|
||||
|
@ -202,6 +215,7 @@ const ProfileEditForm: FunctionComponent<Props> = ({
|
|||
onChange={({ currentTarget: { checked } }) => {
|
||||
action.mutate(index, {
|
||||
...item,
|
||||
// eslint-disable-next-line camelcase
|
||||
audio_exclude: checked ? "True" : "False",
|
||||
});
|
||||
}}
|
||||
|
@ -317,8 +331,6 @@ export const ProfileEditModal = withModal(
|
|||
"languages-profile-editor",
|
||||
{
|
||||
title: "Edit Languages Profile",
|
||||
size: "lg",
|
||||
size: "xl",
|
||||
}
|
||||
);
|
||||
|
||||
export default ProfileEditForm;
|
||||
|
|
|
@ -0,0 +1,38 @@
|
|||
import { rawRender, screen } from "@/tests";
|
||||
import { faStickyNote } from "@fortawesome/free-regular-svg-icons";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { describe, it, vitest } from "vitest";
|
||||
import Action from "./Action";
|
||||
|
||||
const testLabel = "Test Label";
|
||||
const testIcon = faStickyNote;
|
||||
|
||||
describe("Action button", () => {
|
||||
it("should be a button", () => {
|
||||
rawRender(<Action icon={testIcon} label={testLabel}></Action>);
|
||||
const element = screen.getByRole("button", { name: testLabel });
|
||||
|
||||
expect(element.getAttribute("type")).toEqual("button");
|
||||
expect(element.getAttribute("aria-label")).toEqual(testLabel);
|
||||
});
|
||||
|
||||
it("should show icon", () => {
|
||||
rawRender(<Action icon={testIcon} label={testLabel}></Action>);
|
||||
// TODO: use getBy...
|
||||
const element = screen.getByRole("img", { hidden: true });
|
||||
|
||||
expect(element.getAttribute("data-prefix")).toEqual(testIcon.prefix);
|
||||
expect(element.getAttribute("data-icon")).toEqual(testIcon.iconName);
|
||||
});
|
||||
|
||||
it("should call on-click event when clicked", async () => {
|
||||
const onClickFn = vitest.fn();
|
||||
rawRender(
|
||||
<Action icon={testIcon} label={testLabel} onClick={onClickFn}></Action>
|
||||
);
|
||||
|
||||
await userEvent.click(screen.getByRole("button", { name: testLabel }));
|
||||
|
||||
expect(onClickFn).toHaveBeenCalled();
|
||||
});
|
||||
});
|
|
@ -0,0 +1,48 @@
|
|||
import { rawRender, screen } from "@/tests";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { describe, it, vitest } from "vitest";
|
||||
import ChipInput from "./ChipInput";
|
||||
|
||||
describe("ChipInput", () => {
|
||||
const existedValues = ["value_1", "value_2"];
|
||||
|
||||
// TODO: Support default value
|
||||
it.skip("should works with default value", () => {
|
||||
rawRender(<ChipInput defaultValue={existedValues}></ChipInput>);
|
||||
|
||||
existedValues.forEach((value) => {
|
||||
expect(screen.getByText(value)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it("should works with value", () => {
|
||||
rawRender(<ChipInput value={existedValues}></ChipInput>);
|
||||
|
||||
existedValues.forEach((value) => {
|
||||
expect(screen.getByText(value)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it.skip("should allow user creates new value", async () => {
|
||||
const typedValue = "value_3";
|
||||
const mockedFn = vitest.fn((values: string[]) => {
|
||||
expect(values).toContain(typedValue);
|
||||
});
|
||||
|
||||
rawRender(
|
||||
<ChipInput value={existedValues} onChange={mockedFn}></ChipInput>
|
||||
);
|
||||
|
||||
const element = screen.getByRole("searchbox");
|
||||
|
||||
await userEvent.type(element, typedValue);
|
||||
|
||||
expect(element).toHaveValue(typedValue);
|
||||
|
||||
const createBtn = screen.getByText(`Add "${typedValue}"`);
|
||||
|
||||
await userEvent.click(createBtn);
|
||||
|
||||
expect(mockedFn).toBeCalledTimes(1);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,151 @@
|
|||
import { rawRender, screen } from "@/tests";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { describe, it, vitest } from "vitest";
|
||||
import { Selector, SelectorOption } from "./Selector";
|
||||
|
||||
const selectorName = "Test Selections";
|
||||
const testOptions: SelectorOption<string>[] = [
|
||||
{
|
||||
label: "Option 1",
|
||||
value: "option_1",
|
||||
},
|
||||
{
|
||||
label: "Option 2",
|
||||
value: "option_2",
|
||||
},
|
||||
];
|
||||
|
||||
describe("Selector", () => {
|
||||
describe("options", () => {
|
||||
it("should work with the SelectorOption", () => {
|
||||
rawRender(
|
||||
<Selector name={selectorName} options={testOptions}></Selector>
|
||||
);
|
||||
|
||||
// TODO: selectorName
|
||||
expect(screen.getByRole("searchbox")).toBeDefined();
|
||||
});
|
||||
|
||||
it("should display when clicked", async () => {
|
||||
rawRender(
|
||||
<Selector name={selectorName} options={testOptions}></Selector>
|
||||
);
|
||||
|
||||
const element = screen.getByRole("searchbox");
|
||||
|
||||
await userEvent.click(element);
|
||||
|
||||
expect(screen.queryAllByRole("option")).toHaveLength(testOptions.length);
|
||||
|
||||
testOptions.forEach((option) => {
|
||||
expect(screen.getByText(option.label)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
it("shouldn't show default value", async () => {
|
||||
const option = testOptions[0];
|
||||
rawRender(
|
||||
<Selector
|
||||
name={selectorName}
|
||||
options={testOptions}
|
||||
defaultValue={option.value}
|
||||
></Selector>
|
||||
);
|
||||
|
||||
expect(screen.getByDisplayValue(option.label)).toBeDefined();
|
||||
});
|
||||
|
||||
it("shouldn't show value", async () => {
|
||||
const option = testOptions[0];
|
||||
rawRender(
|
||||
<Selector
|
||||
name={selectorName}
|
||||
options={testOptions}
|
||||
value={option.value}
|
||||
></Selector>
|
||||
);
|
||||
|
||||
expect(screen.getByDisplayValue(option.label)).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("event", () => {
|
||||
it("should fire on-change event when clicking option", async () => {
|
||||
const clickedOption = testOptions[0];
|
||||
const mockedFn = vitest.fn((value: string | null) => {
|
||||
expect(value).toEqual(clickedOption.value);
|
||||
});
|
||||
rawRender(
|
||||
<Selector
|
||||
name={selectorName}
|
||||
options={testOptions}
|
||||
onChange={mockedFn}
|
||||
></Selector>
|
||||
);
|
||||
|
||||
const element = screen.getByRole("searchbox");
|
||||
|
||||
await userEvent.click(element);
|
||||
|
||||
await userEvent.click(screen.getByText(clickedOption.label));
|
||||
|
||||
expect(mockedFn).toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("with object options", () => {
|
||||
const objectOptions: SelectorOption<{ name: string }>[] = [
|
||||
{
|
||||
label: "Option 1",
|
||||
value: {
|
||||
name: "option_1",
|
||||
},
|
||||
},
|
||||
{
|
||||
label: "Option 2",
|
||||
value: {
|
||||
name: "option_2",
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
it("should fire on-change event with payload", async () => {
|
||||
const clickedOption = objectOptions[0];
|
||||
|
||||
const mockedFn = vitest.fn((value: { name: string } | null) => {
|
||||
expect(value).toEqual(clickedOption.value);
|
||||
});
|
||||
rawRender(
|
||||
<Selector
|
||||
name={selectorName}
|
||||
options={objectOptions}
|
||||
onChange={mockedFn}
|
||||
getkey={(v) => v.name}
|
||||
></Selector>
|
||||
);
|
||||
|
||||
const element = screen.getByRole("searchbox");
|
||||
|
||||
await userEvent.click(element);
|
||||
|
||||
await userEvent.click(screen.getByText(clickedOption.label));
|
||||
|
||||
expect(mockedFn).toBeCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("placeholder", () => {
|
||||
it("should show when no selection", () => {
|
||||
const placeholder = "Empty Selection";
|
||||
rawRender(
|
||||
<Selector
|
||||
name={selectorName}
|
||||
options={testOptions}
|
||||
placeholder={placeholder}
|
||||
></Selector>
|
||||
);
|
||||
|
||||
expect(screen.getByPlaceholderText(placeholder)).toBeDefined();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,3 +1,4 @@
|
|||
/* eslint-disable camelcase */
|
||||
import {
|
||||
useEpisodeAddBlacklist,
|
||||
useEpisodeHistory,
|
||||
|
|
|
@ -154,8 +154,8 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
|
|||
{
|
||||
accessor: "matches",
|
||||
Cell: (row) => {
|
||||
const { matches, dont_matches } = row.row.original;
|
||||
return <StateIcon matches={matches} dont={dont_matches}></StateIcon>;
|
||||
const { matches, dont_matches: dont } = row.row.original;
|
||||
return <StateIcon matches={matches} dont={dont}></StateIcon>;
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
|
@ -82,6 +82,7 @@ const SubtitleToolView: FunctionComponent<SubtitleToolViewProps> = ({
|
|||
type,
|
||||
language: v.code2,
|
||||
path: v.path,
|
||||
// eslint-disable-next-line camelcase
|
||||
raw_language: v,
|
||||
},
|
||||
];
|
||||
|
|
|
@ -1,10 +1,20 @@
|
|||
import { StrictMode } from "react";
|
||||
import ReactDOM from "react-dom";
|
||||
import { Main } from "./main";
|
||||
import { useRoutes } from "react-router-dom";
|
||||
import { AllProviders } from "./providers";
|
||||
import { useRouteItems } from "./Router";
|
||||
|
||||
const RouteApp = () => {
|
||||
const items = useRouteItems();
|
||||
|
||||
return useRoutes(items);
|
||||
};
|
||||
|
||||
ReactDOM.render(
|
||||
<StrictMode>
|
||||
<Main />
|
||||
<AllProviders>
|
||||
<RouteApp />
|
||||
</AllProviders>
|
||||
</StrictMode>,
|
||||
document.getElementById("root")
|
||||
);
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
import { describe, it } from "vitest";
|
||||
import { StaticModals } from "./WithModal";
|
||||
|
||||
describe("modal tests", () => {
|
||||
it.skip("no duplicated modals", () => {
|
||||
const existedKeys = new Set<string>();
|
||||
StaticModals.forEach(({ modalKey }) => {
|
||||
expect(existedKeys.has(modalKey)).toBeFalsy();
|
||||
existedKeys.add(modalKey);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,7 +1,7 @@
|
|||
import { debounce, forIn, remove, uniq } from "lodash";
|
||||
import { onlineManager } from "react-query";
|
||||
import { io, Socket } from "socket.io-client";
|
||||
import { Environment, isDevEnv } from "../../utilities";
|
||||
import { Environment, isDevEnv, isTestEnv } from "../../utilities";
|
||||
import { ENSURE, GROUP, LOG } from "../../utilities/console";
|
||||
import { createDefaultReducer } from "./reducer";
|
||||
|
||||
|
@ -51,6 +51,10 @@ class SocketIOClient {
|
|||
}
|
||||
|
||||
initialize() {
|
||||
if (isTestEnv) {
|
||||
return;
|
||||
}
|
||||
|
||||
LOG("info", "Initializing Socket.IO client...");
|
||||
this.reducers.push(...createDefaultReducer());
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ import queryClient from "@/apis/queries";
|
|||
import { QueryKeys } from "@/apis/queries/keys";
|
||||
import { LOG } from "@/utilities/console";
|
||||
import { setCriticalError, setOnlineStatus } from "@/utilities/event";
|
||||
import { showNotification } from "@mantine/notifications";
|
||||
import { cleanNotifications, showNotification } from "@mantine/notifications";
|
||||
import { notification, task } from "../task";
|
||||
|
||||
export function createDefaultReducer(): SocketIO.Reducer[] {
|
||||
|
@ -15,6 +15,7 @@ export function createDefaultReducer(): SocketIO.Reducer[] {
|
|||
key: "connect_error",
|
||||
any: () => {
|
||||
setCriticalError("Cannot connect to backend");
|
||||
cleanNotifications();
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
|
@ -10,10 +10,12 @@ import { notification } from "./notification";
|
|||
class TaskDispatcher {
|
||||
private running: boolean;
|
||||
private tasks: Record<string, Task.Callable[]> = {};
|
||||
private progress: Record<string, boolean> = {};
|
||||
|
||||
constructor() {
|
||||
this.running = false;
|
||||
this.tasks = {};
|
||||
this.progress = {};
|
||||
|
||||
window.addEventListener("beforeunload", this.onBeforeUnload.bind(this));
|
||||
}
|
||||
|
@ -108,9 +110,10 @@ class TaskDispatcher {
|
|||
// TODO: FIX ME!
|
||||
item.value += 1;
|
||||
|
||||
if (item.value >= item.count) {
|
||||
if (item.value >= item.count && this.progress[item.id]) {
|
||||
updateNotification(notification.progress.end(item.id, item.header));
|
||||
} else if (item.value > 1) {
|
||||
delete this.progress[item.id];
|
||||
} else if (item.value > 1 && this.progress[item.id]) {
|
||||
updateNotification(
|
||||
notification.progress.update(
|
||||
item.id,
|
||||
|
@ -120,8 +123,10 @@ class TaskDispatcher {
|
|||
item.count
|
||||
)
|
||||
);
|
||||
} else {
|
||||
} else if (item.value > 1 && this.progress[item.id] === undefined) {
|
||||
showNotification(notification.progress.pending(item.id, item.header));
|
||||
this.progress[item.id] = true;
|
||||
setTimeout(() => this.updateProgress([item]), 1000);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
@ -54,7 +54,7 @@ export const notification = {
|
|||
title: header,
|
||||
message: `[${current}/${total}] ${body}`,
|
||||
loading: true,
|
||||
autoClose: 2 * 60 * 1000,
|
||||
autoClose: false,
|
||||
};
|
||||
},
|
||||
end: (id: string, header: string): NotificationProps & { id: string } => {
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
import { render, screen } from "@/tests";
|
||||
import { describe, it } from "vitest";
|
||||
import Authentication from "./Authentication";
|
||||
|
||||
describe("Authentication", () => {
|
||||
it("should render without crash", () => {
|
||||
render(<Authentication></Authentication>);
|
||||
|
||||
expect(screen.getByPlaceholderText("Username")).toBeDefined();
|
||||
expect(screen.getByPlaceholderText("Password")).toBeDefined();
|
||||
expect(screen.getByRole("button", { name: "Login" })).toBeDefined();
|
||||
});
|
||||
});
|
|
@ -40,11 +40,13 @@ const Authentication: FunctionComponent = () => {
|
|||
>
|
||||
<Stack>
|
||||
<TextInput
|
||||
name="Username"
|
||||
placeholder="Username"
|
||||
required
|
||||
{...form.getInputProps("username")}
|
||||
></TextInput>
|
||||
<PasswordInput
|
||||
name="Password"
|
||||
required
|
||||
placeholder="Password"
|
||||
{...form.getInputProps("password")}
|
||||
|
|
|
@ -75,6 +75,7 @@ const Table: FunctionComponent<Props> = ({ blacklist }) => {
|
|||
all: false,
|
||||
form: {
|
||||
provider: row.original.provider,
|
||||
// eslint-disable-next-line camelcase
|
||||
subs_id: value,
|
||||
},
|
||||
})}
|
||||
|
|
|
@ -82,6 +82,7 @@ const Table: FunctionComponent<Props> = ({ blacklist }) => {
|
|||
all: false,
|
||||
form: {
|
||||
provider: row.original.provider,
|
||||
// eslint-disable-next-line camelcase
|
||||
subs_id: value,
|
||||
},
|
||||
})}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue