Added PostgreSQL as an optional database engine

Wiki: https://wiki.bazarr.media/Additional-Configuration/PostgreSQL-Database/
This commit is contained in:
halali 2023-01-29 22:44:56 +01:00 committed by GitHub
parent 9f2ba673de
commit d5911e78b5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 619 additions and 525 deletions
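
For reference, a minimal sketch of the new optional PostgreSQL block as it would appear in Bazarr's config file, assuming the defaults added to app/config.py in this commit; the host, port, database, username and password values below are placeholders for an actual deployment, not values taken from the commit:

    [postgresql]
    enabled = True
    host = localhost
    port = 5432
    database = bazarr
    username = bazarr
    password = changeme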

View File

@ -1,6 +1,5 @@
# coding=utf-8
import datetime
import pretty
from flask_restx import Resource, Namespace, reqparse, fields
@ -13,7 +12,7 @@ from subtitles.mass_download import episode_download_subtitles
from app.event_handler import event_stream
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessEpisode
from ..utils import authenticate, postprocess
api_ns_episodes_blacklist = Namespace('Episodes Blacklist', description='List, add or remove subtitles to or from '
'episodes blacklist')
@ -59,18 +58,17 @@ class EpisodesBlacklist(Resource):
TableBlacklist.timestamp)\
.join(TableEpisodes, on=(TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId))\
.join(TableShows, on=(TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId))\
.order_by(TableBlacklist.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
data = list(data)
.order_by(TableBlacklist.timestamp.desc())
if length > 0:
data = data.limit(length).offset(start)
data = list(data.dicts())
for item in data:
# Make timestamp pretty
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item.update({'timestamp': pretty.date(item['timestamp'])})
postprocessEpisode(item)
postprocess(item)
return data
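
A hypothetical helper summarizing the pagination pattern this commit applies across the list endpoints (the endpoints inline this logic rather than call a helper like this); it mirrors the Peewee usage in the hunk above, where LIMIT/OFFSET are only applied when the client supplied a positive page length:

    def paginate(query, start, length):
        # Apply LIMIT/OFFSET only when the client requested a page size (length > 0);
        # otherwise the full, ordered result set is returned.
        if length > 0:
            query = query.limit(length).offset(start)
        return list(query.dicts())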

View File

@ -5,7 +5,7 @@ from flask_restx import Resource, Namespace, reqparse, fields
from app.database import TableEpisodes
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from ..utils import authenticate, postprocessEpisode
from ..utils import authenticate, postprocess
api_ns_episodes = Namespace('Episodes', description='List episodes metadata for specific series or episodes.')
@ -68,6 +68,6 @@ class Episodes(Resource):
result = list(result)
for item in result:
postprocessEpisode(item)
postprocess(item)
return result

View File

@ -42,13 +42,14 @@ class EpisodesSubtitles(Resource):
args = self.patch_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language,
TableShows.title) \
episodeInfo = TableEpisodes.select(
TableEpisodes.path,
TableEpisodes.sceneName,
TableEpisodes.audio_language,
TableShows.title) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
if not episodeInfo:
@ -56,13 +57,13 @@ class EpisodesSubtitles(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name'] or "None"
sceneName = episodeInfo['sceneName'] or "None"
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
audio_language_list = get_audio_profile_languages(episodeInfo["audio_language"])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -119,10 +120,10 @@ class EpisodesSubtitles(Resource):
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language)\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
TableEpisodes.sceneName,
TableEpisodes.audio_language) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
if not episodeInfo:
@ -130,7 +131,7 @@ class EpisodesSubtitles(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name'] or "None"
sceneName = episodeInfo['sceneName'] or "None"
audio_language = episodeInfo['audio_language']
language = args.get('language')
@ -149,7 +150,7 @@ class EpisodesSubtitles(Resource):
forced=forced,
hi=hi,
title=title,
scene_name=sceneName,
sceneName=sceneName,
media_type='series',
subtitle=subFile,
audio_language=audio_language)
@ -199,10 +200,10 @@ class EpisodesSubtitles(Resource):
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.audio_language)\
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)\
.dicts()\
TableEpisodes.sceneName,
TableEpisodes.audio_language) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
if not episodeInfo:

View File

@ -15,7 +15,7 @@ from app.config import settings
from utilities.path_mappings import path_mappings
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessEpisode
from ..utils import authenticate, postprocess
api_ns_episodes_history = Namespace('Episodes History', description='List episodes history events')
@ -73,8 +73,7 @@ class EpisodesHistory(Resource):
upgradable_episodes_not_perfect = []
if settings.general.getboolean('upgrade_subs'):
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
datetime.datetime(1970, 1, 1)).total_seconds()
minimum_timestamp = (datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
if settings.general.getboolean('upgrade_manual'):
query_actions = [1, 2, 3, 6]
@ -90,11 +89,15 @@ class EpisodesHistory(Resource):
TableHistory.score,
TableShows.tags,
TableEpisodes.monitored,
TableShows.seriesType)\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where(reduce(operator.and_, upgradable_episodes_conditions))\
.group_by(TableHistory.video_path)\
TableShows.seriesType) \
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, upgradable_episodes_conditions)) \
.group_by(TableHistory.video_path,
TableHistory.score,
TableShows.tags,
TableEpisodes.monitored,
TableShows.seriesType) \
.dicts()
upgradable_episodes = list(upgradable_episodes)
for upgradable_episode in upgradable_episodes:
@ -114,7 +117,8 @@ class EpisodesHistory(Resource):
episode_history = TableHistory.select(TableHistory.id,
TableShows.title.alias('seriesTitle'),
TableEpisodes.monitored,
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias('episode_number'),
TableEpisodes.season.concat('x').concat(TableEpisodes.episode).alias(
'episode_number'),
TableEpisodes.title.alias('episodeTitle'),
TableHistory.timestamp,
TableHistory.subs_id,
@ -129,15 +133,14 @@ class EpisodesHistory(Resource):
TableHistory.subtitles_path,
TableHistory.sonarrEpisodeId,
TableHistory.provider,
TableShows.seriesType)\
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
.where(query_condition)\
.order_by(TableHistory.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
episode_history = list(episode_history)
TableShows.seriesType) \
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
.where(query_condition) \
.order_by(TableHistory.timestamp.desc())
if length > 0:
episode_history = episode_history.limit(length).offset(start)
episode_history = list(episode_history.dicts())
blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts()
blacklist_db = list(blacklist_db)
@ -145,7 +148,7 @@ class EpisodesHistory(Resource):
for item in episode_history:
# Mark episode as upgradable or not
item.update({"upgradable": False})
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
if {"video_path": str(item['path']), "timestamp": item['timestamp'], "score": str(item['score']),
"tags": str(item['tags']), "monitored": str(item['monitored']),
"seriesType": str(item['seriesType'])} in upgradable_episodes_not_perfect: # noqa: E129
if os.path.exists(path_mappings.path_replace(item['subtitles_path'])) and \
@ -154,16 +157,16 @@ class EpisodesHistory(Resource):
del item['path']
postprocessEpisode(item)
postprocess(item)
if item['score']:
item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"
# Make timestamp pretty
if item['timestamp']:
item["raw_timestamp"] = int(item['timestamp'])
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item['timestamp'] = pretty.date(item["raw_timestamp"])
item["raw_timestamp"] = item['timestamp'].timestamp()
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item['timestamp'] = pretty.date(item["timestamp"])
# Check if subtitles is blacklisted
item.update({"blacklisted": False})
@ -174,8 +177,8 @@ class EpisodesHistory(Resource):
item.update({"blacklisted": True})
break
count = TableHistory.select()\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
count = TableHistory.select() \
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
.where(TableEpisodes.title.is_null(False)).count()
return {'data': episode_history, 'total': count}

View File

@ -8,7 +8,7 @@ from functools import reduce
from app.database import get_exclusion_clause, TableEpisodes, TableShows
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessEpisode
from ..utils import authenticate, postprocess
api_ns_episodes_wanted = Namespace('Episodes Wanted', description='List episodes wanted subtitles')
@ -65,7 +65,7 @@ class EpisodesWanted(Resource):
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name.alias('sceneName'),
TableEpisodes.sceneName,
TableShows.tags,
TableEpisodes.failedAttempts,
TableShows.seriesType)\
@ -82,20 +82,20 @@ class EpisodesWanted(Resource):
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name.alias('sceneName'),
TableEpisodes.sceneName,
TableShows.tags,
TableEpisodes.failedAttempts,
TableShows.seriesType)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where(wanted_condition)\
.order_by(TableEpisodes.rowid.desc())\
.limit(length)\
.offset(start)\
.dicts()
.order_by(TableEpisodes.rowid.desc())
if length > 0:
data = data.limit(length).offset(start)
data = data.dicts()
data = list(data)
for item in data:
postprocessEpisode(item)
postprocess(item)
count_conditions = [(TableEpisodes.missing_subtitles != '[]')]
count_conditions += get_exclusion_clause('series')

View File

@ -1,6 +1,5 @@
# coding=utf-8
import time
import datetime
import operator
import itertools
@ -63,8 +62,8 @@ class HistoryStats(Resource):
elif timeframe == 'week':
delay = 6 * 24 * 60 * 60
now = time.time()
past = now - delay
now = datetime.datetime.now()
past = now - datetime.timedelta(seconds=delay)
history_where_clauses = [(TableHistory.timestamp.between(past, now))]
history_where_clauses_movie = [(TableHistoryMovie.timestamp.between(past, now))]
@ -92,7 +91,7 @@ class HistoryStats(Resource):
.dicts()
data_series = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_series),
key=lambda x: datetime.datetime.fromtimestamp(x['timestamp']).strftime(
key=lambda x: x['timestamp'].strftime(
'%Y-%m-%d'))]
data_movies = TableHistoryMovie.select(TableHistoryMovie.timestamp, TableHistoryMovie.id) \
@ -100,7 +99,7 @@ class HistoryStats(Resource):
.dicts()
data_movies = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_movies),
key=lambda x: datetime.datetime.fromtimestamp(x['timestamp']).strftime(
key=lambda x: x['timestamp'].strftime(
'%Y-%m-%d'))]
for dt in rrule.rrule(rrule.DAILY,

View File

@ -1,6 +1,5 @@
# coding=utf-8
import datetime
import pretty
from flask_restx import Resource, Namespace, reqparse, fields
@ -13,7 +12,7 @@ from subtitles.mass_download import movies_download_subtitles
from app.event_handler import event_stream
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessMovie
from ..utils import authenticate, postprocess
api_ns_movies_blacklist = Namespace('Movies Blacklist', description='List, add or remove subtitles to or from '
'movies blacklist')
@ -54,18 +53,17 @@ class MoviesBlacklist(Resource):
TableBlacklistMovie.language,
TableBlacklistMovie.timestamp)\
.join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
.order_by(TableBlacklistMovie.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
data = list(data)
.order_by(TableBlacklistMovie.timestamp.desc())
if length > 0:
data = data.limit(length).offset(start)
data = list(data.dicts())
for item in data:
postprocessMovie(item)
postprocess(item)
# Make timestamp pretty
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item.update({'timestamp': pretty.date(datetime.datetime.fromtimestamp(item['timestamp']))})
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item.update({'timestamp': pretty.date(item['timestamp'])})
return data

View File

@ -15,7 +15,7 @@ from app.config import settings
from utilities.path_mappings import path_mappings
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessMovie
from api.utils import authenticate, postprocess
api_ns_movies_history = Namespace('Movies History', description='List movies history events')
@ -70,8 +70,7 @@ class MoviesHistory(Resource):
upgradable_movies_not_perfect = []
if settings.general.getboolean('upgrade_subs'):
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
datetime.datetime(1970, 1, 1)).total_seconds()
minimum_timestamp = (datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
if settings.general.getboolean('upgrade_manual'):
query_actions = [1, 2, 3, 6]
@ -86,10 +85,14 @@ class MoviesHistory(Resource):
fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
TableHistoryMovie.score,
TableMovies.tags,
TableMovies.monitored)\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
.where(reduce(operator.and_, upgradable_movies_conditions))\
.group_by(TableHistoryMovie.video_path)\
TableMovies.monitored) \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(reduce(operator.and_, upgradable_movies_conditions)) \
.group_by(TableHistoryMovie.video_path,
TableHistoryMovie.score,
TableMovies.tags,
TableMovies.monitored
) \
.dicts()
upgradable_movies = list(upgradable_movies)
@ -122,14 +125,13 @@ class MoviesHistory(Resource):
TableHistoryMovie.subs_id,
TableHistoryMovie.provider,
TableHistoryMovie.subtitles_path,
TableHistoryMovie.video_path)\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
.where(query_condition)\
.order_by(TableHistoryMovie.timestamp.desc())\
.limit(length)\
.offset(start)\
.dicts()
movie_history = list(movie_history)
TableHistoryMovie.video_path) \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(query_condition) \
.order_by(TableHistoryMovie.timestamp.desc())
if length > 0:
movie_history = movie_history.limit(length).offset(start)
movie_history = list(movie_history.dicts())
blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
blacklist_db = list(blacklist_db)
@ -137,24 +139,25 @@ class MoviesHistory(Resource):
for item in movie_history:
# Mark movies as upgradable or not
item.update({"upgradable": False})
if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score']),
"tags": str(item['tags']), "monitored": str(item['monitored'])} in upgradable_movies_not_perfect: # noqa: E129
if {"video_path": str(item['path']), "timestamp": item['timestamp'], "score": str(item['score']),
"tags": str(item['tags']),
"monitored": str(item['monitored'])} in upgradable_movies_not_perfect: # noqa: E129
if os.path.exists(path_mappings.path_replace_movie(item['subtitles_path'])) and \
os.path.exists(path_mappings.path_replace_movie(item['video_path'])):
item.update({"upgradable": True})
del item['path']
postprocessMovie(item)
postprocess(item)
if item['score']:
item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"
# Make timestamp pretty
if item['timestamp']:
item["raw_timestamp"] = int(item['timestamp'])
item["parsed_timestamp"] = datetime.datetime.fromtimestamp(int(item['timestamp'])).strftime('%x %X')
item['timestamp'] = pretty.date(item["raw_timestamp"])
item["raw_timestamp"] = item['timestamp'].timestamp()
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item['timestamp'] = pretty.date(item["timestamp"])
# Check if subtitles is blacklisted
item.update({"blacklisted": False})
@ -165,9 +168,9 @@ class MoviesHistory(Resource):
item.update({"blacklisted": True})
break
count = TableHistoryMovie.select()\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
.where(TableMovies.title.is_null(False))\
count = TableHistoryMovie.select() \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(TableMovies.title.is_null(False)) \
.count()
return {'data': movie_history, 'total': count}

View File

@ -9,8 +9,7 @@ from subtitles.wanted import wanted_search_missing_subtitles_movies
from subtitles.mass_download import movies_download_subtitles
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from ..utils import authenticate, postprocessMovie, None_Keys
from api.utils import authenticate, None_Keys, postprocess
api_ns_movies = Namespace('Movies', description='List movies metadata, update movie languages profile or run actions '
'for specific movies.')
@ -82,10 +81,13 @@ class Movies(Resource):
.order_by(TableMovies.sortTitle)\
.dicts()
else:
result = TableMovies.select().order_by(TableMovies.sortTitle).limit(length).offset(start).dicts()
result = TableMovies.select().order_by(TableMovies.sortTitle)
if length > 0:
result = result.limit(length).offset(start)
result = result.dicts()
result = list(result)
for item in result:
postprocessMovie(item)
postprocess(item)
return {'data': result, 'total': count}

View File

@ -1,5 +1,6 @@
# coding=utf-8
import contextlib
import os
import logging
@ -20,7 +21,6 @@ from app.config import settings
from ..utils import authenticate
api_ns_movies_subtitles = Namespace('Movies Subtitles', description='Download, upload or delete movies subtitles')
@ -42,12 +42,13 @@ class MoviesSubtitles(Resource):
args = self.patch_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language)\
.where(TableMovies.radarrId == radarrId)\
.dicts()\
movieInfo = TableMovies.select(
TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
if not movieInfo:
@ -57,19 +58,18 @@ class MoviesSubtitles(Resource):
sceneName = movieInfo['sceneName'] or 'None'
title = movieInfo['title']
audio_language = movieInfo['audio_language']
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
audio_language_list = get_audio_profile_languages(movie_id=radarrId)
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = None
try:
with contextlib.suppress(OSError):
result = list(generate_subtitles(moviePath, [(language, hi, forced)], audio_language,
sceneName, title, 'movie', profile_id=get_profile_id(movie_id=radarrId)))
if result:
@ -78,9 +78,9 @@ class MoviesSubtitles(Resource):
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
language_code = f"{result[2]}:hi"
elif forced:
language_code = result[2] + ":forced"
language_code = f"{result[2]}:forced"
else:
language_code = result[2]
provider = result[3]
@ -92,9 +92,6 @@ class MoviesSubtitles(Resource):
store_subtitles_movie(path, moviePath)
else:
event_stream(type='movie', payload=radarrId)
except OSError:
pass
return '', 204
# POST: Upload Subtitles
@ -134,8 +131,8 @@ class MoviesSubtitles(Resource):
audioLanguage = movieInfo['audio_language']
language = args.get('language')
forced = True if args.get('forced') == 'true' else False
hi = True if args.get('hi') == 'true' else False
forced = args.get('forced') == 'true'
hi = args.get('hi') == 'true'
subFile = args.get('file')
_, ext = os.path.splitext(subFile.filename)
@ -143,7 +140,7 @@ class MoviesSubtitles(Resource):
if not isinstance(ext, str) or ext.lower() not in SUBTITLE_EXTENSIONS:
raise ValueError('A subtitle of an invalid format was uploaded.')
try:
with contextlib.suppress(OSError):
result = manual_upload_subtitle(path=moviePath,
language=language,
forced=forced,
@ -161,9 +158,9 @@ class MoviesSubtitles(Resource):
path = result[1]
subs_path = result[2]
if hi:
language_code = language + ":hi"
language_code = f"{language}:hi"
elif forced:
language_code = language + ":forced"
language_code = f"{language}:forced"
else:
language_code = language
provider = "manual"
@ -172,9 +169,6 @@ class MoviesSubtitles(Resource):
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications_movie(radarrId, message)
store_subtitles_movie(path, moviePath)
except OSError:
pass
return '', 204
# DELETE: Delete Subtitles

View File

@ -8,7 +8,7 @@ from functools import reduce
from app.database import get_exclusion_clause, TableMovies
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocessMovie
from api.utils import authenticate, postprocess
api_ns_movies_wanted = Namespace('Movies Wanted', description='List movies wanted subtitles')
@ -75,14 +75,14 @@ class MoviesWanted(Resource):
TableMovies.tags,
TableMovies.monitored)\
.where(wanted_condition)\
.order_by(TableMovies.rowid.desc())\
.limit(length)\
.offset(start)\
.dicts()
.order_by(TableMovies.rowid.desc())
if length > 0:
result = result.limit(length).offset(start)
result = result.dicts()
result = list(result)
for item in result:
postprocessMovie(item)
postprocess(item)
count_conditions = [(TableMovies.missing_subtitles != '[]')]
count_conditions += get_exclusion_clause('movie')

View File

@ -13,7 +13,6 @@ from subtitles.indexer.series import store_subtitles
from ..utils import authenticate
api_ns_providers_episodes = Namespace('Providers Episodes', description='List and download episodes subtitles manually')
@ -49,10 +48,10 @@ class ProviderEpisodes(Resource):
args = self.get_request_parser.parse_args()
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableShows.title,
TableShows.profileId) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
@ -62,7 +61,7 @@ class ProviderEpisodes(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name'] or "None"
sceneName = episodeInfo['sceneName'] or "None"
profileId = episodeInfo['profileId']
providers_list = get_providers()
@ -92,9 +91,11 @@ class ProviderEpisodes(Resource):
args = self.post_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.scene_name,
TableShows.title) \
episodeInfo = TableEpisodes.select(
TableEpisodes.audio_language,
TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
@ -105,7 +106,7 @@ class ProviderEpisodes(Resource):
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['scene_name'] or "None"
sceneName = episodeInfo['sceneName'] or "None"
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
@ -113,7 +114,7 @@ class ProviderEpisodes(Resource):
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(episode_id=sonarrEpisodeId)
audio_language_list = get_audio_profile_languages(episodeInfo["audio_language"])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:

View File

@ -110,7 +110,7 @@ class ProviderMovies(Resource):
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(movie_id=radarrId)
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
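
A short sketch of the calling-convention change for get_audio_profile_languages seen in the hunks above: instead of passing a series/episode/movie id and letting the helper query the database, callers now pass the audio_language string already selected with the rest of the row (movieInfo here is a hypothetical row dict standing in for the queries shown in the diff):

    # Before: the helper looked the row up itself.
    # audio_language_list = get_audio_profile_languages(movie_id=radarrId)

    # After: the caller passes the stored audio_language value, e.g. "['English']".
    audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
    audio_language = audio_language_list[0]['name'] if audio_language_list else None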

View File

@ -4,6 +4,7 @@ import operator
from flask_restx import Resource, Namespace, reqparse, fields
from functools import reduce
from peewee import fn, JOIN
from app.database import get_exclusion_clause, TableEpisodes, TableShows
from subtitles.indexer.series import list_missing_subtitles, series_scan_subtitles
@ -12,8 +13,7 @@ from subtitles.wanted import wanted_search_missing_subtitles_series
from app.event_handler import event_stream
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from ..utils import authenticate, postprocessSeries, None_Keys
from api.utils import authenticate, None_Keys, postprocess
api_ns_series = Namespace('Series', description='List series metadata, update series languages profile or run actions '
'for specific series.')
@ -34,8 +34,8 @@ class Series(Resource):
data_model = api_ns_series.model('series_data_model', {
'alternativeTitles': fields.List(fields.String),
'audio_language': fields.Nested(get_audio_language_model),
'episodeFileCount': fields.Integer(),
'episodeMissingCount': fields.Integer(),
'episodeFileCount': fields.Integer(default=0),
'episodeMissingCount': fields.Integer(default=0),
'fanart': fields.String(),
'imdbId': fields.String(),
'monitored': fields.Boolean(),
@ -70,40 +70,37 @@ class Series(Resource):
seriesId = args.get('seriesid[]')
count = TableShows.select().count()
episodeFileCount = TableEpisodes.select(TableShows.sonarrSeriesId,
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeFileCount')) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.group_by(TableShows.sonarrSeriesId).alias('episodeFileCount')
episodes_missing_conditions = [(TableEpisodes.missing_subtitles != '[]')]
episodes_missing_conditions += get_exclusion_clause('series')
episodeMissingCount = (TableEpisodes.select(TableShows.sonarrSeriesId,
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeMissingCount'))
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))
.where(reduce(operator.and_, episodes_missing_conditions)).group_by(
TableShows.sonarrSeriesId).alias('episodeMissingCount'))
result = TableShows.select(TableShows, episodeFileCount.c.episodeFileCount,
episodeMissingCount.c.episodeMissingCount).join(episodeFileCount,
join_type=JOIN.LEFT_OUTER, on=(
TableShows.sonarrSeriesId ==
episodeFileCount.c.sonarrSeriesId)
) \
.join(episodeMissingCount, join_type=JOIN.LEFT_OUTER,
on=(TableShows.sonarrSeriesId == episodeMissingCount.c.sonarrSeriesId)).order_by(TableShows.sortTitle)
if len(seriesId) != 0:
result = TableShows.select() \
.where(TableShows.sonarrSeriesId.in_(seriesId)) \
.order_by(TableShows.sortTitle).dicts()
else:
result = TableShows.select().order_by(TableShows.sortTitle).limit(length).offset(start).dicts()
result = list(result)
result = result.where(TableShows.sonarrSeriesId.in_(seriesId))
elif length > 0:
result = result.limit(length).offset(start)
result = list(result.dicts())
for item in result:
postprocessSeries(item)
# Add missing subtitles episode count
episodes_missing_conditions = [(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']),
(TableEpisodes.missing_subtitles != '[]')]
episodes_missing_conditions += get_exclusion_clause('series')
episodeMissingCount = TableEpisodes.select(TableShows.tags,
TableEpisodes.monitored,
TableShows.seriesType) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, episodes_missing_conditions)) \
.count()
item.update({"episodeMissingCount": episodeMissingCount})
# Add episode count
episodeFileCount = TableEpisodes.select(TableShows.tags,
TableEpisodes.monitored,
TableShows.seriesType) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrSeriesId == item['sonarrSeriesId']) \
.count()
item.update({"episodeFileCount": episodeFileCount})
postprocess(item)
return {'data': result, 'total': count}

View File

@ -6,7 +6,7 @@ from flask import request, jsonify
from flask_restx import Resource, Namespace
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableShows, TableMovies, \
TableSettingsNotifier
TableSettingsNotifier, update_profile_id_list
from app.event_handler import event_stream
from app.config import settings, save_settings, get_settings
from app.scheduler import scheduler
@ -92,6 +92,9 @@ class SystemSettings(Resource):
# Remove deleted profiles
TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute()
# invalidate cache
update_profile_id_list.invalidate()
event_stream("languages")
if settings.general.getboolean('use_sonarr'):

View File

@ -36,186 +36,55 @@ def authenticate(actual_method):
def postprocess(item):
# Remove ffprobe_cache
if 'ffprobe_cache' in item:
del (item['ffprobe_cache'])
if item.get('movie_file_id'):
path_replace = path_mappings.path_replace_movie
else:
path_replace = path_mappings.path_replace
if item.get('ffprobe_cache'):
del item['ffprobe_cache']
# Parse tags
if 'tags' in item:
if item['tags'] is None:
item['tags'] = []
else:
item['tags'] = ast.literal_eval(item['tags'])
if 'monitored' in item:
if item['monitored'] is None:
item['monitored'] = False
else:
item['monitored'] = item['monitored'] == 'True'
if 'hearing_impaired' in item and item['hearing_impaired'] is not None:
if item['hearing_impaired'] is None:
item['hearing_impaired'] = False
else:
item['hearing_impaired'] = item['hearing_impaired'] == 'True'
if 'language' in item:
if item['language'] == 'None':
item['language'] = None
elif item['language'] is not None:
splitted_language = item['language'].split(':')
item['language'] = {"name": language_from_alpha2(splitted_language[0]),
"code2": splitted_language[0],
"code3": alpha3_from_alpha2(splitted_language[0]),
"forced": True if item['language'].endswith(':forced') else False,
"hi": True if item['language'].endswith(':hi') else False}
def postprocessSeries(item):
postprocess(item)
# Parse audio language
if 'audio_language' in item and item['audio_language'] is not None:
item['audio_language'] = get_audio_profile_languages(series_id=item['sonarrSeriesId'])
if item.get('audio_language') is not None:
item['audio_language'] = get_audio_profile_languages(item['audio_language'])
# Make sure profileId is a valid None value
if 'profileId' in item and item['profileId'] in None_Keys:
item['profileId'] = None
if 'alternateTitles' in item:
if item['alternateTitles'] is None:
item['alternativeTitles'] = []
else:
item['alternativeTitles'] = ast.literal_eval(item['alternateTitles'])
del item["alternateTitles"]
# Parse seriesType
if 'seriesType' in item and item['seriesType'] is not None:
item['seriesType'] = item['seriesType'].capitalize()
if 'path' in item:
item['path'] = path_mappings.path_replace(item['path'])
# map poster and fanart to server proxy
if 'poster' in item:
poster = item['poster']
item['poster'] = f"{base_url}/images/series{poster}" if poster else None
if 'fanart' in item:
fanart = item['fanart']
item['fanart'] = f"{base_url}/images/series{fanart}" if fanart else None
def postprocessEpisode(item):
postprocess(item)
if 'audio_language' in item and item['audio_language'] is not None:
item['audio_language'] = get_audio_profile_languages(episode_id=item['sonarrEpisodeId'])
if 'subtitles' in item:
if item['subtitles'] is None:
raw_subtitles = []
else:
raw_subtitles = ast.literal_eval(item['subtitles'])
subtitles = []
for subs in raw_subtitles:
subtitle = subs[0].split(':')
sub = {"name": language_from_alpha2(subtitle[0]),
"code2": subtitle[0],
"code3": alpha3_from_alpha2(subtitle[0]),
"path": path_mappings.path_replace(subs[1]),
"forced": False,
"hi": False}
if len(subtitle) > 1:
sub["forced"] = True if subtitle[1] == 'forced' else False
sub["hi"] = True if subtitle[1] == 'hi' else False
subtitles.append(sub)
item.update({"subtitles": subtitles})
# Parse missing subtitles
if 'missing_subtitles' in item:
if item['missing_subtitles'] is None:
item['missing_subtitles'] = []
else:
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
for i, subs in enumerate(item['missing_subtitles']):
subtitle = subs.split(':')
item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
"code2": subtitle[0],
"code3": alpha3_from_alpha2(subtitle[0]),
"forced": False,
"hi": False}
if len(subtitle) > 1:
item['missing_subtitles'][i].update({
"forced": True if subtitle[1] == 'forced' else False,
"hi": True if subtitle[1] == 'hi' else False
})
if 'scene_name' in item:
item["sceneName"] = item["scene_name"]
del item["scene_name"]
if 'path' in item and item['path']:
# Provide mapped path
item['path'] = path_mappings.path_replace(item['path'])
# TODO: Move
def postprocessMovie(item):
postprocess(item)
# Parse audio language
if 'audio_language' in item and item['audio_language'] is not None:
item['audio_language'] = get_audio_profile_languages(movie_id=item['radarrId'])
# Make sure profileId is a valid None value
if 'profileId' in item and item['profileId'] in None_Keys:
if item.get('profileId') and item['profileId'] in None_Keys:
item['profileId'] = None
# Parse alternate titles
if 'alternativeTitles' in item:
if item['alternativeTitles'] is None:
item['alternativeTitles'] = []
else:
item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])
if item.get('alternativeTitles'):
item['alternativeTitles'] = ast.literal_eval(item['alternativeTitles'])
# Parse failed attempts
if 'failedAttempts' in item:
if item['failedAttempts']:
item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])
if item.get('failedAttempts'):
item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])
# Parse subtitles
if 'subtitles' in item:
if item['subtitles'] is None:
item['subtitles'] = []
else:
item['subtitles'] = ast.literal_eval(item['subtitles'])
if item.get('subtitles'):
item['subtitles'] = ast.literal_eval(item['subtitles'])
for i, subs in enumerate(item['subtitles']):
language = subs[0].split(':')
item['subtitles'][i] = {"path": path_mappings.path_replace_movie(subs[1]),
item['subtitles'][i] = {"path": path_replace(subs[1]),
"name": language_from_alpha2(language[0]),
"code2": language[0],
"code3": alpha3_from_alpha2(language[0]),
"forced": False,
"hi": False}
if len(language) > 1:
item['subtitles'][i].update({
"forced": True if language[1] == 'forced' else False,
"hi": True if language[1] == 'hi' else False
})
if settings.general.getboolean('embedded_subs_show_desired'):
item['subtitles'][i].update(
{
"forced": language[1] == 'forced',
"hi": language[1] == 'hi',
}
)
if settings.general.getboolean('embedded_subs_show_desired') and item.get('profileId'):
desired_lang_list = get_desired_languages(item['profileId'])
item['subtitles'] = [x for x in item['subtitles'] if x['code2'] in desired_lang_list or x['path']]
if item['subtitles']:
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))
item['subtitles'] = sorted(item['subtitles'], key=itemgetter('name', 'forced'))
# Parse missing subtitles
if 'missing_subtitles' in item:
if item['missing_subtitles'] is None:
item['missing_subtitles'] = []
else:
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
if item.get('missing_subtitles'):
item['missing_subtitles'] = ast.literal_eval(item['missing_subtitles'])
for i, subs in enumerate(item['missing_subtitles']):
language = subs.split(':')
item['missing_subtitles'][i] = {"name": language_from_alpha2(language[0]),
@ -224,25 +93,50 @@ def postprocessMovie(item):
"forced": False,
"hi": False}
if len(language) > 1:
item['missing_subtitles'][i].update({
"forced": True if language[1] == 'forced' else False,
"hi": True if language[1] == 'hi' else False
})
item['missing_subtitles'][i].update(
{
"forced": language[1] == 'forced',
"hi": language[1] == 'hi',
}
)
# Provide mapped path
if 'path' in item:
if item['path']:
item['path'] = path_mappings.path_replace_movie(item['path'])
# Parse tags
if item.get('tags') is not None:
item['tags'] = ast.literal_eval(item.get('tags', '[]'))
if item.get('monitored'):
item['monitored'] = item.get('monitored') == 'True'
if item.get('hearing_impaired'):
item['hearing_impaired'] = item.get('hearing_impaired') == 'True'
if 'subtitles_path' in item:
if item.get('language'):
if item['language'] == 'None':
item['language'] = None
if item['language'] is not None:
splitted_language = item['language'].split(':')
item['language'] = {
"name": language_from_alpha2(splitted_language[0]),
"code2": splitted_language[0],
"code3": alpha3_from_alpha2(splitted_language[0]),
"forced": bool(item['language'].endswith(':forced')),
"hi": bool(item['language'].endswith(':hi')),
}
# Parse seriesType
if item.get('seriesType') is not None:
item['seriesType'] = item['seriesType'].capitalize()
if item.get('path'):
item['path'] = path_replace(item['path'])
if item.get('subtitles_path'):
# Provide mapped subtitles path
item['subtitles_path'] = path_mappings.path_replace_movie(item['subtitles_path'])
item['subtitles_path'] = path_replace(item['subtitles_path'])
# map poster and fanart to server proxy
if 'poster' in item:
if item.get('poster') is not None:
poster = item['poster']
item['poster'] = f"{base_url}/images/movies{poster}" if poster else None
item['poster'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{poster}" if poster else None
if 'fanart' in item:
if item.get('fanart') is not None:
fanart = item['fanart']
item['fanart'] = f"{base_url}/images/movies{fanart}" if fanart else None
item['fanart'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{fanart}" if fanart else None

View File

@ -5,6 +5,7 @@ from flask import Flask, redirect
from flask_cors import CORS
from flask_socketio import SocketIO
from .database import database
from .get_args import args
from .config import settings, base_url
@ -37,6 +38,19 @@ def create_app():
def page_not_found(_):
return redirect(base_url, code=302)
# This hook ensures that a connection is opened to handle any queries
# generated by the request.
@app.before_request
def _db_connect():
database.connect()
# This hook ensures that the connection is closed when we've finished
# processing the request.
@app.teardown_request
def _db_close(exc):
if not database.is_closed():
database.close()
return app

View File

@ -261,6 +261,14 @@ defaults = {
"streaming_service": 1,
"edition": 1,
"hearing_impaired": 1,
},
'postgresql': {
'enabled': 'False',
'host': 'localhost',
'port': '5432',
'database': '',
'username': '',
'password': '',
}
}

View File

@ -1,31 +1,58 @@
# -*- coding: utf-8 -*-
import logging
import os
import atexit
import json
import ast
import time
from datetime import datetime
from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField
from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField, BigIntegerField, \
DateTimeField
from playhouse.sqliteq import SqliteQueueDatabase
from playhouse.migrate import SqliteMigrator, migrate
from playhouse.sqlite_ext import RowIDField
from dogpile.cache import make_region
from utilities.path_mappings import path_mappings
from peewee import PostgresqlDatabase
from playhouse.migrate import PostgresqlMigrator
from .config import settings, get_array_from
from .get_args import args
database = SqliteQueueDatabase(os.path.join(args.config_dir, 'db', 'bazarr.db'),
use_gevent=False,
autostart=True,
queue_max_size=256)
migrator = SqliteMigrator(database)
logger = logging.getLogger(__name__)
postgresql = settings.postgresql.getboolean('enabled')
region = make_region().configure('dogpile.cache.memory')
if postgresql:
logger.debug(
f"Connecting to PostgreSQL database: {settings.postgresql.host}:{settings.postgresql.port}/{settings.postgresql.database}")
database = PostgresqlDatabase(settings.postgresql.database,
user=settings.postgresql.username,
password=settings.postgresql.password,
host=settings.postgresql.host,
port=settings.postgresql.port,
autoconnect=True
)
migrator = PostgresqlMigrator(database)
else:
db_path = os.path.join(args.config_dir, 'db', 'bazarr.db')
logger.debug(f"Connecting to SQLite database: {db_path}")
database = SqliteQueueDatabase(db_path,
use_gevent=False,
autostart=True,
queue_max_size=256)
migrator = SqliteMigrator(database)
@atexit.register
def _stop_worker_threads():
database.stop()
if not postgresql:
database.stop()
class UnknownField(object):
@ -52,7 +79,7 @@ class TableBlacklist(BaseModel):
sonarr_episode_id = IntegerField(null=True)
sonarr_series_id = IntegerField(null=True)
subs_id = TextField(null=True)
timestamp = IntegerField(null=True)
timestamp = DateTimeField(null=True)
class Meta:
table_name = 'table_blacklist'
@ -64,7 +91,7 @@ class TableBlacklistMovie(BaseModel):
provider = TextField(null=True)
radarr_id = IntegerField(null=True)
subs_id = TextField(null=True)
timestamp = IntegerField(null=True)
timestamp = DateTimeField(null=True)
class Meta:
table_name = 'table_blacklist_movie'
@ -79,13 +106,13 @@ class TableEpisodes(BaseModel):
episode_file_id = IntegerField(null=True)
failedAttempts = TextField(null=True)
ffprobe_cache = BlobField(null=True)
file_size = IntegerField(default=0, null=True)
file_size = BigIntegerField(default=0, null=True)
format = TextField(null=True)
missing_subtitles = TextField(null=True)
monitored = TextField(null=True)
path = TextField()
resolution = TextField(null=True)
scene_name = TextField(null=True)
sceneName = TextField(null=True)
season = IntegerField()
sonarrEpisodeId = IntegerField(unique=True)
sonarrSeriesId = IntegerField()
@ -104,12 +131,12 @@ class TableHistory(BaseModel):
id = AutoField()
language = TextField(null=True)
provider = TextField(null=True)
score = TextField(null=True)
score = IntegerField(null=True)
sonarrEpisodeId = IntegerField()
sonarrSeriesId = IntegerField()
subs_id = TextField(null=True)
subtitles_path = TextField(null=True)
timestamp = IntegerField()
timestamp = DateTimeField()
video_path = TextField(null=True)
class Meta:
@ -123,10 +150,10 @@ class TableHistoryMovie(BaseModel):
language = TextField(null=True)
provider = TextField(null=True)
radarrId = IntegerField()
score = TextField(null=True)
score = IntegerField(null=True)
subs_id = TextField(null=True)
subtitles_path = TextField(null=True)
timestamp = IntegerField()
timestamp = DateTimeField()
video_path = TextField(null=True)
class Meta:
@ -154,7 +181,7 @@ class TableMovies(BaseModel):
failedAttempts = TextField(null=True)
fanart = TextField(null=True)
ffprobe_cache = BlobField(null=True)
file_size = IntegerField(default=0, null=True)
file_size = BigIntegerField(default=0, null=True)
format = TextField(null=True)
imdbId = TextField(null=True)
missing_subtitles = TextField(null=True)
@ -211,7 +238,7 @@ class TableSettingsNotifier(BaseModel):
class TableShows(BaseModel):
alternateTitles = TextField(null=True)
alternativeTitles = TextField(null=True)
audio_language = TextField(null=True)
fanart = TextField(null=True)
imdbId = TextField(default='""', null=True)
@ -296,51 +323,185 @@ def init_db():
def migrate_db():
migrate(
migrator.add_column('table_shows', 'year', TextField(null=True)),
migrator.add_column('table_shows', 'alternateTitles', TextField(null=True)),
migrator.add_column('table_shows', 'tags', TextField(default='[]', null=True)),
migrator.add_column('table_shows', 'seriesType', TextField(default='""', null=True)),
migrator.add_column('table_shows', 'imdbId', TextField(default='""', null=True)),
migrator.add_column('table_shows', 'profileId', IntegerField(null=True)),
migrator.add_column('table_shows', 'monitored', TextField(null=True)),
migrator.add_column('table_episodes', 'format', TextField(null=True)),
migrator.add_column('table_episodes', 'resolution', TextField(null=True)),
migrator.add_column('table_episodes', 'video_codec', TextField(null=True)),
migrator.add_column('table_episodes', 'audio_codec', TextField(null=True)),
migrator.add_column('table_episodes', 'episode_file_id', IntegerField(null=True)),
migrator.add_column('table_episodes', 'audio_language', TextField(null=True)),
migrator.add_column('table_episodes', 'file_size', IntegerField(default=0, null=True)),
migrator.add_column('table_episodes', 'ffprobe_cache', BlobField(null=True)),
migrator.add_column('table_movies', 'sortTitle', TextField(null=True)),
migrator.add_column('table_movies', 'year', TextField(null=True)),
migrator.add_column('table_movies', 'alternativeTitles', TextField(null=True)),
migrator.add_column('table_movies', 'format', TextField(null=True)),
migrator.add_column('table_movies', 'resolution', TextField(null=True)),
migrator.add_column('table_movies', 'video_codec', TextField(null=True)),
migrator.add_column('table_movies', 'audio_codec', TextField(null=True)),
migrator.add_column('table_movies', 'imdbId', TextField(null=True)),
migrator.add_column('table_movies', 'movie_file_id', IntegerField(null=True)),
migrator.add_column('table_movies', 'tags', TextField(default='[]', null=True)),
migrator.add_column('table_movies', 'profileId', IntegerField(null=True)),
migrator.add_column('table_movies', 'file_size', IntegerField(default=0, null=True)),
migrator.add_column('table_movies', 'ffprobe_cache', BlobField(null=True)),
migrator.add_column('table_history', 'video_path', TextField(null=True)),
migrator.add_column('table_history', 'language', TextField(null=True)),
migrator.add_column('table_history', 'provider', TextField(null=True)),
migrator.add_column('table_history', 'score', TextField(null=True)),
migrator.add_column('table_history', 'subs_id', TextField(null=True)),
migrator.add_column('table_history', 'subtitles_path', TextField(null=True)),
migrator.add_column('table_history_movie', 'video_path', TextField(null=True)),
migrator.add_column('table_history_movie', 'language', TextField(null=True)),
migrator.add_column('table_history_movie', 'provider', TextField(null=True)),
migrator.add_column('table_history_movie', 'score', TextField(null=True)),
migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)),
migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)),
migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)),
migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)),
migrator.add_column('table_languages_profiles', 'originalFormat', BooleanField(null=True)),
)
table_shows = [t.name for t in database.get_columns('table_shows')]
table_episodes = [t.name for t in database.get_columns('table_episodes')]
table_movies = [t.name for t in database.get_columns('table_movies')]
table_history = [t.name for t in database.get_columns('table_history')]
table_history_movie = [t.name for t in database.get_columns('table_history_movie')]
table_languages_profiles = [t.name for t in database.get_columns('table_languages_profiles')]
if "year" not in table_shows:
migrate(migrator.add_column('table_shows', 'year', TextField(null=True)))
if "alternativeTitle" not in table_shows:
migrate(migrator.add_column('table_shows', 'alternativeTitle', TextField(null=True)))
if "tags" not in table_shows:
migrate(migrator.add_column('table_shows', 'tags', TextField(default='[]', null=True)))
if "seriesType" not in table_shows:
migrate(migrator.add_column('table_shows', 'seriesType', TextField(default='""', null=True)))
if "imdbId" not in table_shows:
migrate(migrator.add_column('table_shows', 'imdbId', TextField(default='""', null=True)))
if "profileId" not in table_shows:
migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
if "profileId" not in table_shows:
migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
if "monitored" not in table_shows:
migrate(migrator.add_column('table_shows', 'monitored', TextField(null=True)))
if "format" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'format', TextField(null=True)))
if "resolution" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'resolution', TextField(null=True)))
if "video_codec" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'video_codec', TextField(null=True)))
if "audio_codec" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'audio_codec', TextField(null=True)))
if "episode_file_id" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'episode_file_id', IntegerField(null=True)))
if "audio_language" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'audio_language', TextField(null=True)))
if "file_size" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'file_size', BigIntegerField(default=0, null=True)))
if "ffprobe_cache" not in table_episodes:
migrate(migrator.add_column('table_episodes', 'ffprobe_cache', BlobField(null=True)))
if "sortTitle" not in table_movies:
migrate(migrator.add_column('table_movies', 'sortTitle', TextField(null=True)))
if "year" not in table_movies:
migrate(migrator.add_column('table_movies', 'year', TextField(null=True)))
if "alternativeTitles" not in table_movies:
migrate(migrator.add_column('table_movies', 'alternativeTitles', TextField(null=True)))
if "format" not in table_movies:
migrate(migrator.add_column('table_movies', 'format', TextField(null=True)))
if "resolution" not in table_movies:
migrate(migrator.add_column('table_movies', 'resolution', TextField(null=True)))
if "video_codec" not in table_movies:
migrate(migrator.add_column('table_movies', 'video_codec', TextField(null=True)))
if "audio_codec" not in table_movies:
migrate(migrator.add_column('table_movies', 'audio_codec', TextField(null=True)))
if "imdbId" not in table_movies:
migrate(migrator.add_column('table_movies', 'imdbId', TextField(null=True)))
if "movie_file_id" not in table_movies:
migrate(migrator.add_column('table_movies', 'movie_file_id', IntegerField(null=True)))
if "tags" not in table_movies:
migrate(migrator.add_column('table_movies', 'tags', TextField(default='[]', null=True)))
if "profileId" not in table_movies:
migrate(migrator.add_column('table_movies', 'profileId', IntegerField(null=True)))
if "file_size" not in table_movies:
migrate(migrator.add_column('table_movies', 'file_size', BigIntegerField(default=0, null=True)))
if "ffprobe_cache" not in table_movies:
migrate(migrator.add_column('table_movies', 'ffprobe_cache', BlobField(null=True)))
if "video_path" not in table_history:
migrate(migrator.add_column('table_history', 'video_path', TextField(null=True)))
if "language" not in table_history:
migrate(migrator.add_column('table_history', 'language', TextField(null=True)))
if "provider" not in table_history:
migrate(migrator.add_column('table_history', 'provider', TextField(null=True)))
if "score" not in table_history:
migrate(migrator.add_column('table_history', 'score', TextField(null=True)))
if "subs_id" not in table_history:
migrate(migrator.add_column('table_history', 'subs_id', TextField(null=True)))
if "subtitles_path" not in table_history:
migrate(migrator.add_column('table_history', 'subtitles_path', TextField(null=True)))
if "video_path" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'video_path', TextField(null=True)))
if "language" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'language', TextField(null=True)))
if "provider" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'provider', TextField(null=True)))
if "score" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'score', TextField(null=True)))
if "subs_id" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)))
if "subtitles_path" not in table_history_movie:
migrate(migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)))
if "mustContain" not in table_languages_profiles:
migrate(migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)))
if "mustNotContain" not in table_languages_profiles:
migrate(migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)))
if "originalFormat" not in table_languages_profiles:
migrate(migrator.add_column('table_languages_profiles', 'originalFormat', BooleanField(null=True)))
if "languages" in table_shows:
migrate(migrator.drop_column('table_shows', 'languages'))
if "hearing_impaired" in table_shows:
migrate(migrator.drop_column('table_shows', 'hearing_impaired'))
if "languages" in table_movies:
migrate(migrator.drop_column('table_movies', 'languages'))
if "hearing_impaired" in table_movies:
migrate(migrator.drop_column('table_movies', 'hearing_impaired'))
if not any(
x
for x in database.get_columns('table_blacklist')
if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
):
migrate(migrator.alter_column_type('table_blacklist', 'timestamp', DateTimeField(default=datetime.now)))
update = TableBlacklist.select()
for item in update:
item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()
if not any(
x
for x in database.get_columns('table_blacklist_movie')
if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
):
migrate(migrator.alter_column_type('table_blacklist_movie', 'timestamp', DateTimeField(default=datetime.now)))
update = TableBlacklistMovie.select()
for item in update:
item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()
if not any(
x for x in database.get_columns('table_history') if x.name == "score" and x.data_type.lower() == "integer"):
migrate(migrator.alter_column_type('table_history', 'score', IntegerField(null=True)))
if not any(
x
for x in database.get_columns('table_history')
if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
):
migrate(migrator.alter_column_type('table_history', 'timestamp', DateTimeField(default=datetime.now)))
update = TableHistory.select()
list_to_update = []
for i, item in enumerate(update):
item.timestamp = datetime.fromtimestamp(int(item.timestamp))
list_to_update.append(item)
if i % 100 == 0:
TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])
list_to_update = []
if list_to_update:
TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])
if not any(x for x in database.get_columns('table_history_movie') if
x.name == "score" and x.data_type.lower() == "integer"):
migrate(migrator.alter_column_type('table_history_movie', 'score', IntegerField(null=True)))
if not any(
x
for x in database.get_columns('table_history_movie')
if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
):
migrate(migrator.alter_column_type('table_history_movie', 'timestamp', DateTimeField(default=datetime.now)))
update = TableHistoryMovie.select()
list_to_update = []
for i, item in enumerate(update):
item.timestamp = datetime.fromtimestamp(int(item.timestamp))
list_to_update.append(item)
if i % 100 == 0:
TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])
list_to_update = []
if list_to_update:
TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])
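# A minimal sketch, not part of this commit: the batched bulk_update pattern
# above, factored out. Rows holding integer epoch timestamps are converted to
# datetime and flushed in chunks so the migration never builds one huge UPDATE;
# `model` is assumed to be any peewee Model with a `timestamp` DateTimeField.
from datetime import datetime

def _sketch_convert_timestamps(model, batch_size=100):
    pending = []
    for row in model.select():
        row.timestamp = datetime.fromtimestamp(int(row.timestamp))
        pending.append(row)
        if len(pending) >= batch_size:
            model.bulk_update(pending, fields=[model.timestamp])
            pending = []
    if pending:  # flush the final partial batch
        model.bulk_update(pending, fields=[model.timestamp])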
# if not any(x for x in database.get_columns('table_movies') if x.name == "monitored" and x.data_type == "BOOLEAN"):
# migrate(migrator.alter_column_type('table_movies', 'monitored', BooleanField(null=True)))
if database.get_columns('table_settings_providers'):
database.execute_sql('drop table if exists table_settings_providers;')
if "alternateTitles" in table_shows:
migrate(migrator.rename_column('table_shows', 'alternateTitles', "alternativeTitles"))
if "scene_name" in table_episodes:
migrate(migrator.rename_column('table_episodes', 'scene_name', "sceneName"))
class SqliteDictPathMapper:
@ -376,21 +537,21 @@ def get_exclusion_clause(exclusion_type):
if exclusion_type == 'series':
tagsList = ast.literal_eval(settings.sonarr.excluded_tags)
for tag in tagsList:
where_clause.append(~(TableShows.tags.contains("\'"+tag+"\'")))
where_clause.append(~(TableShows.tags.contains("\'" + tag + "\'")))
else:
tagsList = ast.literal_eval(settings.radarr.excluded_tags)
for tag in tagsList:
where_clause.append(~(TableMovies.tags.contains("\'"+tag+"\'")))
where_clause.append(~(TableMovies.tags.contains("\'" + tag + "\'")))
if exclusion_type == 'series':
monitoredOnly = settings.sonarr.getboolean('only_monitored')
if monitoredOnly:
where_clause.append((TableEpisodes.monitored == 'True'))
where_clause.append((TableShows.monitored == 'True'))
where_clause.append((TableEpisodes.monitored == True)) # noqa E712
where_clause.append((TableShows.monitored == True)) # noqa E712
else:
monitoredOnly = settings.radarr.getboolean('only_monitored')
if monitoredOnly:
where_clause.append((TableMovies.monitored == 'True'))
where_clause.append((TableMovies.monitored == True)) # noqa E712
if exclusion_type == 'series':
typesList = get_array_from(settings.sonarr.excluded_series_types)
@ -404,6 +565,7 @@ def get_exclusion_clause(exclusion_type):
return where_clause
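# A minimal sketch, not part of this commit: why the monitored filters above now
# compare against the Python booleans True/False instead of the string 'True',
# presumably because those columns are declared as boolean fields in the new
# schema. peewee turns `Field == True` into a SQL boolean comparison; flake8
# flags the literal `== True` (E712), hence the noqa markers. Illustrative
# query reusing models from this diff:
from app.database import TableEpisodes, TableShows

sketch_monitored = (TableEpisodes
                    .select(TableEpisodes.sonarrEpisodeId)
                    .join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))
                    .where((TableEpisodes.monitored == True) &  # noqa: E712
                           (TableShows.monitored == True)))  # noqa: E712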
@region.cache_on_arguments()
def update_profile_id_list():
profile_id_list = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
@ -487,21 +649,12 @@ def get_profile_cutoff(profile_id):
return cutoff_language
def get_audio_profile_languages(series_id=None, episode_id=None, movie_id=None):
def get_audio_profile_languages(audio_languages_list_str):
from languages.get_languages import alpha2_from_language, alpha3_from_language
audio_languages = []
if series_id:
audio_languages_list_str = TableShows.get(TableShows.sonarrSeriesId == series_id).audio_language
elif episode_id:
audio_languages_list_str = TableEpisodes.get(TableEpisodes.sonarrEpisodeId == episode_id).audio_language
elif movie_id:
audio_languages_list_str = TableMovies.get(TableMovies.radarrId == movie_id).audio_language
else:
return audio_languages
try:
audio_languages_list = ast.literal_eval(audio_languages_list_str)
audio_languages_list = ast.literal_eval(audio_languages_list_str or '[]')
except ValueError:
pass
else:
@ -517,22 +670,22 @@ def get_audio_profile_languages(series_id=None, episode_id=None, movie_id=None):
def get_profile_id(series_id=None, episode_id=None, movie_id=None):
if series_id:
data = TableShows.select(TableShows.profileId)\
.where(TableShows.sonarrSeriesId == series_id)\
data = TableShows.select(TableShows.profileId) \
.where(TableShows.sonarrSeriesId == series_id) \
.get_or_none()
if data:
return data.profileId
elif episode_id:
data = TableShows.select(TableShows.profileId)\
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId))\
.where(TableEpisodes.sonarrEpisodeId == episode_id)\
data = TableShows.select(TableShows.profileId) \
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == episode_id) \
.get_or_none()
if data:
return data.profileId
elif movie_id:
data = TableMovies.select(TableMovies.profileId)\
.where(TableMovies.radarrId == movie_id)\
data = TableMovies.select(TableMovies.profileId) \
.where(TableMovies.radarrId == movie_id) \
.get_or_none()
if data:
return data.profileId
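# A minimal sketch, not part of this commit: the reworked
# get_audio_profile_languages() above now receives the stored audio_language
# string directly (callers already hold the row) instead of re-querying by
# series/episode/movie id. The stored value is a Python-literal list, so the
# parsing step reduces to ast.literal_eval with an empty-list fallback:
import ast

def _sketch_parse_audio_languages(audio_languages_list_str):
    try:
        return ast.literal_eval(audio_languages_list_str or '[]')
    except ValueError:
        return []

# _sketch_parse_audio_languages("['English', 'French']") -> ['English', 'French']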

View File

@ -65,7 +65,7 @@ import logging # noqa E402
def is_virtualenv():
# return True if Bazarr has been started from within a virtualenv or venv
base_prefix = getattr(sys, "base_prefix", None)
# real_prefix will return None if not in a virtualenv enviroment or the default python path
# real_prefix will return None if not in a virtualenv environment or the default python path
real_prefix = getattr(sys, "real_prefix", None) or sys.prefix
return base_prefix != real_prefix

View File

@ -1,6 +1,6 @@
# coding=utf-8
import time
from datetime import datetime
from app.database import TableBlacklistMovie
from app.event_handler import event_stream
@ -19,7 +19,7 @@ def get_blacklist_movie():
def blacklist_log_movie(radarr_id, provider, subs_id, language):
TableBlacklistMovie.insert({
TableBlacklistMovie.radarr_id: radarr_id,
TableBlacklistMovie.timestamp: time.time(),
TableBlacklistMovie.timestamp: datetime.now(),
TableBlacklistMovie.provider: provider,
TableBlacklistMovie.subs_id: subs_id,
TableBlacklistMovie.language: language
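# A minimal sketch, not part of this commit: timestamps are now written as
# datetime objects rather than time.time() floats, so the DateTimeField columns
# round-trip as datetime on both SQLite and the new PostgreSQL backend.
# Illustration with a throwaway in-memory model (names are hypothetical):
from datetime import datetime
from peewee import Model, DateTimeField, SqliteDatabase

_sketch_db = SqliteDatabase(':memory:')

class _SketchHistoryRow(Model):
    timestamp = DateTimeField(default=datetime.now)

    class Meta:
        database = _sketch_db

_sketch_db.create_tables([_SketchHistoryRow])
_sketch_row = _SketchHistoryRow.create(timestamp=datetime.now())
assert isinstance(_SketchHistoryRow.get_by_id(_sketch_row.id).timestamp, datetime)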

View File

@ -1,6 +1,6 @@
# coding=utf-8
import time
from datetime import datetime
from app.database import TableHistoryMovie
from app.event_handler import event_stream
@ -11,7 +11,7 @@ def history_log_movie(action, radarr_id, description, video_path=None, language=
TableHistoryMovie.insert({
TableHistoryMovie.action: action,
TableHistoryMovie.radarrId: radarr_id,
TableHistoryMovie.timestamp: time.time(),
TableHistoryMovie.timestamp: datetime.now(),
TableHistoryMovie.description: description,
TableHistoryMovie.video_path: video_path,
TableHistoryMovie.language: language,

View File

@ -147,12 +147,12 @@ def update_movies(send_event=True):
# Insert new movies in DB
for added_movie in movies_to_add:
try:
result = TableMovies.insert(added_movie).on_conflict(action='IGNORE').execute()
result = TableMovies.insert(added_movie).on_conflict_ignore().execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot insert movie {added_movie['path']} because of {e}")
continue
else:
if result > 0:
if result and result > 0:
altered_movies.append([added_movie['tmdbId'],
added_movie['path'],
added_movie['radarrId'],

View File

@ -1,6 +1,6 @@
# coding=utf-8
import time
from datetime import datetime
from app.database import TableBlacklist
from app.event_handler import event_stream
@ -20,7 +20,7 @@ def blacklist_log(sonarr_series_id, sonarr_episode_id, provider, subs_id, langua
TableBlacklist.insert({
TableBlacklist.sonarr_series_id: sonarr_series_id,
TableBlacklist.sonarr_episode_id: sonarr_episode_id,
TableBlacklist.timestamp: time.time(),
TableBlacklist.timestamp: datetime.now(),
TableBlacklist.provider: provider,
TableBlacklist.subs_id: subs_id,
TableBlacklist.language: language

View File

@ -1,6 +1,6 @@
# coding=utf-8
import time
from datetime import datetime
from app.database import TableHistory
from app.event_handler import event_stream
@ -12,7 +12,7 @@ def history_log(action, sonarr_series_id, sonarr_episode_id, description, video_
TableHistory.action: action,
TableHistory.sonarrSeriesId: sonarr_series_id,
TableHistory.sonarrEpisodeId: sonarr_episode_id,
TableHistory.timestamp: time.time(),
TableHistory.timestamp: datetime.now(),
TableHistory.description: description,
TableHistory.video_path: video_path,
TableHistory.language: language,

View File

@ -119,7 +119,7 @@ def sync_episodes(series_id=None, send_event=True):
TableEpisodes.path,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableEpisodes.monitored,
TableEpisodes.format,
TableEpisodes.resolution,
@ -149,12 +149,12 @@ def sync_episodes(series_id=None, send_event=True):
# Insert new episodes in DB
for added_episode in episodes_to_add:
try:
result = TableEpisodes.insert(added_episode).on_conflict(action='IGNORE').execute()
result = TableEpisodes.insert(added_episode).on_conflict_ignore().execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {added_episode['path']} because of {e}")
continue
else:
if result > 0:
if result and result > 0:
altered_episodes.append([added_episode['sonarrEpisodeId'],
added_episode['path'],
added_episode['monitored']])
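# A minimal sketch, not part of this commit: the inserts in update_movies and
# sync_episodes above switched to peewee's on_conflict_ignore() helper (INSERT
# OR IGNORE on SQLite, ON CONFLICT DO NOTHING on PostgreSQL) and now guard on
# `result and result > 0`, presumably because execute() can return None on the
# new backend when the conflicting row is skipped. Illustrative helper:
from peewee import IntegrityError
from app.database import TableEpisodes

def _sketch_insert_episode(added_episode):
    try:
        result = TableEpisodes.insert(added_episode).on_conflict_ignore().execute()
    except IntegrityError:
        return False
    return bool(result and result > 0)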

View File

@ -49,7 +49,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
'audio_language': str(audio_language),
'sortTitle': show['sortTitle'],
'year': str(show['year']),
'alternateTitles': alternate_titles,
'alternativeTitles': alternate_titles,
'tags': str(tags),
'seriesType': show['seriesType'],
'imdbId': imdbId,
@ -65,7 +65,7 @@ def seriesParser(show, action, tags_dict, serie_default_profile, audio_profiles)
'audio_language': str(audio_language),
'sortTitle': show['sortTitle'],
'year': str(show['year']),
'alternateTitles': alternate_titles,
'alternativeTitles': alternate_titles,
'tags': str(tags),
'seriesType': show['seriesType'],
'imdbId': imdbId,
@ -141,7 +141,7 @@ def episodeParser(episode):
'path': episode['episodeFile']['path'],
'season': episode['seasonNumber'],
'episode': episode['episodeNumber'],
'scene_name': sceneName,
'sceneName': sceneName,
'monitored': str(bool(episode['monitored'])),
'format': video_format,
'resolution': video_resolution,

View File

@ -97,7 +97,7 @@ def update_series(send_event=True):
TableShows.audio_language,
TableShows.sortTitle,
TableShows.year,
TableShows.alternateTitles,
TableShows.alternativeTitles,
TableShows.tags,
TableShows.seriesType,
TableShows.imdbId,

View File

@ -42,7 +42,7 @@ def movies_download_subtitles(no):
else:
count_movie = 0
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
audio_language_list = get_audio_profile_languages(movie['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:

View File

@ -26,7 +26,7 @@ def series_download_subtitles(no):
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.seriesType,
TableEpisodes.audio_language,
@ -57,7 +57,7 @@ def series_download_subtitles(no):
value=i,
count=count_episodes_details)
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
audio_language_list = get_audio_profile_languages(episode['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -76,7 +76,7 @@ def series_download_subtitles(no):
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
str(episode['sceneName']),
episode['title'],
'series',
check_if_still_required=True):
@ -112,7 +112,7 @@ def episode_download_subtitles(no, send_progress=False):
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.title,
TableShows.sonarrSeriesId,
@ -142,7 +142,7 @@ def episode_download_subtitles(no, send_progress=False):
value=0,
count=1)
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
audio_language_list = get_audio_profile_languages(episode['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -161,7 +161,7 @@ def episode_download_subtitles(no, send_progress=False):
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
str(episode['sceneName']),
episode['title'],
'series',
check_if_still_required=True):

View File

@ -23,7 +23,7 @@ def refine_from_db(path, video):
TableEpisodes.title.alias('episodeTitle'),
TableShows.year,
TableShows.tvdbId,
TableShows.alternateTitles,
TableShows.alternativeTitles,
TableEpisodes.format,
TableEpisodes.resolution,
TableEpisodes.video_codec,
@ -47,7 +47,7 @@ def refine_from_db(path, video):
video.year = int(data['year'])
video.series_tvdb_id = int(data['tvdbId'])
video.alternative_series = ast.literal_eval(data['alternateTitles'])
video.alternative_series = ast.literal_eval(data['alternativeTitles'])
if data['imdbId'] and not video.series_imdb_id:
video.series_imdb_id = data['imdbId']
if not video.source:

View File

@ -26,8 +26,7 @@ from .download import generate_subtitles
def upgrade_subtitles():
days_to_upgrade_subs = settings.general.days_to_upgrade_subs
minimum_timestamp = ((datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
datetime(1970, 1, 1)).total_seconds()
minimum_timestamp = (datetime.now() - timedelta(days=int(days_to_upgrade_subs)))
if settings.general.getboolean('upgrade_manual'):
query_actions = [1, 2, 3, 4, 6]
@ -41,11 +40,11 @@ def upgrade_subtitles():
upgradable_episodes_conditions += get_exclusion_clause('series')
upgradable_episodes = TableHistory.select(TableHistory.video_path,
TableHistory.language,
TableHistory.score,
fn.MAX(TableHistory.score).alias('score'),
TableShows.tags,
TableShows.profileId,
TableEpisodes.audio_language,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableEpisodes.title,
TableEpisodes.sonarrSeriesId,
TableHistory.action,
@ -56,11 +55,26 @@ def upgrade_subtitles():
TableEpisodes.season,
TableEpisodes.episode,
TableShows.title.alias('seriesTitle'),
TableShows.seriesType)\
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId))\
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId))\
.where(reduce(operator.and_, upgradable_episodes_conditions))\
.group_by(TableHistory.video_path, TableHistory.language)\
TableShows.seriesType) \
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
.where(reduce(operator.and_, upgradable_episodes_conditions)) \
.group_by(TableHistory.video_path,
TableHistory.language,
TableShows.tags,
TableShows.profileId,
TableEpisodes.audio_language,
TableEpisodes.sceneName,
TableEpisodes.title,
TableEpisodes.sonarrSeriesId,
TableHistory.action,
TableHistory.subtitles_path,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.monitored,
TableEpisodes.season,
TableEpisodes.episode,
TableShows.title.alias('seriesTitle'),
TableShows.seriesType) \
.dicts()
upgradable_episodes_not_perfect = []
for upgradable_episode in upgradable_episodes:
@ -90,7 +104,7 @@ def upgrade_subtitles():
upgradable_movies_conditions += get_exclusion_clause('movie')
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
TableHistoryMovie.language,
TableHistoryMovie.score,
fn.MAX(TableHistoryMovie.score).alias('score'),
TableMovies.profileId,
TableHistoryMovie.action,
TableHistoryMovie.subtitles_path,
@ -100,10 +114,21 @@ def upgrade_subtitles():
TableMovies.monitored,
TableMovies.tags,
TableMovies.radarrId,
TableMovies.title)\
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId))\
.where(reduce(operator.and_, upgradable_movies_conditions))\
.group_by(TableHistoryMovie.video_path, TableHistoryMovie.language)\
TableMovies.title) \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(reduce(operator.and_, upgradable_movies_conditions)) \
.group_by(TableHistoryMovie.video_path,
TableHistoryMovie.language,
TableMovies.profileId,
TableHistoryMovie.action,
TableHistoryMovie.subtitles_path,
TableMovies.audio_language,
TableMovies.sceneName,
TableMovies.monitored,
TableMovies.tags,
TableMovies.radarrId,
TableMovies.title
) \
.dicts()
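# A minimal sketch, not part of this commit: unlike SQLite, PostgreSQL rejects
# queries that select non-aggregated columns missing from GROUP BY, which is
# presumably why the history queries above now aggregate the score with
# fn.MAX and enumerate every other selected column in group_by(). Reduced
# example using a model from this diff:
from peewee import fn
from app.database import TableHistoryMovie

sketch_best_scores = (TableHistoryMovie
                      .select(TableHistoryMovie.video_path,
                              TableHistoryMovie.language,
                              fn.MAX(TableHistoryMovie.score).alias('score'))
                      .group_by(TableHistoryMovie.video_path,
                                TableHistoryMovie.language)
                      .dicts())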
upgradable_movies_not_perfect = []
for upgradable_movie in upgradable_movies:
@ -155,7 +180,7 @@ def upgrade_subtitles():
is_forced = "False"
is_hi = "False"
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
audio_language_list = get_audio_profile_languages(episode['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -164,7 +189,7 @@ def upgrade_subtitles():
result = list(generate_subtitles(path_mappings.path_replace(episode['video_path']),
[(language, is_hi, is_forced)],
audio_language,
str(episode['scene_name']),
str(episode['sceneName']),
episode['seriesTitle'],
'series',
forced_minimum_score=int(episode['score']),
@ -218,7 +243,7 @@ def upgrade_subtitles():
is_forced = "False"
is_hi = "False"
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
audio_language_list = get_audio_profile_languages(movie['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -249,7 +274,8 @@ def upgrade_subtitles():
subs_path = result[7]
store_subtitles_movie(movie['video_path'],
path_mappings.path_replace_movie(movie['video_path']))
history_log_movie(3, movie['radarrId'], message, path, language_code, provider, score, subs_id, subs_path)
history_log_movie(3, movie['radarrId'], message, path, language_code, provider, score, subs_id,
subs_path)
send_notifications_movie(movie['radarrId'], message)
hide_progress(id='upgrade_movies_progress')

View File

@ -20,7 +20,7 @@ from ..download import generate_subtitles
def _wanted_movie(movie):
audio_language_list = get_audio_profile_languages(movie_id=movie['radarrId'])
audio_language_list = get_audio_profile_languages(movie['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:

View File

@ -20,7 +20,7 @@ from ..download import generate_subtitles
def _wanted_episode(episode):
audio_language_list = get_audio_profile_languages(episode_id=episode['sonarrEpisodeId'])
audio_language_list = get_audio_profile_languages(episode['audio_language'])
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -47,7 +47,7 @@ def _wanted_episode(episode):
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
languages,
audio_language,
str(episode['scene_name']),
str(episode['sceneName']),
episode['title'],
'series',
check_if_still_required=True):
@ -56,9 +56,9 @@ def _wanted_episode(episode):
path = result[1]
forced = result[5]
if result[8]:
language_code = result[2] + ":hi"
language_code = f"{result[2]}:hi"
elif forced:
language_code = result[2] + ":forced"
language_code = f"{result[2]}:forced"
else:
language_code = result[2]
provider = result[3]
@ -79,7 +79,7 @@ def wanted_download_subtitles(sonarr_episode_id):
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.audio_language,
TableEpisodes.scene_name,
TableEpisodes.sceneName,
TableEpisodes.failedAttempts,
TableShows.title)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\

View File

@ -47,27 +47,29 @@ def get_backup_files(fullpath=True):
def backup_to_zip():
now = datetime.now()
database_backup_file = None
now_string = now.strftime("%Y.%m.%d_%H.%M.%S")
backup_filename = f"bazarr_backup_v{os.environ['BAZARR_VERSION']}_{now_string}.zip"
logging.debug(f'Backup filename will be: {backup_filename}')
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
logging.debug(f'Database file path to backup is: {database_src_file}')
if not settings.postgresql.getboolean('enabled'):
database_src_file = os.path.join(args.config_dir, 'db', 'bazarr.db')
logging.debug(f'Database file path to backup is: {database_src_file}')
try:
database_src_con = sqlite3.connect(database_src_file)
try:
database_src_con = sqlite3.connect(database_src_file)
database_backup_file = os.path.join(get_backup_path(), 'bazarr_temp.db')
database_backup_con = sqlite3.connect(database_backup_file)
database_backup_file = os.path.join(get_backup_path(), 'bazarr_temp.db')
database_backup_con = sqlite3.connect(database_backup_file)
with database_backup_con:
database_src_con.backup(database_backup_con)
with database_backup_con:
database_src_con.backup(database_backup_con)
database_backup_con.close()
database_src_con.close()
except Exception:
database_backup_file = None
logging.exception('Unable to backup database file.')
database_backup_con.close()
database_src_con.close()
except Exception:
database_backup_file = None
logging.exception('Unable to backup database file.')
config_file = os.path.join(args.config_dir, 'config', 'config.ini')
logging.debug(f'Config file path to backup is: {config_file}')
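# A minimal sketch, not part of this commit: when PostgreSQL is disabled, the
# block above snapshots the live SQLite file with the stdlib online-backup API,
# which copies the database page by page without stopping Bazarr. Standalone
# version with illustrative paths:
import sqlite3

def _sketch_backup_sqlite(src_path='bazarr.db', dest_path='bazarr_temp.db'):
    src_con = sqlite3.connect(src_path)
    dest_con = sqlite3.connect(dest_path)
    try:
        with dest_con:
            src_con.backup(dest_con)  # online, page-by-page copy
    finally:
        dest_con.close()
        src_con.close()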
@ -75,15 +77,14 @@ def backup_to_zip():
with ZipFile(os.path.join(get_backup_path(), backup_filename), 'w') as backupZip:
if database_backup_file:
backupZip.write(database_backup_file, 'bazarr.db')
try:
os.remove(database_backup_file)
except OSError:
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
else:
logging.debug('Database file is not included in backup. See previous exception')
backupZip.write(config_file, 'config.ini')
try:
os.remove(database_backup_file)
except OSError:
logging.exception(f'Unable to delete temporary database backup file: {database_backup_file}')
def restore_from_backup():
restore_config_path = os.path.join(get_restore_path(), 'config.ini')
@ -97,30 +98,34 @@ def restore_from_backup():
os.remove(restore_config_path)
except OSError:
logging.exception(f'Unable to restore or delete config.ini to {dest_config_path}')
try:
shutil.copy(restore_database_path, dest_database_path)
os.remove(restore_database_path)
except OSError:
logging.exception(f'Unable to restore or delete db to {dest_database_path}')
else:
if not settings.postgresql.getboolean('enabled'):
try:
if os.path.isfile(dest_database_path + '-shm'):
os.remove(dest_database_path + '-shm')
if os.path.isfile(dest_database_path + '-wal'):
os.remove(dest_database_path + '-wal')
shutil.copy(restore_database_path, dest_database_path)
os.remove(restore_database_path)
except OSError:
logging.exception('Unable to delete SHM and WAL file.')
logging.exception(f'Unable to restore or delete db to {dest_database_path}')
else:
try:
if os.path.isfile(f'{dest_database_path}-shm'):
os.remove(f'{dest_database_path}-shm')
if os.path.isfile(f'{dest_database_path}-wal'):
os.remove(f'{dest_database_path}-wal')
except OSError:
logging.exception('Unable to delete SHM and WAL file.')
try:
os.remove(restore_database_path)
except OSError:
logging.exception(f'Unable to delete {dest_database_path}')
logging.info('Backup restored successfully. Bazarr will restart.')
try:
restart_file = io.open(os.path.join(args.config_dir, "bazarr.restart"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create restart file: ' + repr(e))
logging.error(f'BAZARR Cannot create restart file: {repr(e)}')
else:
logging.info('Bazarr is being restarted...')
restart_file.write(str(''))
restart_file.write('')
restart_file.close()
os._exit(0)
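# A minimal sketch, not part of this commit: when restoring a SQLite backup,
# the backed-up bazarr.db is copied into place and any leftover -shm/-wal
# sidecars from the previous database are then removed, so SQLite does not try
# to replay a stale write-ahead log against the restored file; under PostgreSQL
# only the uploaded restore file is cleaned up. Standalone version, paths
# illustrative:
import os
import shutil

def _sketch_restore_sqlite(restore_path, dest_path):
    shutil.copy(restore_path, dest_path)
    os.remove(restore_path)
    for suffix in ('-shm', '-wal'):
        sidecar = f'{dest_path}{suffix}'
        if os.path.isfile(sidecar):
            os.remove(sidecar)  # drop stale WAL/SHM sidecars left by the old database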
elif os.path.isfile(restore_config_path) or os.path.isfile(restore_database_path):
@ -134,11 +139,6 @@ def restore_from_backup():
except OSError:
logging.exception(f'Unable to delete {dest_config_path}')
try:
os.remove(restore_database_path)
except OSError:
logging.exception(f'Unable to delete {dest_database_path}')
def prepare_restore(filename):
src_zip_file_path = os.path.join(get_backup_path(), filename)