Merge development into master

This commit is contained in:
github-actions[bot] 2023-09-16 02:44:25 +00:00 committed by GitHub
commit 823f3d8d3f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
731 changed files with 314538 additions and 55732 deletions

View File

@ -4,3 +4,4 @@ libs
bazarr.py
requirements.txt
postgres-requirements.txt
migrations

View File

@ -63,6 +63,8 @@ jobs:
- name: Create Release (Conditional)
if: ${{ steps.check-ci.outputs.conclusion == 'success' }}
run: |
git config user.name "${{github.actor}}"
git config user.email "${{github.actor}}@users.noreply.github.com"
revision_count=$(git rev-list --invert-grep --regexp-ignore-case --extended-regexp --grep="^(Release|no log:|Merge.remote-tracking).*" $(git describe --tags --abbrev=0)..HEAD --count)
if [[ $revision_count != 0 ]]; then
echo "**** Found $revision_count changes! Releasing... ****"

View File

@ -55,6 +55,8 @@ jobs:
- name: Create Release
run: |
git config user.name "${{github.actor}}"
git config user.email "${{github.actor}}@users.noreply.github.com"
export RELEASE_MASTER=1
release-it --ci --increment ${{ github.event.inputs.increment }}
Merge:

View File

@ -1,11 +1,12 @@
# coding=utf-8
import operator
import ast
from functools import reduce
from flask_restx import Resource, Namespace, fields
from flask_restx import Resource, Namespace, fields, marshal
from app.database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies
from app.database import get_exclusion_clause, TableEpisodes, TableShows, TableMovies, database, select
from app.get_providers import get_throttled_providers
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client
from app.announcements import get_all_announcements
@ -30,40 +31,46 @@ class Badges(Resource):
})
@authenticate
@api_ns_badges.response(401, 'Not Authenticated')
@api_ns_badges.doc(parser=None)
def get(self):
    """Get badges count to update the UI.

    Returns a dict (marshalled with ``get_model``) containing the number of
    missing episode/movie subtitles, throttled providers, health issues,
    SignalR connection states and pending announcements.
    """
    # Episodes with at least one missing subtitle; exclusion clauses filter
    # out series excluded by tags/type/monitored settings.
    episodes_conditions = [(TableEpisodes.missing_subtitles.is_not(None)),
                           (TableEpisodes.missing_subtitles != '[]')]
    episodes_conditions += get_exclusion_clause('series')
    missing_episodes = database.execute(
        select(TableEpisodes.missing_subtitles)
        .select_from(TableEpisodes)
        .join(TableShows)
        .where(reduce(operator.and_, episodes_conditions))) \
        .all()
    # missing_subtitles is stored as a stringified list; count its items.
    missing_episodes_count = 0
    for episode in missing_episodes:
        missing_episodes_count += len(ast.literal_eval(episode.missing_subtitles))

    movies_conditions = [(TableMovies.missing_subtitles.is_not(None)),
                         (TableMovies.missing_subtitles != '[]')]
    movies_conditions += get_exclusion_clause('movie')
    missing_movies = database.execute(
        select(TableMovies.missing_subtitles)
        .select_from(TableMovies)
        .where(reduce(operator.and_, movies_conditions))) \
        .all()
    missing_movies_count = 0
    for movie in missing_movies:
        missing_movies_count += len(ast.literal_eval(movie.missing_subtitles))

    throttled_providers = len(get_throttled_providers())
    health_issues = len(get_health_issues())

    result = {
        "episodes": missing_episodes_count,
        "movies": missing_movies_count,
        "providers": throttled_providers,
        "status": health_issues,
        'sonarr_signalr': "LIVE" if sonarr_signalr_client.connected else "",
        'radarr_signalr': "LIVE" if radarr_signalr_client.connected else "",
        'announcements': len(get_all_announcements()),
    }
    return marshal(result, self.get_model)

View File

@ -2,9 +2,9 @@
import pretty
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableShows, TableBlacklist
from app.database import TableEpisodes, TableShows, TableBlacklist, database, select
from subtitles.tools.delete import delete_subtitles
from sonarr.blacklist import blacklist_log, blacklist_delete_all, blacklist_delete
from utilities.path_mappings import path_mappings
@ -39,7 +39,6 @@ class EpisodesBlacklist(Resource):
})
@authenticate
@api_ns_episodes_blacklist.response(401, 'Not Authenticated')
@api_ns_episodes_blacklist.doc(parser=get_request_parser)
def get(self):
    """List blacklisted episodes subtitles."""
    # NOTE(review): this line is reconstructed from the sibling methods'
    # pattern (the diff hunk cut it) — confirm against the original file.
    args = self.get_request_parser.parse_args()
    start = args.get('start')
    length = args.get('length')

    stmt = select(TableShows.title.label('seriesTitle'),
                  TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
                  TableEpisodes.title.label('episodeTitle'),
                  TableEpisodes.sonarrSeriesId,
                  TableBlacklist.provider,
                  TableBlacklist.subs_id,
                  TableBlacklist.language,
                  TableBlacklist.timestamp) \
        .select_from(TableBlacklist) \
        .join(TableShows, onclause=TableBlacklist.sonarr_series_id == TableShows.sonarrSeriesId) \
        .join(TableEpisodes, onclause=TableBlacklist.sonarr_episode_id == TableEpisodes.sonarrEpisodeId) \
        .order_by(TableBlacklist.timestamp.desc())

    # length <= 0 means "no paging": return the full blacklist.
    if length > 0:
        stmt = stmt.limit(length).offset(start)

    return marshal([postprocess({
        'seriesTitle': x.seriesTitle,
        'episode_number': x.episode_number,
        'episodeTitle': x.episodeTitle,
        'sonarrSeriesId': x.sonarrSeriesId,
        'provider': x.provider,
        'subs_id': x.subs_id,
        'language': x.language,
        'timestamp': pretty.date(x.timestamp),
        'parsed_timestamp': x.timestamp.strftime('%x %X')
    }) for x in database.execute(stmt).all()], self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
@ -85,6 +87,7 @@ class EpisodesBlacklist(Resource):
@api_ns_episodes_blacklist.response(200, 'Success')
@api_ns_episodes_blacklist.response(401, 'Not Authenticated')
@api_ns_episodes_blacklist.response(404, 'Episode not found')
@api_ns_episodes_blacklist.response(500, 'Subtitles file not found or permission issue.')
def post(self):
    """Add an episodes subtitles to blacklist."""
    args = self.post_request_parser.parse_args()
    # NOTE(review): the next three assignments were elided by the diff hunk
    # split; reconstructed from the request parser definitions and the
    # blacklist_log() call below — confirm against the original file.
    sonarr_series_id = args.get('seriesid')
    sonarr_episode_id = args.get('episodeid')
    provider = args.get('provider')
    subs_id = args.get('subs_id')
    language = args.get('language')

    episodeInfo = database.execute(
        select(TableEpisodes.path)
        .where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)) \
        .first()

    if not episodeInfo:
        return 'Episode not found', 404

    media_path = episodeInfo.path
    subtitles_path = args.get('subtitles_path')

    blacklist_log(sonarr_series_id=sonarr_series_id,
                  sonarr_episode_id=sonarr_episode_id,
                  provider=provider,
                  subs_id=subs_id,
                  language=language)
    # Only trigger a new search and the history event if the subtitles file
    # was actually deleted; otherwise surface the failure to the caller.
    if delete_subtitles(media_type='series',
                        language=language,
                        forced=False,
                        hi=False,
                        media_path=path_mappings.path_replace(media_path),
                        subtitles_path=subtitles_path,
                        sonarr_series_id=sonarr_series_id,
                        sonarr_episode_id=sonarr_episode_id):
        episode_download_subtitles(sonarr_episode_id)
        event_stream(type='episode-history')
        return '', 200
    else:
        return 'Subtitles file not found or permission issue.', 500
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty episodes subtitles blacklist')

View File

@ -1,8 +1,8 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes
from app.database import TableEpisodes, database, select
from api.swaggerui import subtitles_model, subtitles_language_model, audio_language_model
from ..utils import authenticate, postprocess
@ -23,29 +23,20 @@ class Episodes(Resource):
get_audio_language_model = api_ns_episodes.model('audio_language_model', audio_language_model)
get_response_model = api_ns_episodes.model('EpisodeGetResponse', {
'rowid': fields.Integer(),
'audio_codec': fields.String(),
'audio_language': fields.Nested(get_audio_language_model),
'episode': fields.Integer(),
'episode_file_id': fields.Integer(),
'failedAttempts': fields.String(),
'file_size': fields.Integer(),
'format': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'monitored': fields.Boolean(),
'path': fields.String(),
'resolution': fields.String(),
'season': fields.Integer(),
'sonarrEpisodeId': fields.Integer(),
'sonarrSeriesId': fields.Integer(),
'subtitles': fields.Nested(get_subtitles_model),
'title': fields.String(),
'video_codec': fields.String(),
'sceneName': fields.String(),
})
@authenticate
@api_ns_episodes.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_episodes.doc(parser=get_request_parser)
@api_ns_episodes.response(200, 'Success')
@api_ns_episodes.response(401, 'Not Authenticated')
def get(self):
    """List episodes metadata for specific series or episodes."""
    # NOTE(review): this line is reconstructed from the sibling methods'
    # pattern (the diff hunk cut it) — confirm against the original file.
    args = self.get_request_parser.parse_args()
    seriesId = args.get('seriesid[]')
    episodeId = args.get('episodeid[]')

    stmt = select(
        TableEpisodes.audio_language,
        TableEpisodes.episode,
        TableEpisodes.missing_subtitles,
        TableEpisodes.monitored,
        TableEpisodes.path,
        TableEpisodes.season,
        TableEpisodes.sonarrEpisodeId,
        TableEpisodes.sonarrSeriesId,
        TableEpisodes.subtitles,
        TableEpisodes.title,
        TableEpisodes.sceneName,
    )

    # Episode IDs take precedence over series IDs; at least one is required.
    if len(episodeId) > 0:
        stmt_query = database.execute(
            stmt
            .where(TableEpisodes.sonarrEpisodeId.in_(episodeId)))\
            .all()
    elif len(seriesId) > 0:
        stmt_query = database.execute(
            stmt
            .where(TableEpisodes.sonarrSeriesId.in_(seriesId))
            .order_by(TableEpisodes.season.desc(), TableEpisodes.episode.desc()))\
            .all()
    else:
        return "Series or Episode ID not provided", 404

    return marshal([postprocess({
        'audio_language': x.audio_language,
        'episode': x.episode,
        'missing_subtitles': x.missing_subtitles,
        'monitored': x.monitored,
        'path': x.path,
        'season': x.season,
        'sonarrEpisodeId': x.sonarrEpisodeId,
        'sonarrSeriesId': x.sonarrSeriesId,
        'subtitles': x.subtitles,
        'title': x.title,
        'sceneName': x.sceneName,
    }) for x in stmt_query], self.get_response_model, envelope='data')

View File

@ -7,7 +7,7 @@ from flask_restx import Resource, Namespace, reqparse
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from werkzeug.datastructures import FileStorage
from app.database import TableShows, TableEpisodes, get_audio_profile_languages, get_profile_id
from app.database import TableShows, TableEpisodes, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from subtitles.upload import manual_upload_subtitle
from subtitles.download import generate_subtitles
@ -37,33 +37,40 @@ class EpisodesSubtitles(Resource):
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_episodes_subtitles.response(500, 'Custom error messages')
def patch(self):
    """Download an episode subtitles."""
    args = self.patch_request_parser.parse_args()
    sonarrSeriesId = args.get('seriesid')
    sonarrEpisodeId = args.get('episodeid')

    episodeInfo = database.execute(
        select(TableEpisodes.path,
               TableEpisodes.sceneName,
               TableEpisodes.audio_language,
               TableShows.title)
        .select_from(TableEpisodes)
        .join(TableShows)
        .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
        .first()

    if not episodeInfo:
        return 'Episode not found', 404

    episodePath = path_mappings.path_replace(episodeInfo.path)

    if not os.path.exists(episodePath):
        return 'Episode file not found. Path mapping issue?', 500

    sceneName = episodeInfo.sceneName or "None"
    title = episodeInfo.title

    language = args.get('language')
    hi = args.get('hi').capitalize()
    forced = args.get('forced').capitalize()

    audio_language_list = get_audio_profile_languages(episodeInfo.audio_language)
    if len(audio_language_list) > 0:
        audio_language = audio_language_list[0]['name']
    else:
        # NOTE(review): this branch's body was elided by the diff hunk split;
        # reconstructed as a null fallback — confirm against the original file.
        audio_language = None

    try:
        result = list(generate_subtitles(episodePath, [(language, hi, forced)], audio_language, sceneName,
                                         title, 'series', profile_id=get_profile_id(episode_id=sonarrEpisodeId)))
        if isinstance(result, list) and len(result):
            result = result[0]
            history_log(1, sonarrSeriesId, sonarrEpisodeId, result)
            send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
            store_subtitles(result.path, episodePath)
        else:
            event_stream(type='episode', payload=sonarrEpisodeId)
            return 'No subtitles found', 500
    except OSError:
        return 'Unable to save subtitles file. Permission or path mapping issue?', 409
    else:
        return '', 204
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
@ -99,23 +106,28 @@ class EpisodesSubtitles(Resource):
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_episodes_subtitles.response(500, 'Episode file not found. Path mapping issue?')
def post(self):
"""Upload an episode subtitles"""
args = self.post_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.audio_language) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
episodeInfo = database.execute(
select(TableEpisodes.path,
TableEpisodes.audio_language)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
episodePath = path_mappings.path_replace(episodeInfo['path'])
episodePath = path_mappings.path_replace(episodeInfo.path)
audio_language = get_audio_profile_languages(episodeInfo['audio_language'])
if not os.path.exists(episodePath):
return 'Episode file not found. Path mapping issue?', 500
audio_language = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language) and isinstance(audio_language[0], dict):
audio_language = audio_language[0]
else:
@ -149,11 +161,10 @@ class EpisodesSubtitles(Resource):
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
except OSError:
pass
return '', 204
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
@ -168,23 +179,21 @@ class EpisodesSubtitles(Resource):
@api_ns_episodes_subtitles.response(204, 'Success')
@api_ns_episodes_subtitles.response(401, 'Not Authenticated')
@api_ns_episodes_subtitles.response(404, 'Episode not found')
@api_ns_episodes_subtitles.response(500, 'Subtitles file not found or permission issue.')
def delete(self):
    """Delete an episode subtitles."""
    args = self.delete_request_parser.parse_args()
    sonarrSeriesId = args.get('seriesid')
    sonarrEpisodeId = args.get('episodeid')
    episodeInfo = database.execute(
        select(TableEpisodes.path)
        .where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
        .first()

    if not episodeInfo:
        return 'Episode not found', 404

    episodePath = path_mappings.path_replace(episodeInfo.path)

    language = args.get('language')
    forced = args.get('forced')
    # NOTE(review): the next two assignments were elided by the diff hunk
    # split; reconstructed from the parser args and the delete_subtitles()
    # call below — confirm against the original file.
    hi = args.get('hi')
    subtitlesPath = args.get('subtitles_path')

    subtitlesPath = path_mappings.path_replace_reverse(subtitlesPath)

    # Report success only if the subtitles file was actually removed.
    if delete_subtitles(media_type='series',
                        language=language,
                        forced=forced,
                        hi=hi,
                        media_path=episodePath,
                        subtitles_path=subtitlesPath,
                        sonarr_series_id=sonarrSeriesId,
                        sonarr_episode_id=sonarrEpisodeId):
        return '', 204
    else:
        return 'Subtitles file not found or permission issue.', 500

View File

@ -1,17 +1,15 @@
# coding=utf-8
import os
import operator
import pretty
from flask_restx import Resource, Namespace, reqparse, fields
import ast
from functools import reduce
from app.database import TableEpisodes, TableShows, TableHistory, TableBlacklist
from subtitles.upgrade import get_upgradable_episode_subtitles
from utilities.path_mappings import path_mappings
from api.swaggerui import subtitles_language_model
from app.database import TableEpisodes, TableShows, TableHistory, TableBlacklist, database, select, func
from subtitles.upgrade import get_upgradable_episode_subtitles, _language_still_desired
import pretty
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from ..utils import authenticate, postprocess
api_ns_episodes_history = Namespace('Episodes History', description='List episodes history events')
@ -27,7 +25,6 @@ class EpisodesHistory(Resource):
get_language_model = api_ns_episodes_history.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_episodes_history.model('history_episodes_data_model', {
'id': fields.Integer(),
'seriesTitle': fields.String(),
'monitored': fields.Boolean(),
'episode_number': fields.String(),
@ -40,15 +37,14 @@ class EpisodesHistory(Resource):
'score': fields.String(),
'tags': fields.List(fields.String),
'action': fields.Integer(),
'video_path': fields.String(),
'subtitles_path': fields.String(),
'sonarrEpisodeId': fields.Integer(),
'provider': fields.String(),
'seriesType': fields.String(),
'upgradable': fields.Boolean(),
'raw_timestamp': fields.Integer(),
'parsed_timestamp': fields.String(),
'blacklisted': fields.Boolean(),
'matches': fields.List(fields.String),
'dont_matches': fields.List(fields.String),
})
get_response_model = api_ns_episodes_history.model('EpisodeHistoryGetResponse', {
@ -57,7 +53,6 @@ class EpisodesHistory(Resource):
})
@authenticate
@api_ns_episodes_history.response(401, 'Not Authenticated')
@api_ns_episodes_history.doc(parser=get_request_parser)
def get(self):
    """List episodes history events."""
    # NOTE(review): the next four lines are reconstructed from the sibling
    # methods' pattern (the diff hunk cut them) — confirm against the file.
    args = self.get_request_parser.parse_args()
    start = args.get('start')
    length = args.get('length')
    episodeid = args.get('episodeid')

    # Subquery of history rows whose subtitles could still be upgraded.
    upgradable_episodes_not_perfect = get_upgradable_episode_subtitles()

    blacklisted_subtitles = select(TableBlacklist.provider,
                                   TableBlacklist.subs_id) \
        .subquery()

    query_conditions = [(TableEpisodes.title.is_not(None))]
    if episodeid:
        query_conditions.append((TableEpisodes.sonarrEpisodeId == episodeid))

    stmt = select(TableHistory.id,
                  TableShows.title.label('seriesTitle'),
                  TableEpisodes.monitored,
                  TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
                  TableEpisodes.title.label('episodeTitle'),
                  TableHistory.timestamp,
                  TableHistory.subs_id,
                  TableHistory.description,
                  TableHistory.sonarrSeriesId,
                  TableEpisodes.path,
                  TableHistory.language,
                  TableHistory.score,
                  TableShows.tags,
                  TableHistory.action,
                  TableHistory.video_path,
                  TableHistory.subtitles_path,
                  TableHistory.sonarrEpisodeId,
                  TableHistory.provider,
                  TableShows.seriesType,
                  TableShows.profileId,
                  TableHistory.matched,
                  TableHistory.not_matched,
                  TableEpisodes.subtitles.label('external_subtitles'),
                  upgradable_episodes_not_perfect.c.id.label('upgradable'),
                  blacklisted_subtitles.c.subs_id.label('blacklisted')) \
        .select_from(TableHistory) \
        .join(TableShows, onclause=TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId) \
        .join(TableEpisodes, onclause=TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId) \
        .join(upgradable_episodes_not_perfect, onclause=TableHistory.id == upgradable_episodes_not_perfect.c.id,
              isouter=True) \
        .join(blacklisted_subtitles, onclause=TableHistory.subs_id == blacklisted_subtitles.c.subs_id,
              isouter=True) \
        .where(reduce(operator.and_, query_conditions)) \
        .order_by(TableHistory.timestamp.desc())
    if length > 0:
        stmt = stmt.limit(length).offset(start)

    episode_history = [{
        'id': x.id,
        'seriesTitle': x.seriesTitle,
        'monitored': x.monitored,
        'episode_number': x.episode_number,
        'episodeTitle': x.episodeTitle,
        'timestamp': x.timestamp,
        'subs_id': x.subs_id,
        'description': x.description,
        'sonarrSeriesId': x.sonarrSeriesId,
        'path': x.path,
        'language': x.language,
        'score': x.score,
        'tags': x.tags,
        'action': x.action,
        'video_path': x.video_path,
        'subtitles_path': x.subtitles_path,
        'sonarrEpisodeId': x.sonarrEpisodeId,
        'provider': x.provider,
        'matches': x.matched,
        'dont_matches': x.not_matched,
        'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
        'upgradable': bool(x.upgradable) if _language_still_desired(x.language, x.profileId) else False,
        'blacklisted': bool(x.blacklisted),
    } for x in database.execute(stmt).all()]

    for item in episode_history:
        original_video_path = item['path']
        original_subtitle_path = item['subtitles_path']
        item.update(postprocess(item))

        # Mark not upgradable if video/subtitles file doesn't exist anymore
        if item['upgradable']:
            if original_subtitle_path not in item['external_subtitles'] or \
                    not item['video_path'] == original_video_path:
                item.update({"upgradable": False})

        del item['path']
        del item['video_path']
        del item['external_subtitles']

        if item['score']:
            item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"

        # Make timestamp pretty
        if item['timestamp']:
            item["raw_timestamp"] = item['timestamp'].timestamp()
            item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
            item['timestamp'] = pretty.date(item["timestamp"])

        # Parse matches and dont_matches
        if item['matches']:
            item.update({'matches': ast.literal_eval(item['matches'])})
        else:
            item.update({'matches': []})

        if item['dont_matches']:
            item.update({'dont_matches': ast.literal_eval(item['dont_matches'])})
        else:
            item.update({'dont_matches': []})

    count = database.execute(
        select(func.count())
        .select_from(TableHistory)
        .join(TableEpisodes)
        .where(TableEpisodes.title.is_not(None))) \
        .scalar()

    return marshal({'data': episode_history, 'total': count}, self.get_response_model)

View File

@ -2,10 +2,10 @@
import operator
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import get_exclusion_clause, TableEpisodes, TableShows
from app.database import get_exclusion_clause, TableEpisodes, TableShows, database, select, func
from api.swaggerui import subtitles_language_model
from ..utils import authenticate, postprocess
@ -25,7 +25,6 @@ class EpisodesWanted(Resource):
data_model = api_ns_episodes_wanted.model('wanted_episodes_data_model', {
'seriesTitle': fields.String(),
'monitored': fields.Boolean(),
'episode_number': fields.String(),
'episodeTitle': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
@ -33,7 +32,6 @@ class EpisodesWanted(Resource):
'sonarrEpisodeId': fields.Integer(),
'sceneName': fields.String(),
'tags': fields.List(fields.String),
'failedAttempts': fields.String(),
'seriesType': fields.String(),
})
@ -43,7 +41,6 @@ class EpisodesWanted(Resource):
})
@authenticate
@api_ns_episodes_wanted.response(401, 'Not Authenticated')
@api_ns_episodes_wanted.doc(parser=get_request_parser)
def get(self):
    """List episodes wanted subtitles."""
    # NOTE(review): the next two lines are reconstructed from the sibling
    # methods' pattern (the diff hunk cut them) — confirm against the file.
    args = self.get_request_parser.parse_args()
    episodeid = args.get('episodeid[]')

    wanted_conditions = [(TableEpisodes.missing_subtitles != '[]')]
    if len(episodeid) > 0:
        wanted_conditions.append((TableEpisodes.sonarrEpisodeId in episodeid))
        # Explicit episode IDs: return everything, no paging.
        start = 0
        length = 0
    else:
        start = args.get('start')
        length = args.get('length')

    wanted_conditions += get_exclusion_clause('series')
    wanted_condition = reduce(operator.and_, wanted_conditions)

    stmt = select(TableShows.title.label('seriesTitle'),
                  TableEpisodes.season.concat('x').concat(TableEpisodes.episode).label('episode_number'),
                  TableEpisodes.title.label('episodeTitle'),
                  TableEpisodes.missing_subtitles,
                  TableEpisodes.sonarrSeriesId,
                  TableEpisodes.sonarrEpisodeId,
                  TableEpisodes.sceneName,
                  TableShows.tags,
                  TableShows.seriesType) \
        .select_from(TableEpisodes) \
        .join(TableShows) \
        .where(wanted_condition)

    if length > 0:
        stmt = stmt.order_by(TableEpisodes.sonarrEpisodeId.desc()).limit(length).offset(start)

    results = [postprocess({
        'seriesTitle': x.seriesTitle,
        'episode_number': x.episode_number,
        'episodeTitle': x.episodeTitle,
        'missing_subtitles': x.missing_subtitles,
        'sonarrSeriesId': x.sonarrSeriesId,
        'sonarrEpisodeId': x.sonarrEpisodeId,
        'sceneName': x.sceneName,
        'tags': x.tags,
        'seriesType': x.seriesType,
    }) for x in database.execute(stmt).all()]

    count = database.execute(
        select(func.count())
        .select_from(TableEpisodes)
        .join(TableShows)
        .where(wanted_condition)) \
        .scalar()

    return marshal({'data': results, 'total': count}, self.get_response_model)

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from utilities.filesystem import browse_bazarr_filesystem
@ -21,7 +21,6 @@ class BrowseBazarrFS(Resource):
})
@authenticate
@api_ns_files.marshal_with(get_response_model, code=200)
@api_ns_files.response(401, 'Not Authenticated')
@api_ns_files.doc(parser=get_request_parser)
def get(self):
@ -37,4 +36,4 @@ class BrowseBazarrFS(Resource):
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return data
return marshal(data, self.get_response_model)

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from radarr.filesystem import browse_radarr_filesystem
@ -22,7 +22,6 @@ class BrowseRadarrFS(Resource):
})
@authenticate
@api_ns_files_radarr.marshal_with(get_response_model, code=200)
@api_ns_files_radarr.response(401, 'Not Authenticated')
@api_ns_files_radarr.doc(parser=get_request_parser)
def get(self):
@ -38,4 +37,4 @@ class BrowseRadarrFS(Resource):
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return data
return marshal(data, self.get_response_model)

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from sonarr.filesystem import browse_sonarr_filesystem
@ -22,7 +22,6 @@ class BrowseSonarrFS(Resource):
})
@authenticate
@api_ns_files_sonarr.marshal_with(get_response_model, code=200)
@api_ns_files_sonarr.response(401, 'Not Authenticated')
@api_ns_files_sonarr.doc(parser=get_request_parser)
def get(self):
@ -38,4 +37,4 @@ class BrowseSonarrFS(Resource):
return []
for item in result['directories']:
data.append({'name': item['name'], 'children': True, 'path': item['path']})
return data
return marshal(data, self.get_response_model)

View File

@ -5,10 +5,10 @@ import operator
import itertools
from dateutil import rrule
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import TableHistory, TableHistoryMovie
from app.database import TableHistory, TableHistoryMovie, database, select
from ..utils import authenticate
@ -41,7 +41,6 @@ class HistoryStats(Resource):
})
@authenticate
@api_ns_history_stats.marshal_with(get_response_model, code=200)
@api_ns_history_stats.response(401, 'Not Authenticated')
@api_ns_history_stats.doc(parser=get_request_parser)
def get(self):
@ -86,17 +85,25 @@ class HistoryStats(Resource):
history_where_clause = reduce(operator.and_, history_where_clauses)
history_where_clause_movie = reduce(operator.and_, history_where_clauses_movie)
data_series = TableHistory.select(TableHistory.timestamp, TableHistory.id)\
.where(history_where_clause) \
.dicts()
data_series = [{
'timestamp': x.timestamp,
'id': x.id,
} for x in database.execute(
select(TableHistory.timestamp, TableHistory.id)
.where(history_where_clause))
.all()]
data_series = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_series),
key=lambda x: x['timestamp'].strftime(
'%Y-%m-%d'))]
data_movies = TableHistoryMovie.select(TableHistoryMovie.timestamp, TableHistoryMovie.id) \
.where(history_where_clause_movie) \
.dicts()
data_movies = [{
'timestamp': x.timestamp,
'id': x.id,
} for x in database.execute(
select(TableHistoryMovie.timestamp, TableHistoryMovie.id)
.where(history_where_clause_movie))
.all()]
data_movies = [{'date': date[0], 'count': sum(1 for item in date[1])} for date in
itertools.groupby(list(data_movies),
key=lambda x: x['timestamp'].strftime(
@ -113,4 +120,4 @@ class HistoryStats(Resource):
sorted_data_series = sorted(data_series, key=lambda i: i['date'])
sorted_data_movies = sorted(data_movies, key=lambda i: i['date'])
return {'series': sorted_data_series, 'movies': sorted_data_movies}
return marshal({'series': sorted_data_series, 'movies': sorted_data_movies}, self.get_response_model)

View File

@ -2,9 +2,9 @@
import pretty
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies, TableBlacklistMovie
from app.database import TableMovies, TableBlacklistMovie, database, select
from subtitles.tools.delete import delete_subtitles
from radarr.blacklist import blacklist_log_movie, blacklist_delete_all_movie, blacklist_delete_movie
from utilities.path_mappings import path_mappings
@ -37,7 +37,6 @@ class MoviesBlacklist(Resource):
})
@authenticate
@api_ns_movies_blacklist.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_movies_blacklist.response(401, 'Not Authenticated')
@api_ns_movies_blacklist.doc(parser=get_request_parser)
def get(self):
@ -46,26 +45,28 @@ class MoviesBlacklist(Resource):
start = args.get('start')
length = args.get('length')
data = TableBlacklistMovie.select(TableMovies.title,
TableMovies.radarrId,
TableBlacklistMovie.provider,
TableBlacklistMovie.subs_id,
TableBlacklistMovie.language,
TableBlacklistMovie.timestamp)\
.join(TableMovies, on=(TableBlacklistMovie.radarr_id == TableMovies.radarrId))\
.order_by(TableBlacklistMovie.timestamp.desc())
data = database.execute(
select(TableMovies.title,
TableMovies.radarrId,
TableBlacklistMovie.provider,
TableBlacklistMovie.subs_id,
TableBlacklistMovie.language,
TableBlacklistMovie.timestamp)
.select_from(TableBlacklistMovie)
.join(TableMovies)
.order_by(TableBlacklistMovie.timestamp.desc()))
if length > 0:
data = data.limit(length).offset(start)
data = list(data.dicts())
for item in data:
postprocess(item)
# Make timestamp pretty
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item.update({'timestamp': pretty.date(item['timestamp'])})
return data
return marshal([postprocess({
'title': x.title,
'radarrId': x.radarrId,
'provider': x.provider,
'subs_id': x.subs_id,
'language': x.language,
'timestamp': pretty.date(x.timestamp),
'parsed_timestamp': x.timestamp.strftime('%x %X'),
}) for x in data.all()], self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, required=True, help='Radarr ID')
@ -79,6 +80,7 @@ class MoviesBlacklist(Resource):
@api_ns_movies_blacklist.response(200, 'Success')
@api_ns_movies_blacklist.response(401, 'Not Authenticated')
@api_ns_movies_blacklist.response(404, 'Movie not found')
@api_ns_movies_blacklist.response(500, 'Subtitles file not found or permission issue.')
def post(self):
"""Add a movies subtitles to blacklist"""
args = self.post_request_parser.parse_args()
@ -90,28 +92,33 @@ class MoviesBlacklist(Resource):
forced = False
hi = False
data = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == radarr_id).dicts().get_or_none()
data = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == radarr_id))\
.first()
if not data:
return 'Movie not found', 404
media_path = data['path']
media_path = data.path
subtitles_path = args.get('subtitles_path')
blacklist_log_movie(radarr_id=radarr_id,
provider=provider,
subs_id=subs_id,
language=language)
delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=path_mappings.path_replace_movie(media_path),
subtitles_path=subtitles_path,
radarr_id=radarr_id)
movies_download_subtitles(radarr_id)
event_stream(type='movie-history')
return '', 200
if delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=path_mappings.path_replace_movie(media_path),
subtitles_path=subtitles_path,
radarr_id=radarr_id):
movies_download_subtitles(radarr_id)
event_stream(type='movie-history')
return '', 200
else:
return 'Subtitles file not found or permission issue.', 500
delete_request_parser = reqparse.RequestParser()
delete_request_parser.add_argument('all', type=str, required=False, help='Empty movies subtitles blacklist')

View File

@ -1,15 +1,14 @@
# coding=utf-8
import os
import operator
import pretty
import ast
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import TableMovies, TableHistoryMovie, TableBlacklistMovie
from subtitles.upgrade import get_upgradable_movies_subtitles
from utilities.path_mappings import path_mappings
from app.database import TableMovies, TableHistoryMovie, TableBlacklistMovie, database, select, func
from subtitles.upgrade import get_upgradable_movies_subtitles, _language_still_desired
from api.swaggerui import subtitles_language_model
from api.utils import authenticate, postprocess
@ -27,7 +26,6 @@ class MoviesHistory(Resource):
get_language_model = api_ns_movies_history.model('subtitles_language_model', subtitles_language_model)
data_model = api_ns_movies_history.model('history_movies_data_model', {
'id': fields.Integer(),
'action': fields.Integer(),
'title': fields.String(),
'timestamp': fields.String(),
@ -42,9 +40,10 @@ class MoviesHistory(Resource):
'provider': fields.String(),
'subtitles_path': fields.String(),
'upgradable': fields.Boolean(),
'raw_timestamp': fields.Integer(),
'parsed_timestamp': fields.String(),
'blacklisted': fields.Boolean(),
'matches': fields.List(fields.String),
'dont_matches': fields.List(fields.String),
})
get_response_model = api_ns_movies_history.model('MovieHistoryGetResponse', {
@ -53,7 +52,6 @@ class MoviesHistory(Resource):
})
@authenticate
@api_ns_movies_history.marshal_with(get_response_model, code=200)
@api_ns_movies_history.response(401, 'Not Authenticated')
@api_ns_movies_history.doc(parser=get_request_parser)
def get(self):
@ -64,79 +62,108 @@ class MoviesHistory(Resource):
radarrid = args.get('radarrid')
upgradable_movies_not_perfect = get_upgradable_movies_subtitles()
if len(upgradable_movies_not_perfect):
upgradable_movies_not_perfect = [{"video_path": x['video_path'],
"timestamp": x['timestamp'],
"score": x['score'],
"tags": x['tags'],
"monitored": x['monitored']}
for x in upgradable_movies_not_perfect]
query_conditions = [(TableMovies.title.is_null(False))]
blacklisted_subtitles = select(TableBlacklistMovie.provider,
TableBlacklistMovie.subs_id) \
.subquery()
query_conditions = [(TableMovies.title.is_not(None))]
if radarrid:
query_conditions.append((TableMovies.radarrId == radarrid))
query_condition = reduce(operator.and_, query_conditions)
movie_history = TableHistoryMovie.select(TableHistoryMovie.id,
TableHistoryMovie.action,
TableMovies.title,
TableHistoryMovie.timestamp,
TableHistoryMovie.description,
TableHistoryMovie.radarrId,
TableMovies.monitored,
TableHistoryMovie.video_path.alias('path'),
TableHistoryMovie.language,
TableMovies.tags,
TableHistoryMovie.score,
TableHistoryMovie.subs_id,
TableHistoryMovie.provider,
TableHistoryMovie.subtitles_path,
TableHistoryMovie.video_path) \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(query_condition) \
stmt = select(TableHistoryMovie.id,
TableHistoryMovie.action,
TableMovies.title,
TableHistoryMovie.timestamp,
TableHistoryMovie.description,
TableHistoryMovie.radarrId,
TableMovies.monitored,
TableMovies.path,
TableHistoryMovie.language,
TableMovies.tags,
TableHistoryMovie.score,
TableHistoryMovie.subs_id,
TableHistoryMovie.provider,
TableHistoryMovie.subtitles_path,
TableHistoryMovie.video_path,
TableHistoryMovie.matched,
TableHistoryMovie.not_matched,
TableMovies.profileId,
TableMovies.subtitles.label('external_subtitles'),
upgradable_movies_not_perfect.c.id.label('upgradable'),
blacklisted_subtitles.c.subs_id.label('blacklisted')) \
.select_from(TableHistoryMovie) \
.join(TableMovies) \
.join(upgradable_movies_not_perfect, onclause=TableHistoryMovie.id == upgradable_movies_not_perfect.c.id,
isouter=True) \
.join(blacklisted_subtitles, onclause=TableHistoryMovie.subs_id == blacklisted_subtitles.c.subs_id,
isouter=True) \
.where(reduce(operator.and_, query_conditions)) \
.order_by(TableHistoryMovie.timestamp.desc())
if length > 0:
movie_history = movie_history.limit(length).offset(start)
movie_history = list(movie_history.dicts())
blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
blacklist_db = list(blacklist_db)
stmt = stmt.limit(length).offset(start)
movie_history = [{
'id': x.id,
'action': x.action,
'title': x.title,
'timestamp': x.timestamp,
'description': x.description,
'radarrId': x.radarrId,
'monitored': x.monitored,
'path': x.path,
'language': x.language,
'tags': x.tags,
'score': x.score,
'subs_id': x.subs_id,
'provider': x.provider,
'subtitles_path': x.subtitles_path,
'video_path': x.video_path,
'matches': x.matched,
'dont_matches': x.not_matched,
'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
'upgradable': bool(x.upgradable) if _language_still_desired(x.language, x.profileId) else False,
'blacklisted': bool(x.blacklisted),
} for x in database.execute(stmt).all()]
for item in movie_history:
# Mark movies as upgradable or not
item.update({"upgradable": False})
if {"video_path": str(item['path']), "timestamp": item['timestamp'], "score": item['score'],
"tags": str(item['tags']),
"monitored": str(item['monitored'])} in upgradable_movies_not_perfect: # noqa: E129
if os.path.exists(path_mappings.path_replace_movie(item['subtitles_path'])) and \
os.path.exists(path_mappings.path_replace_movie(item['video_path'])):
item.update({"upgradable": True})
original_video_path = item['path']
original_subtitle_path = item['subtitles_path']
item.update(postprocess(item))
# Mark not upgradable if score or if video/subtitles file doesn't exist anymore
if item['upgradable']:
if original_subtitle_path not in item['external_subtitles'] or \
not item['video_path'] == original_video_path:
item.update({"upgradable": False})
del item['path']
postprocess(item)
del item['video_path']
del item['external_subtitles']
if item['score']:
item['score'] = str(round((int(item['score']) * 100 / 120), 2)) + "%"
# Make timestamp pretty
if item['timestamp']:
item["raw_timestamp"] = item['timestamp'].timestamp()
item["parsed_timestamp"] = item['timestamp'].strftime('%x %X')
item['timestamp'] = pretty.date(item["timestamp"])
# Check if subtitles is blacklisted
item.update({"blacklisted": False})
if item['action'] not in [0, 4, 5]:
for blacklisted_item in blacklist_db:
if blacklisted_item['provider'] == item['provider'] and blacklisted_item['subs_id'] == item[
'subs_id']: # noqa: E125
item.update({"blacklisted": True})
break
# Parse matches and dont_matches
if item['matches']:
item.update({'matches': ast.literal_eval(item['matches'])})
else:
item.update({'matches': []})
count = TableHistoryMovie.select() \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
.where(TableMovies.title.is_null(False)) \
.count()
if item['dont_matches']:
item.update({'dont_matches': ast.literal_eval(item['dont_matches'])})
else:
item.update({'dont_matches': []})
return {'data': movie_history, 'total': count}
count = database.execute(
select(func.count())
.select_from(TableHistoryMovie)
.join(TableMovies)
.where(TableMovies.title.is_not(None))) \
.scalar()
return marshal({'data': movie_history, 'total': count}, self.get_response_model)

View File

@ -1,8 +1,8 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies
from app.database import TableMovies, database, update, select, func
from subtitles.indexer.movies import list_missing_subtitles_movies, movies_scan_subtitles
from app.event_handler import event_stream
from subtitles.wanted import wanted_search_missing_subtitles_movies
@ -29,30 +29,20 @@ class Movies(Resource):
data_model = api_ns_movies.model('movies_data_model', {
'alternativeTitles': fields.List(fields.String),
'audio_codec': fields.String(),
'audio_language': fields.Nested(get_audio_language_model),
'failedAttempts': fields.String(),
'fanart': fields.String(),
'file_size': fields.Integer(),
'format': fields.String(),
'imdbId': fields.String(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'monitored': fields.Boolean(),
'movie_file_id': fields.Integer(),
'overview': fields.String(),
'path': fields.String(),
'poster': fields.String(),
'profileId': fields.Integer(),
'radarrId': fields.Integer(),
'resolution': fields.String(),
'rowid': fields.Integer(),
'sceneName': fields.String(),
'sortTitle': fields.String(),
'subtitles': fields.Nested(get_subtitles_model),
'tags': fields.List(fields.String),
'title': fields.String(),
'tmdbId': fields.String(),
'video_codec': fields.String(),
'year': fields.String(),
})
@ -62,7 +52,6 @@ class Movies(Resource):
})
@authenticate
@api_ns_movies.marshal_with(get_response_model, code=200)
@api_ns_movies.doc(parser=get_request_parser)
@api_ns_movies.response(200, 'Success')
@api_ns_movies.response(401, 'Not Authenticated')
@ -73,23 +62,56 @@ class Movies(Resource):
length = args.get('length')
radarrId = args.get('radarrid[]')
count = TableMovies.select().count()
stmt = select(TableMovies.alternativeTitles,
TableMovies.audio_language,
TableMovies.fanart,
TableMovies.imdbId,
TableMovies.missing_subtitles,
TableMovies.monitored,
TableMovies.overview,
TableMovies.path,
TableMovies.poster,
TableMovies.profileId,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.subtitles,
TableMovies.tags,
TableMovies.title,
TableMovies.year,
)\
.order_by(TableMovies.sortTitle)
if len(radarrId) != 0:
result = TableMovies.select()\
.where(TableMovies.radarrId.in_(radarrId))\
.order_by(TableMovies.sortTitle)\
.dicts()
else:
result = TableMovies.select().order_by(TableMovies.sortTitle)
if length > 0:
result = result.limit(length).offset(start)
result = result.dicts()
result = list(result)
for item in result:
postprocess(item)
stmt = stmt.where(TableMovies.radarrId.in_(radarrId))
return {'data': result, 'total': count}
if length > 0:
stmt = stmt.limit(length).offset(start)
results = [postprocess({
'alternativeTitles': x.alternativeTitles,
'audio_language': x.audio_language,
'fanart': x.fanart,
'imdbId': x.imdbId,
'missing_subtitles': x.missing_subtitles,
'monitored': x.monitored,
'overview': x.overview,
'path': x.path,
'poster': x.poster,
'profileId': x.profileId,
'radarrId': x.radarrId,
'sceneName': x.sceneName,
'subtitles': x.subtitles,
'tags': x.tags,
'title': x.title,
'year': x.year,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableMovies)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, action='append', required=False, default=[],
@ -120,11 +142,10 @@ class Movies(Resource):
except Exception:
return 'Languages profile not found', 404
TableMovies.update({
TableMovies.profileId: profileId
})\
.where(TableMovies.radarrId == radarrId)\
.execute()
database.execute(
update(TableMovies)
.values(profileId=profileId)
.where(TableMovies.radarrId == radarrId))
list_missing_subtitles_movies(no=radarrId, send_event=False)
@ -144,6 +165,7 @@ class Movies(Resource):
@api_ns_movies.response(204, 'Success')
@api_ns_movies.response(400, 'Unknown action')
@api_ns_movies.response(401, 'Not Authenticated')
@api_ns_movies.response(500, 'Movie file not found. Path mapping issue?')
def patch(self):
"""Run actions on specific movies"""
args = self.patch_request_parser.parse_args()
@ -153,8 +175,12 @@ class Movies(Resource):
movies_scan_subtitles(radarrid)
return '', 204
elif action == "search-missing":
movies_download_subtitles(radarrid)
return '', 204
try:
movies_download_subtitles(radarrid)
except OSError:
return 'Movie file not found. Path mapping issue?', 500
else:
return '', 204
elif action == "search-wanted":
wanted_search_missing_subtitles_movies()
return '', 204

View File

@ -1,6 +1,5 @@
# coding=utf-8
import contextlib
import os
import logging
@ -8,7 +7,7 @@ from flask_restx import Resource, Namespace, reqparse
from subliminal_patch.core import SUBTITLE_EXTENSIONS
from werkzeug.datastructures import FileStorage
from app.database import TableMovies, get_audio_profile_languages, get_profile_id
from app.database import TableMovies, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from subtitles.upload import manual_upload_subtitle
from subtitles.download import generate_subtitles
@ -37,48 +36,58 @@ class MoviesSubtitles(Resource):
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_movies_subtitles.response(500, 'Custom error messages')
def patch(self):
"""Download a movie subtitles"""
args = self.patch_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(
TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
movieInfo = database.execute(
select(
TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
sceneName = movieInfo['sceneName'] or 'None'
moviePath = path_mappings.path_replace_movie(movieInfo.path)
title = movieInfo['title']
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
sceneName = movieInfo.sceneName or 'None'
title = movieInfo.title
language = args.get('language')
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
audio_language_list = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = None
with contextlib.suppress(OSError):
try:
result = list(generate_subtitles(moviePath, [(language, hi, forced)], audio_language,
sceneName, title, 'movie', profile_id=get_profile_id(movie_id=radarrId)))
if result:
if isinstance(result, list) and len(result):
result = result[0]
history_log_movie(1, radarrId, result)
store_subtitles_movie(result.path, moviePath)
else:
event_stream(type='movie', payload=radarrId)
return '', 204
return 'No subtitles found', 500
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
# POST: Upload Subtitles
post_request_parser = reqparse.RequestParser()
@ -94,23 +103,27 @@ class MoviesSubtitles(Resource):
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(409, 'Unable to save subtitles file. Permission or path mapping issue?')
@api_ns_movies_subtitles.response(500, 'Movie file not found. Path mapping issue?')
def post(self):
"""Upload a movie subtitles"""
# TODO: Support Multiply Upload
args = self.post_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(TableMovies.path,
TableMovies.audio_language) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
movieInfo = database.execute(
select(TableMovies.path, TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
moviePath = path_mappings.path_replace_movie(movieInfo.path)
audio_language = get_audio_profile_languages(movieInfo['audio_language'])
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
audio_language = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language) and isinstance(audio_language[0], dict):
audio_language = audio_language[0]
else:
@ -126,7 +139,7 @@ class MoviesSubtitles(Resource):
if not isinstance(ext, str) or ext.lower() not in SUBTITLE_EXTENSIONS:
raise ValueError('A subtitle of an invalid format was uploaded.')
with contextlib.suppress(OSError):
try:
result = manual_upload_subtitle(path=moviePath,
language=language,
forced=forced,
@ -144,7 +157,10 @@ class MoviesSubtitles(Resource):
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
return '', 204
except OSError:
return 'Unable to save subtitles file. Permission or path mapping issue?', 409
else:
return '', 204
# DELETE: Delete Subtitles
delete_request_parser = reqparse.RequestParser()
@ -159,19 +175,20 @@ class MoviesSubtitles(Resource):
@api_ns_movies_subtitles.response(204, 'Success')
@api_ns_movies_subtitles.response(401, 'Not Authenticated')
@api_ns_movies_subtitles.response(404, 'Movie not found')
@api_ns_movies_subtitles.response(500, 'Subtitles file not found or permission issue.')
def delete(self):
"""Delete a movie subtitles"""
args = self.delete_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(TableMovies.path) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
movieInfo = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
moviePath = path_mappings.path_replace_movie(movieInfo.path)
language = args.get('language')
forced = args.get('forced')
@ -180,12 +197,13 @@ class MoviesSubtitles(Resource):
subtitlesPath = path_mappings.path_replace_reverse_movie(subtitlesPath)
delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=moviePath,
subtitles_path=subtitlesPath,
radarr_id=radarrId)
return '', 204
if delete_subtitles(media_type='movie',
language=language,
forced=forced,
hi=hi,
media_path=moviePath,
subtitles_path=subtitlesPath,
radarr_id=radarrId):
return '', 204
else:
return 'Subtitles file not found or permission issue.', 500

View File

@ -2,10 +2,10 @@
import operator
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from app.database import get_exclusion_clause, TableMovies
from app.database import get_exclusion_clause, TableMovies, database, select, func
from api.swaggerui import subtitles_language_model
from api.utils import authenticate, postprocess
@ -26,12 +26,10 @@ class MoviesWanted(Resource):
data_model = api_ns_movies_wanted.model('wanted_movies_data_model', {
'title': fields.String(),
'monitored': fields.Boolean(),
'missing_subtitles': fields.Nested(get_subtitles_language_model),
'radarrId': fields.Integer(),
'sceneName': fields.String(),
'tags': fields.List(fields.String),
'failedAttempts': fields.String(),
})
get_response_model = api_ns_movies_wanted.model('MovieWantedGetResponse', {
@ -40,7 +38,6 @@ class MoviesWanted(Resource):
})
@authenticate
@api_ns_movies_wanted.marshal_with(get_response_model, code=200)
@api_ns_movies_wanted.response(401, 'Not Authenticated')
@api_ns_movies_wanted.doc(parser=get_request_parser)
def get(self):
@ -51,44 +48,36 @@ class MoviesWanted(Resource):
wanted_conditions = [(TableMovies.missing_subtitles != '[]')]
if len(radarrid) > 0:
wanted_conditions.append((TableMovies.radarrId.in_(radarrid)))
wanted_conditions += get_exclusion_clause('movie')
wanted_condition = reduce(operator.and_, wanted_conditions)
if len(radarrid) > 0:
result = TableMovies.select(TableMovies.title,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.failedAttempts,
TableMovies.tags,
TableMovies.monitored)\
.where(wanted_condition)\
.dicts()
start = 0
length = 0
else:
start = args.get('start')
length = args.get('length')
result = TableMovies.select(TableMovies.title,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.failedAttempts,
TableMovies.tags,
TableMovies.monitored)\
.where(wanted_condition)\
.order_by(TableMovies.rowid.desc())
if length > 0:
result = result.limit(length).offset(start)
result = result.dicts()
result = list(result)
for item in result:
postprocess(item)
wanted_conditions += get_exclusion_clause('movie')
wanted_condition = reduce(operator.and_, wanted_conditions)
count_conditions = [(TableMovies.missing_subtitles != '[]')]
count_conditions += get_exclusion_clause('movie')
count = TableMovies.select(TableMovies.monitored,
TableMovies.tags)\
.where(reduce(operator.and_, count_conditions))\
.count()
stmt = select(TableMovies.title,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.tags) \
.where(wanted_condition)
if length > 0:
stmt = stmt.order_by(TableMovies.radarrId.desc()).limit(length).offset(start)
return {'data': result, 'total': count}
results = [postprocess({
'title': x.title,
'missing_subtitles': x.missing_subtitles,
'radarrId': x.radarrId,
'sceneName': x.sceneName,
'tags': x.tags,
}) for x in database.execute(stmt).all()]
count = database.execute(
select(func.count())
.select_from(TableMovies)
.where(wanted_condition)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)

View File

@ -1,9 +1,9 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from operator import itemgetter
from app.database import TableHistory, TableHistoryMovie
from app.database import TableHistory, TableHistoryMovie, database, select
from app.get_providers import list_throttled_providers, reset_throttled_providers
from ..utils import authenticate, False_Keys
@ -23,7 +23,6 @@ class Providers(Resource):
})
@authenticate
@api_ns_providers.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_providers.response(200, 'Success')
@api_ns_providers.response(401, 'Not Authenticated')
@api_ns_providers.doc(parser=get_request_parser)
@ -32,20 +31,25 @@ class Providers(Resource):
args = self.get_request_parser.parse_args()
history = args.get('history')
if history and history not in False_Keys:
providers = list(TableHistory.select(TableHistory.provider)
.where(TableHistory.provider is not None and TableHistory.provider != "manual")
.dicts())
providers += list(TableHistoryMovie.select(TableHistoryMovie.provider)
.where(TableHistoryMovie.provider is not None and TableHistoryMovie.provider != "manual")
.dicts())
providers_list = list(set([x['provider'] for x in providers]))
providers = database.execute(
select(TableHistory.provider)
.where(TableHistory.provider and TableHistory.provider != "manual")
.distinct())\
.all()
providers += database.execute(
select(TableHistoryMovie.provider)
.where(TableHistoryMovie.provider and TableHistoryMovie.provider != "manual")
.distinct())\
.all()
providers_list = [x.provider for x in providers]
providers_dicts = []
for provider in providers_list:
providers_dicts.append({
'name': provider,
'status': 'History',
'retry': '-'
})
if provider not in [x['name'] for x in providers_dicts]:
providers_dicts.append({
'name': provider,
'status': 'History',
'retry': '-'
})
else:
throttled_providers = list_throttled_providers()
@ -56,7 +60,7 @@ class Providers(Resource):
"status": provider[1] if provider[1] is not None else "Good",
"retry": provider[2] if provider[2] != "now" else "-"
})
return sorted(providers_dicts, key=itemgetter('name'))
return marshal(sorted(providers_dicts, key=itemgetter('name')), self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('action', type=str, required=True, help='Action to perform from ["reset"]')

View File

@ -1,8 +1,10 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
import os
from app.database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableEpisodes, TableShows, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from app.get_providers import get_providers
from subtitles.manual import manual_search, manual_download_subtitle
@ -10,6 +12,7 @@ from sonarr.history import history_log
from app.config import settings
from app.notifier import send_notifications
from subtitles.indexer.series import store_subtitles
from subtitles.processing import ProcessSubtitlesResult
from ..utils import authenticate
@ -39,37 +42,42 @@ class ProviderEpisodes(Resource):
})
@authenticate
@api_ns_providers_episodes.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_providers_episodes.response(401, 'Not Authenticated')
@api_ns_providers_episodes.response(404, 'Episode not found')
@api_ns_providers_episodes.response(500, 'Custom error messages')
@api_ns_providers_episodes.doc(parser=get_request_parser)
def get(self):
"""Search manually for an episode subtitles"""
args = self.get_request_parser.parse_args()
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title,
TableShows.profileId) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
episodeInfo = database.execute(
select(TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title,
TableShows.profileId)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['sceneName'] or "None"
profileId = episodeInfo['profileId']
title = episodeInfo.title
episodePath = path_mappings.path_replace(episodeInfo.path)
if not os.path.exists(episodePath):
return 'Episode file not found. Path mapping issue?', 500
sceneName = episodeInfo.sceneName or "None"
profileId = episodeInfo.profileId
providers_list = get_providers()
data = manual_search(episodePath, profileId, providers_list, sceneName, title, 'series')
if not data:
data = []
return data
if isinstance(data, str):
return data, 500
return marshal(data, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, required=True, help='Series ID')
@ -86,27 +94,29 @@ class ProviderEpisodes(Resource):
@api_ns_providers_episodes.response(204, 'Success')
@api_ns_providers_episodes.response(401, 'Not Authenticated')
@api_ns_providers_episodes.response(404, 'Episode not found')
@api_ns_providers_episodes.response(500, 'Custom error messages')
def post(self):
"""Manually download an episode subtitles"""
args = self.post_request_parser.parse_args()
sonarrSeriesId = args.get('seriesid')
sonarrEpisodeId = args.get('episodeid')
episodeInfo = TableEpisodes.select(
TableEpisodes.audio_language,
TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId) \
.dicts() \
.get_or_none()
episodeInfo = database.execute(
select(
TableEpisodes.audio_language,
TableEpisodes.path,
TableEpisodes.sceneName,
TableShows.title)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.sonarrEpisodeId == sonarrEpisodeId)) \
.first()
if not episodeInfo:
return 'Episode not found', 404
title = episodeInfo['title']
episodePath = path_mappings.path_replace(episodeInfo['path'])
sceneName = episodeInfo['sceneName'] or "None"
title = episodeInfo.title
episodePath = path_mappings.path_replace(episodeInfo.path)
sceneName = episodeInfo.sceneName or "None"
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
@ -114,7 +124,7 @@ class ProviderEpisodes(Resource):
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(episodeInfo["audio_language"])
audio_language_list = get_audio_profile_languages(episodeInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -124,12 +134,15 @@ class ProviderEpisodes(Resource):
result = manual_download_subtitle(episodePath, audio_language, hi, forced, subtitle, selected_provider,
sceneName, title, 'series', use_original_format,
profile_id=get_profile_id(episode_id=sonarrEpisodeId))
if result:
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, ProcessSubtitlesResult):
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
except OSError:
pass
return '', 204
elif isinstance(result, str):
return result, 500
else:
return '', 204

View File

@ -1,8 +1,10 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
import os
from app.database import TableMovies, get_audio_profile_languages, get_profile_id
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.database import TableMovies, get_audio_profile_languages, get_profile_id, database, select
from utilities.path_mappings import path_mappings
from app.get_providers import get_providers
from subtitles.manual import manual_search, manual_download_subtitle
@ -10,6 +12,7 @@ from radarr.history import history_log_movie
from app.config import settings
from app.notifier import send_notifications_movie
from subtitles.indexer.movies import store_subtitles_movie
from subtitles.processing import ProcessSubtitlesResult
from ..utils import authenticate
@ -40,36 +43,40 @@ class ProviderMovies(Resource):
})
@authenticate
@api_ns_providers_movies.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_providers_movies.response(401, 'Not Authenticated')
@api_ns_providers_movies.response(404, 'Movie not found')
@api_ns_providers_movies.response(500, 'Custom error messages')
@api_ns_providers_movies.doc(parser=get_request_parser)
def get(self):
"""Search manually for a movie subtitles"""
args = self.get_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.profileId) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
movieInfo = database.execute(
select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.profileId)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
sceneName = movieInfo['sceneName'] or "None"
profileId = movieInfo['profileId']
title = movieInfo.title
moviePath = path_mappings.path_replace_movie(movieInfo.path)
if not os.path.exists(moviePath):
return 'Movie file not found. Path mapping issue?', 500
sceneName = movieInfo.sceneName or "None"
profileId = movieInfo.profileId
providers_list = get_providers()
data = manual_search(moviePath, profileId, providers_list, sceneName, title, 'movie')
if not data:
data = []
return data
if isinstance(data, str):
return data, 500
return marshal(data, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('radarrid', type=int, required=True, help='Movie ID')
@ -85,24 +92,25 @@ class ProviderMovies(Resource):
@api_ns_providers_movies.response(204, 'Success')
@api_ns_providers_movies.response(401, 'Not Authenticated')
@api_ns_providers_movies.response(404, 'Movie not found')
@api_ns_providers_movies.response(500, 'Custom error messages')
def post(self):
"""Manually download a movie subtitles"""
args = self.post_request_parser.parse_args()
radarrId = args.get('radarrid')
movieInfo = TableMovies.select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language) \
.where(TableMovies.radarrId == radarrId) \
.dicts() \
.get_or_none()
movieInfo = database.execute(
select(TableMovies.title,
TableMovies.path,
TableMovies.sceneName,
TableMovies.audio_language)
.where(TableMovies.radarrId == radarrId)) \
.first()
if not movieInfo:
return 'Movie not found', 404
title = movieInfo['title']
moviePath = path_mappings.path_replace_movie(movieInfo['path'])
sceneName = movieInfo['sceneName'] or "None"
title = movieInfo.title
moviePath = path_mappings.path_replace_movie(movieInfo.path)
sceneName = movieInfo.sceneName or "None"
hi = args.get('hi').capitalize()
forced = args.get('forced').capitalize()
@ -110,7 +118,7 @@ class ProviderMovies(Resource):
selected_provider = args.get('provider')
subtitle = args.get('subtitle')
audio_language_list = get_audio_profile_languages(movieInfo["audio_language"])
audio_language_list = get_audio_profile_languages(movieInfo.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -120,12 +128,15 @@ class ProviderMovies(Resource):
result = manual_download_subtitle(moviePath, audio_language, hi, forced, subtitle, selected_provider,
sceneName, title, 'movie', use_original_format,
profile_id=get_profile_id(movie_id=radarrId))
if result is not None:
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, ProcessSubtitlesResult):
history_log_movie(2, radarrId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):
send_notifications_movie(radarrId, result.message)
store_subtitles_movie(result.path, moviePath)
except OSError:
pass
return '', 204
elif isinstance(result, str):
return result, 500
else:
return '', 204

View File

@ -2,11 +2,10 @@
import operator
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from functools import reduce
from peewee import fn, JOIN
from app.database import get_exclusion_clause, TableEpisodes, TableShows
from app.database import get_exclusion_clause, TableEpisodes, TableShows, database, select, update, func
from subtitles.indexer.series import list_missing_subtitles, series_scan_subtitles
from subtitles.mass_download import series_download_subtitles
from subtitles.wanted import wanted_search_missing_subtitles_series
@ -45,7 +44,6 @@ class Series(Resource):
'profileId': fields.Integer(),
'seriesType': fields.String(),
'sonarrSeriesId': fields.Integer(),
'sortTitle': fields.String(),
'tags': fields.List(fields.String),
'title': fields.String(),
'tvdbId': fields.Integer(),
@ -58,7 +56,6 @@ class Series(Resource):
})
@authenticate
@api_ns_series.marshal_with(get_response_model, code=200)
@api_ns_series.doc(parser=get_request_parser)
@api_ns_series.response(200, 'Success')
@api_ns_series.response(401, 'Not Authenticated')
@ -69,40 +66,77 @@ class Series(Resource):
length = args.get('length')
seriesId = args.get('seriesid[]')
count = TableShows.select().count()
episodeFileCount = TableEpisodes.select(TableShows.sonarrSeriesId,
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeFileCount')) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.group_by(TableShows.sonarrSeriesId).alias('episodeFileCount')
episodeFileCount = select(TableShows.sonarrSeriesId,
func.count(TableEpisodes.sonarrSeriesId).label('episodeFileCount')) \
.select_from(TableEpisodes) \
.join(TableShows) \
.group_by(TableShows.sonarrSeriesId)\
.subquery()
episodes_missing_conditions = [(TableEpisodes.missing_subtitles != '[]')]
episodes_missing_conditions += get_exclusion_clause('series')
episodeMissingCount = (TableEpisodes.select(TableShows.sonarrSeriesId,
fn.COUNT(TableEpisodes.sonarrSeriesId).coerce(False).alias('episodeMissingCount'))
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))
.where(reduce(operator.and_, episodes_missing_conditions)).group_by(
TableShows.sonarrSeriesId).alias('episodeMissingCount'))
episodeMissingCount = select(TableShows.sonarrSeriesId,
func.count(TableEpisodes.sonarrSeriesId).label('episodeMissingCount')) \
.select_from(TableEpisodes) \
.join(TableShows) \
.where(reduce(operator.and_, episodes_missing_conditions)) \
.group_by(TableShows.sonarrSeriesId)\
.subquery()
result = TableShows.select(TableShows, episodeFileCount.c.episodeFileCount,
episodeMissingCount.c.episodeMissingCount).join(episodeFileCount,
join_type=JOIN.LEFT_OUTER, on=(
TableShows.sonarrSeriesId ==
episodeFileCount.c.sonarrSeriesId)
) \
.join(episodeMissingCount, join_type=JOIN.LEFT_OUTER,
on=(TableShows.sonarrSeriesId == episodeMissingCount.c.sonarrSeriesId)).order_by(TableShows.sortTitle)
stmt = select(TableShows.tvdbId,
TableShows.alternativeTitles,
TableShows.audio_language,
TableShows.fanart,
TableShows.imdbId,
TableShows.monitored,
TableShows.overview,
TableShows.path,
TableShows.poster,
TableShows.profileId,
TableShows.seriesType,
TableShows.sonarrSeriesId,
TableShows.tags,
TableShows.title,
TableShows.year,
episodeFileCount.c.episodeFileCount,
episodeMissingCount.c.episodeMissingCount) \
.select_from(TableShows) \
.join(episodeFileCount, TableShows.sonarrSeriesId == episodeFileCount.c.sonarrSeriesId, isouter=True) \
.join(episodeMissingCount, TableShows.sonarrSeriesId == episodeMissingCount.c.sonarrSeriesId, isouter=True)\
.order_by(TableShows.sortTitle)
if len(seriesId) != 0:
result = result.where(TableShows.sonarrSeriesId.in_(seriesId))
stmt = stmt.where(TableShows.sonarrSeriesId.in_(seriesId))
elif length > 0:
result = result.limit(length).offset(start)
result = list(result.dicts())
stmt = stmt.limit(length).offset(start)
for item in result:
postprocess(item)
results = [postprocess({
'tvdbId': x.tvdbId,
'alternativeTitles': x.alternativeTitles,
'audio_language': x.audio_language,
'fanart': x.fanart,
'imdbId': x.imdbId,
'monitored': x.monitored,
'overview': x.overview,
'path': x.path,
'poster': x.poster,
'profileId': x.profileId,
'seriesType': x.seriesType,
'sonarrSeriesId': x.sonarrSeriesId,
'tags': x.tags,
'title': x.title,
'year': x.year,
'episodeFileCount': x.episodeFileCount,
'episodeMissingCount': x.episodeMissingCount,
}) for x in database.execute(stmt).all()]
return {'data': result, 'total': count}
count = database.execute(
select(func.count())
.select_from(TableShows)) \
.scalar()
return marshal({'data': results, 'total': count}, self.get_response_model)
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('seriesid', type=int, action='append', required=False, default=[],
@ -133,23 +167,22 @@ class Series(Resource):
except Exception:
return 'Languages profile not found', 404
TableShows.update({
TableShows.profileId: profileId
}) \
.where(TableShows.sonarrSeriesId == seriesId) \
.execute()
database.execute(
update(TableShows)
.values(profileId=profileId)
.where(TableShows.sonarrSeriesId == seriesId))
list_missing_subtitles(no=seriesId, send_event=False)
event_stream(type='series', payload=seriesId)
episode_id_list = TableEpisodes \
.select(TableEpisodes.sonarrEpisodeId) \
.where(TableEpisodes.sonarrSeriesId == seriesId) \
.dicts()
episode_id_list = database.execute(
select(TableEpisodes.sonarrEpisodeId)
.where(TableEpisodes.sonarrSeriesId == seriesId))\
.all()
for item in episode_id_list:
event_stream(type='episode-wanted', payload=item['sonarrEpisodeId'])
event_stream(type='episode-wanted', payload=item.sonarrEpisodeId)
event_stream(type='badges')
@ -165,6 +198,7 @@ class Series(Resource):
@api_ns_series.response(204, 'Success')
@api_ns_series.response(400, 'Unknown action')
@api_ns_series.response(401, 'Not Authenticated')
@api_ns_series.response(500, 'Series directory not found. Path mapping issue?')
def patch(self):
"""Run actions on specific series"""
args = self.patch_request_parser.parse_args()
@ -174,8 +208,12 @@ class Series(Resource):
series_scan_subtitles(seriesid)
return '', 204
elif action == "search-missing":
series_download_subtitles(seriesid)
return '', 204
try:
series_download_subtitles(seriesid)
except OSError:
return 'Series directory not found. Path mapping issue?', 500
else:
return '', 204
elif action == "search-wanted":
wanted_search_missing_subtitles_series()
return '', 204

View File

@ -6,7 +6,7 @@ import gc
from flask_restx import Resource, Namespace, reqparse
from app.database import TableEpisodes, TableMovies
from app.database import TableEpisodes, TableMovies, database, select
from languages.get_languages import alpha3_from_alpha2
from utilities.path_mappings import path_mappings
from subtitles.tools.subsyncer import SubSyncer
@ -31,7 +31,7 @@ class Subtitles(Resource):
patch_request_parser.add_argument('language', type=str, required=True, help='Language code2')
patch_request_parser.add_argument('path', type=str, required=True, help='Subtitles file path')
patch_request_parser.add_argument('type', type=str, required=True, help='Media type from ["episode", "movie"]')
patch_request_parser.add_argument('id', type=int, required=True, help='Episode ID')
patch_request_parser.add_argument('id', type=int, required=True, help='Media ID (episodeId, radarrId)')
patch_request_parser.add_argument('forced', type=str, required=False, help='Forced subtitles from ["True", "False"]')
patch_request_parser.add_argument('hi', type=str, required=False, help='HI subtitles from ["True", "False"]')
patch_request_parser.add_argument('original_format', type=str, required=False,
@ -42,6 +42,8 @@ class Subtitles(Resource):
@api_ns_subtitles.response(204, 'Success')
@api_ns_subtitles.response(401, 'Not Authenticated')
@api_ns_subtitles.response(404, 'Episode/movie not found')
@api_ns_subtitles.response(409, 'Unable to edit subtitles file. Check logs.')
@api_ns_subtitles.response(500, 'Subtitles file not found. Path mapping issue?')
def patch(self):
"""Apply mods/tools on external subtitles"""
args = self.patch_request_parser.parse_args()
@ -52,33 +54,42 @@ class Subtitles(Resource):
media_type = args.get('type')
id = args.get('id')
if not os.path.exists(subtitles_path):
return 'Subtitles file not found. Path mapping issue?', 500
if media_type == 'episode':
metadata = TableEpisodes.select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)\
.where(TableEpisodes.sonarrEpisodeId == id)\
.dicts()\
.get_or_none()
metadata = database.execute(
select(TableEpisodes.path, TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.sonarrEpisodeId == id)) \
.first()
if not metadata:
return 'Episode not found', 404
video_path = path_mappings.path_replace(metadata['path'])
video_path = path_mappings.path_replace(metadata.path)
else:
metadata = TableMovies.select(TableMovies.path).where(TableMovies.radarrId == id).dicts().get_or_none()
metadata = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == id))\
.first()
if not metadata:
return 'Movie not found', 404
video_path = path_mappings.path_replace_movie(metadata['path'])
video_path = path_mappings.path_replace_movie(metadata.path)
if action == 'sync':
subsync = SubSyncer()
if media_type == 'episode':
subsync.sync(video_path=video_path, srt_path=subtitles_path,
srt_lang=language, media_type='series', sonarr_series_id=metadata['sonarrSeriesId'],
sonarr_episode_id=int(id))
srt_lang=language, media_type='series', sonarr_series_id=metadata.sonarrSeriesId,
sonarr_episode_id=id)
else:
subsync.sync(video_path=video_path, srt_path=subtitles_path,
srt_lang=language, media_type='movies', radarr_id=id)
try:
subsync.sync(video_path=video_path, srt_path=subtitles_path,
srt_lang=language, media_type='movies', radarr_id=id)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
del subsync
gc.collect()
elif action == 'translate':
@ -86,16 +97,22 @@ class Subtitles(Resource):
dest_language = language
forced = True if args.get('forced') == 'true' else False
hi = True if args.get('hi') == 'true' else False
translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
from_lang=from_language, to_lang=dest_language, forced=forced, hi=hi,
media_type="series" if media_type == "episode" else "movies",
sonarr_series_id=metadata.get('sonarrSeriesId'),
sonarr_episode_id=int(id),
radarr_id=id)
try:
translate_subtitles_file(video_path=video_path, source_srt_file=subtitles_path,
from_lang=from_language, to_lang=dest_language, forced=forced, hi=hi,
media_type="series" if media_type == "episode" else "movies",
sonarr_series_id=metadata.sonarrSeriesId if media_type == "episode" else None,
sonarr_episode_id=id,
radarr_id=id)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
else:
use_original_format = True if args.get('original_format') == 'true' else False
subtitles_apply_mods(language=language, subtitle_path=subtitles_path, mods=[action],
use_original_format=use_original_format, video_path=video_path)
try:
subtitles_apply_mods(language=language, subtitle_path=subtitles_path, mods=[action],
use_original_format=use_original_format, video_path=video_path)
except OSError:
return 'Unable to edit subtitles file. Check logs.', 409
# apply chmod if required
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
@ -105,11 +122,11 @@ class Subtitles(Resource):
if media_type == 'episode':
store_subtitles(path_mappings.path_replace_reverse(video_path), video_path)
event_stream(type='series', payload=metadata['sonarrSeriesId'])
event_stream(type='episode', payload=int(id))
event_stream(type='series', payload=metadata.sonarrSeriesId)
event_stream(type='episode', payload=id)
else:
store_subtitles_movie(path_mappings.path_replace_reverse_movie(video_path), video_path)
event_stream(type='movie', payload=int(id))
event_stream(type='movie', payload=id)
return '', 204

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from subliminal_patch.core import guessit
from ..utils import authenticate
@ -24,7 +24,6 @@ class SubtitleNameInfo(Resource):
})
@authenticate
@api_ns_subtitles_info.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_subtitles_info.response(200, 'Success')
@api_ns_subtitles_info.response(401, 'Not Authenticated')
@api_ns_subtitles_info.doc(parser=get_request_parser)
@ -60,4 +59,4 @@ class SubtitleNameInfo(Resource):
results.append(result)
return results
return marshal(results, self.get_response_model, envelope='data')

View File

@ -2,7 +2,7 @@
import gc
from flask import session
from flask import session, request
from flask_restx import Resource, Namespace, reqparse
from app.config import settings
@ -22,23 +22,31 @@ class SystemAccount(Resource):
@api_ns_system_account.doc(parser=post_request_parser)
@api_ns_system_account.response(204, 'Success')
@api_ns_system_account.response(400, 'Unknown action')
@api_ns_system_account.response(404, 'Unknown authentication type define in config.ini')
@api_ns_system_account.response(403, 'Authentication failed')
@api_ns_system_account.response(406, 'Browser must be closed to invalidate basic authentication')
@api_ns_system_account.response(500, 'Unknown authentication type define in config.ini')
def post(self):
"""Login or logout from Bazarr UI when using form login"""
args = self.post_request_parser.parse_args()
if settings.auth.type != 'form':
return 'Unknown authentication type define in config.ini', 404
return 'Unknown authentication type define in config.ini', 500
action = args.get('action')
if action == 'login':
username = args.get('username')
password = args.get('password')
if check_credentials(username, password):
if check_credentials(username, password, request):
session['logged_in'] = True
return '', 204
else:
session['logged_in'] = False
return 'Authentication failed', 403
elif action == 'logout':
session.clear()
gc.collect()
return '', 204
if settings.auth.type == 'basic':
return 'Browser must be closed to invalidate basic authentication', 406
else:
session.clear()
gc.collect()
return '', 204
return 'Unknown action', 400

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from utilities.backup import get_backup_files, prepare_restore, delete_backup_file, backup_to_zip
@ -19,14 +19,13 @@ class SystemBackups(Resource):
})
@authenticate
@api_ns_system_backups.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_system_backups.doc(parser=None)
@api_ns_system_backups.response(204, 'Success')
@api_ns_system_backups.response(401, 'Not Authenticated')
def get(self):
"""List backup files"""
backups = get_backup_files(fullpath=False)
return backups
return marshal(backups, self.get_response_model, envelope='data')
@authenticate
@api_ns_system_backups.doc(parser=None)

View File

@ -3,7 +3,7 @@
from flask_restx import Resource, Namespace, reqparse
from operator import itemgetter
from app.database import TableHistory, TableHistoryMovie, TableSettingsLanguages
from app.database import TableHistory, TableHistoryMovie, TableSettingsLanguages, database, select
from languages.get_languages import alpha2_from_alpha3, language_from_alpha2, alpha3_from_alpha2
from ..utils import authenticate, False_Keys
@ -25,13 +25,15 @@ class Languages(Resource):
args = self.get_request_parser.parse_args()
history = args.get('history')
if history and history not in False_Keys:
languages = list(TableHistory.select(TableHistory.language)
.where(TableHistory.language.is_null(False))
.dicts())
languages += list(TableHistoryMovie.select(TableHistoryMovie.language)
.where(TableHistoryMovie.language.is_null(False))
.dicts())
languages_list = list(set([lang['language'].split(':')[0] for lang in languages]))
languages = database.execute(
select(TableHistory.language)
.where(TableHistory.language.is_not(None)))\
.all()
languages += database.execute(
select(TableHistoryMovie.language)
.where(TableHistoryMovie.language.is_not(None)))\
.all()
languages_list = [lang.language.split(':')[0] for lang in languages]
languages_dicts = []
for language in languages_list:
code2 = None
@ -54,13 +56,17 @@ class Languages(Resource):
except Exception:
continue
else:
languages_dicts = TableSettingsLanguages.select(TableSettingsLanguages.name,
TableSettingsLanguages.code2,
TableSettingsLanguages.code3,
TableSettingsLanguages.enabled)\
.order_by(TableSettingsLanguages.name).dicts()
languages_dicts = list(languages_dicts)
for item in languages_dicts:
item['enabled'] = item['enabled'] == 1
languages_dicts = [{
'name': x.name,
'code2': x.code2,
'code3': x.code3,
'enabled': x.enabled == 1
} for x in database.execute(
select(TableSettingsLanguages.name,
TableSettingsLanguages.code2,
TableSettingsLanguages.code3,
TableSettingsLanguages.enabled)
.order_by(TableSettingsLanguages.name))
.all()]
return sorted(languages_dicts, key=itemgetter('name'))

View File

@ -3,7 +3,7 @@
import io
import os
from flask_restx import Resource, Namespace, fields
from flask_restx import Resource, Namespace, fields, marshal
from app.logger import empty_log
from app.get_args import args
@ -23,7 +23,6 @@ class SystemLogs(Resource):
})
@authenticate
@api_ns_system_logs.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_system_logs.doc(parser=None)
@api_ns_system_logs.response(200, 'Success')
@api_ns_system_logs.response(401, 'Not Authenticated')
@ -50,7 +49,7 @@ class SystemLogs(Resource):
logs.append(log)
logs.reverse()
return logs
return marshal(logs, self.get_response_model, envelope='data')
@authenticate
@api_ns_system_logs.doc(parser=None)

View File

@ -5,7 +5,7 @@ import json
import os
import logging
from flask_restx import Resource, Namespace, fields
from flask_restx import Resource, Namespace, fields, marshal
from app.config import settings
from app.get_args import args
@ -26,7 +26,6 @@ class SystemReleases(Resource):
})
@authenticate
@api_ns_system_releases.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_system_releases.doc(parser=None)
@api_ns_system_releases.response(200, 'Success')
@api_ns_system_releases.response(401, 'Not Authenticated')
@ -60,4 +59,4 @@ class SystemReleases(Resource):
except Exception:
logging.exception(
'BAZARR cannot parse releases caching file: ' + os.path.join(args.config_dir, 'config', 'releases.txt'))
return filtered_releases
return marshal(filtered_releases, self.get_response_model, envelope='data')

View File

@ -1,9 +1,10 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse
from unidecode import unidecode
from app.config import settings
from app.database import TableShows, TableMovies
from app.database import TableShows, TableMovies, database, select
from ..utils import authenticate
@ -22,30 +23,42 @@ class Searches(Resource):
def get(self):
"""List results from query"""
args = self.get_request_parser.parse_args()
query = args.get('query')
query = unidecode(args.get('query')).lower()
search_list = []
if query:
if settings.general.getboolean('use_sonarr'):
# Get matching series
series = TableShows.select(TableShows.title,
TableShows.sonarrSeriesId,
TableShows.year)\
.where(TableShows.title.contains(query))\
.order_by(TableShows.title)\
.dicts()
series = list(series)
search_list += series
search_list += database.execute(
select(TableShows.title,
TableShows.sonarrSeriesId,
TableShows.year)
.order_by(TableShows.title)) \
.all()
if settings.general.getboolean('use_radarr'):
# Get matching movies
movies = TableMovies.select(TableMovies.title,
TableMovies.radarrId,
TableMovies.year) \
.where(TableMovies.title.contains(query)) \
.order_by(TableMovies.title) \
.dicts()
movies = list(movies)
search_list += movies
search_list += database.execute(
select(TableMovies.title,
TableMovies.radarrId,
TableMovies.year)
.order_by(TableMovies.title)) \
.all()
return search_list
results = []
for x in search_list:
if query in unidecode(x.title).lower():
result = {
'title': x.title,
'year': x.year,
}
if hasattr(x, 'sonarrSeriesId'):
result['sonarrSeriesId'] = x.sonarrSeriesId
else:
result['radarrId'] = x.radarrId
results.append(result)
return results

View File

@ -5,8 +5,8 @@ import json
from flask import request, jsonify
from flask_restx import Resource, Namespace
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableShows, TableMovies, \
TableSettingsNotifier, update_profile_id_list
from app.database import TableLanguagesProfiles, TableSettingsLanguages, TableSettingsNotifier, \
update_profile_id_list, database, insert, update, delete, select
from app.event_handler import event_stream
from app.config import settings, save_settings, get_settings
from app.scheduler import scheduler
@ -24,15 +24,17 @@ class SystemSettings(Resource):
@authenticate
def get(self):
data = get_settings()
notifications = TableSettingsNotifier.select().order_by(TableSettingsNotifier.name).dicts()
notifications = list(notifications)
for i, item in enumerate(notifications):
item["enabled"] = item["enabled"] == 1
notifications[i] = item
data['notifications'] = dict()
data['notifications']['providers'] = notifications
data['notifications']['providers'] = [{
'name': x.name,
'enabled': x.enabled == 1,
'url': x.url
} for x in database.execute(
select(TableSettingsNotifier.name,
TableSettingsNotifier.enabled,
TableSettingsNotifier.url)
.order_by(TableSettingsNotifier.name))
.all()]
return jsonify(data)
@ -40,57 +42,55 @@ class SystemSettings(Resource):
def post(self):
enabled_languages = request.form.getlist('languages-enabled')
if len(enabled_languages) != 0:
TableSettingsLanguages.update({
TableSettingsLanguages.enabled: 0
}).execute()
database.execute(
update(TableSettingsLanguages)
.values(enabled=0))
for code in enabled_languages:
TableSettingsLanguages.update({
TableSettingsLanguages.enabled: 1
})\
.where(TableSettingsLanguages.code2 == code)\
.execute()
database.execute(
update(TableSettingsLanguages)
.values(enabled=1)
.where(TableSettingsLanguages.code2 == code))
event_stream("languages")
languages_profiles = request.form.get('languages-profiles')
if languages_profiles:
existing_ids = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId).dicts()
existing_ids = list(existing_ids)
existing = [x['profileId'] for x in existing_ids]
existing_ids = database.execute(
select(TableLanguagesProfiles.profileId))\
.all()
existing = [x.profileId for x in existing_ids]
for item in json.loads(languages_profiles):
if item['profileId'] in existing:
# Update existing profiles
TableLanguagesProfiles.update({
TableLanguagesProfiles.name: item['name'],
TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
TableLanguagesProfiles.items: json.dumps(item['items']),
TableLanguagesProfiles.mustContain: item['mustContain'],
TableLanguagesProfiles.mustNotContain: item['mustNotContain'],
TableLanguagesProfiles.originalFormat: item['originalFormat'] if item['originalFormat'] != 'null' else None,
})\
.where(TableLanguagesProfiles.profileId == item['profileId'])\
.execute()
database.execute(
update(TableLanguagesProfiles)
.values(
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] != 'null' else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=item['originalFormat'] if item['originalFormat'] != 'null' else None,
)
.where(TableLanguagesProfiles.profileId == item['profileId']))
existing.remove(item['profileId'])
else:
# Add new profiles
TableLanguagesProfiles.insert({
TableLanguagesProfiles.profileId: item['profileId'],
TableLanguagesProfiles.name: item['name'],
TableLanguagesProfiles.cutoff: item['cutoff'] if item['cutoff'] != 'null' else None,
TableLanguagesProfiles.items: json.dumps(item['items']),
TableLanguagesProfiles.mustContain: item['mustContain'],
TableLanguagesProfiles.mustNotContain: item['mustNotContain'],
TableLanguagesProfiles.originalFormat: item['originalFormat'] if item['originalFormat'] != 'null' else None,
}).execute()
database.execute(
insert(TableLanguagesProfiles)
.values(
profileId=item['profileId'],
name=item['name'],
cutoff=item['cutoff'] if item['cutoff'] != 'null' else None,
items=json.dumps(item['items']),
mustContain=str(item['mustContain']),
mustNotContain=str(item['mustNotContain']),
originalFormat=item['originalFormat'] if item['originalFormat'] != 'null' else None,
))
for profileId in existing:
# Unassign this profileId from series and movies
TableShows.update({
TableShows.profileId: None
}).where(TableShows.profileId == profileId).execute()
TableMovies.update({
TableMovies.profileId: None
}).where(TableMovies.profileId == profileId).execute()
# Remove deleted profiles
TableLanguagesProfiles.delete().where(TableLanguagesProfiles.profileId == profileId).execute()
database.execute(
delete(TableLanguagesProfiles)
.where(TableLanguagesProfiles.profileId == profileId))
# invalidate cache
update_profile_id_list.invalidate()
@ -106,10 +106,11 @@ class SystemSettings(Resource):
notifications = request.form.getlist('notifications-providers')
for item in notifications:
item = json.loads(item)
TableSettingsNotifier.update({
TableSettingsNotifier.enabled: item['enabled'],
TableSettingsNotifier.url: item['url']
}).where(TableSettingsNotifier.name == item['name']).execute()
database.execute(
update(TableSettingsNotifier).values(
enabled=int(item['enabled'] is True),
url=item['url'])
.where(TableSettingsNotifier.name == item['name']))
save_settings(zip(request.form.keys(), request.form.listvalues()))
event_stream("settings")

View File

@ -1,6 +1,6 @@
# coding=utf-8
from flask_restx import Resource, Namespace, reqparse, fields
from flask_restx import Resource, Namespace, reqparse, fields, marshal
from app.scheduler import scheduler
@ -24,7 +24,6 @@ class SystemTasks(Resource):
get_request_parser.add_argument('taskid', type=str, required=False, help='List tasks or a single task properties')
@authenticate
@api_ns_system_tasks.marshal_with(get_response_model, envelope='data', code=200)
@api_ns_system_tasks.doc(parser=None)
@api_ns_system_tasks.response(200, 'Success')
@api_ns_system_tasks.response(401, 'Not Authenticated')
@ -41,7 +40,7 @@ class SystemTasks(Resource):
task_list = [item]
continue
return task_list
return marshal(task_list, self.get_response_model, envelope='data')
post_request_parser = reqparse.RequestParser()
post_request_parser.add_argument('taskid', type=str, required=True, help='Task id of the task to run')

View File

@ -36,7 +36,7 @@ def authenticate(actual_method):
def postprocess(item):
# Remove ffprobe_cache
if item.get('movie_file_id'):
if item.get('radarrId'):
path_replace = path_mappings.path_replace_movie
else:
path_replace = path_mappings.path_replace
@ -57,12 +57,6 @@ def postprocess(item):
else:
item['alternativeTitles'] = []
# Parse failed attempts
if item.get('failedAttempts'):
item['failedAttempts'] = ast.literal_eval(item['failedAttempts'])
else:
item['failedAttempts'] = []
# Parse subtitles
if item.get('subtitles'):
item['subtitles'] = ast.literal_eval(item['subtitles'])
@ -135,10 +129,6 @@ def postprocess(item):
"hi": bool(item['language'].endswith(':hi')),
}
# Parse seriesType
if item.get('seriesType'):
item['seriesType'] = item['seriesType'].capitalize()
if item.get('path'):
item['path'] = path_replace(item['path'])
@ -149,8 +139,10 @@ def postprocess(item):
# map poster and fanart to server proxy
if item.get('poster') is not None:
poster = item['poster']
item['poster'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{poster}" if poster else None
item['poster'] = f"{base_url}/images/{'movies' if item.get('radarrId') else 'series'}{poster}" if poster else None
if item.get('fanart') is not None:
fanart = item['fanart']
item['fanart'] = f"{base_url}/images/{'movies' if item.get('movie_file_id') else 'series'}{fanart}" if fanart else None
item['fanart'] = f"{base_url}/images/{'movies' if item.get('radarrId') else 'series'}{fanart}" if fanart else None
return item

View File

@ -8,7 +8,7 @@ import logging
from flask_restx import Resource, Namespace, reqparse
from bs4 import BeautifulSoup as bso
from app.database import TableEpisodes, TableShows, TableMovies
from app.database import TableEpisodes, TableShows, TableMovies, database, select
from subtitles.mass_download import episode_download_subtitles, movies_download_subtitles
from ..utils import authenticate
@ -73,16 +73,17 @@ class WebHooksPlex(Resource):
logging.debug('BAZARR is unable to get series IMDB id.')
return 'IMDB series ID not found', 404
else:
sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
sonarrEpisodeId = database.execute(
select(TableEpisodes.sonarrEpisodeId)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableShows.imdbId == series_imdb_id,
TableEpisodes.season == season,
TableEpisodes.episode == episode) \
.dicts() \
.get_or_none()
TableEpisodes.episode == episode)) \
.first()
if sonarrEpisodeId:
episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
episode_download_subtitles(no=sonarrEpisodeId.sonarrEpisodeId, send_progress=True)
else:
try:
movie_imdb_id = [x['imdb'] for x in ids if 'imdb' in x][0]
@ -90,12 +91,12 @@ class WebHooksPlex(Resource):
logging.debug('BAZARR is unable to get movie IMDB id.')
return 'IMDB movie ID not found', 404
else:
radarrId = TableMovies.select(TableMovies.radarrId)\
.where(TableMovies.imdbId == movie_imdb_id)\
.dicts()\
.get_or_none()
radarrId = database.execute(
select(TableMovies.radarrId)
.where(TableMovies.imdbId == movie_imdb_id)) \
.first()
if radarrId:
movies_download_subtitles(no=radarrId['radarrId'])
movies_download_subtitles(no=radarrId.radarrId)
return '', 200

View File

@ -2,7 +2,7 @@
from flask_restx import Resource, Namespace, reqparse
from app.database import TableMovies
from app.database import TableMovies, database, select
from subtitles.mass_download import movies_download_subtitles
from subtitles.indexer.movies import store_subtitles_movie
from utilities.path_mappings import path_mappings
@ -28,14 +28,13 @@ class WebHooksRadarr(Resource):
args = self.post_request_parser.parse_args()
movie_file_id = args.get('radarr_moviefile_id')
radarrMovieId = TableMovies.select(TableMovies.radarrId,
TableMovies.path) \
.where(TableMovies.movie_file_id == movie_file_id) \
.dicts() \
.get_or_none()
radarrMovieId = database.execute(
select(TableMovies.radarrId, TableMovies.path)
.where(TableMovies.movie_file_id == movie_file_id)) \
.first()
if radarrMovieId:
store_subtitles_movie(radarrMovieId['path'], path_mappings.path_replace_movie(radarrMovieId['path']))
movies_download_subtitles(no=radarrMovieId['radarrId'])
store_subtitles_movie(radarrMovieId.path, path_mappings.path_replace_movie(radarrMovieId.path))
movies_download_subtitles(no=radarrMovieId.radarrId)
return '', 200

View File

@ -2,7 +2,7 @@
from flask_restx import Resource, Namespace, reqparse
from app.database import TableEpisodes, TableShows
from app.database import TableEpisodes, TableShows, database, select
from subtitles.mass_download import episode_download_subtitles
from subtitles.indexer.series import store_subtitles
from utilities.path_mappings import path_mappings
@ -28,15 +28,15 @@ class WebHooksSonarr(Resource):
args = self.post_request_parser.parse_args()
episode_file_id = args.get('sonarr_episodefile_id')
sonarrEpisodeId = TableEpisodes.select(TableEpisodes.sonarrEpisodeId,
TableEpisodes.path) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.episode_file_id == episode_file_id) \
.dicts() \
.get_or_none()
sonarrEpisodeId = database.execute(
select(TableEpisodes.sonarrEpisodeId, TableEpisodes.path)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.episode_file_id == episode_file_id)) \
.first()
if sonarrEpisodeId:
store_subtitles(sonarrEpisodeId['path'], path_mappings.path_replace(sonarrEpisodeId['path']))
episode_download_subtitles(no=sonarrEpisodeId['sonarrEpisodeId'], send_progress=True)
store_subtitles(sonarrEpisodeId.path, path_mappings.path_replace(sonarrEpisodeId.path))
episode_download_subtitles(no=sonarrEpisodeId.sonarrEpisodeId, send_progress=True)
return '', 200

View File

@ -11,10 +11,11 @@ from datetime import datetime
from operator import itemgetter
from app.get_providers import get_enabled_providers
from app.database import TableAnnouncements
from app.database import TableAnnouncements, database, insert, select
from .get_args import args
from sonarr.info import get_sonarr_info
from radarr.info import get_radarr_info
from app.check_update import deprecated_python_version
# Announcements as receive by browser must be in the form of a list of dicts converted to JSON
@ -42,7 +43,8 @@ def parse_announcement_dict(announcement_dict):
def get_announcements_to_file():
try:
r = requests.get("https://raw.githubusercontent.com/morpheus65535/bazarr-binaries/master/announcements.json")
r = requests.get("https://raw.githubusercontent.com/morpheus65535/bazarr-binaries/master/announcements.json",
timeout=10)
except requests.exceptions.HTTPError:
logging.exception("Error trying to get announcements from Github. Http error.")
except requests.exceptions.ConnectionError:
@ -104,6 +106,15 @@ def get_local_announcements():
'timestamp': 1679606309,
})
# deprecated Python versions
if deprecated_python_version():
announcements.append({
'text': 'Starting with Bazarr 1.4, support for Python 3.7 will get dropped. Upgrade your current version of'
' Python ASAP to get further updates.',
'dismissible': False,
'timestamp': 1691162383,
})
for announcement in announcements:
if 'enabled' not in announcement:
announcement['enabled'] = True
@ -116,9 +127,12 @@ def get_local_announcements():
def get_all_announcements():
# get announcements that haven't been dismissed yet
announcements = [parse_announcement_dict(x) for x in get_online_announcements() + get_local_announcements() if
x['enabled'] and (not x['dismissible'] or not TableAnnouncements.select()
.where(TableAnnouncements.hash ==
hashlib.sha256(x['text'].encode('UTF8')).hexdigest()).get_or_none())]
x['enabled'] and (not x['dismissible'] or not
database.execute(
select(TableAnnouncements)
.where(TableAnnouncements.hash ==
hashlib.sha256(x['text'].encode('UTF8')).hexdigest()))
.first())]
return sorted(announcements, key=itemgetter('timestamp'), reverse=True)
@ -126,8 +140,9 @@ def get_all_announcements():
def mark_announcement_as_dismissed(hashed_announcement):
text = [x['text'] for x in get_all_announcements() if x['hash'] == hashed_announcement]
if text:
TableAnnouncements.insert({TableAnnouncements.hash: hashed_announcement,
TableAnnouncements.timestamp: datetime.now(),
TableAnnouncements.text: text[0]})\
.on_conflict_ignore(ignore=True)\
.execute()
database.execute(
insert(TableAnnouncements)
.values(hash=hashed_announcement,
timestamp=datetime.now(),
text=text[0])
.on_conflict_do_nothing())

View File

@ -45,14 +45,13 @@ def create_app():
# generated by the request.
@app.before_request
def _db_connect():
database.connect()
database.begin()
# This hook ensures that the connection is closed when we've finished
# processing the request.
@app.teardown_request
def _db_close(exc):
if not database.is_closed():
database.close()
database.close()
return app

View File

@ -6,6 +6,7 @@ import logging
import json
import requests
import semver
import sys
from shutil import rmtree
from zipfile import ZipFile
@ -14,6 +15,11 @@ from .get_args import args
from .config import settings
def deprecated_python_version():
    """Return True when the running Python interpreter is deprecated for Bazarr.

    Per the announcement shipped with this release, support for Python
    versions older than 3.8 (including all of Python 2) is scheduled to be
    dropped, so callers use this to pick compatible releases and to warn
    the user.
    """
    # A single tuple comparison covers both Python 2 and Python 3.x < 3.8,
    # replacing the equivalent major/minor compound boolean expression.
    return sys.version_info < (3, 8)
def check_releases():
releases = []
url_releases = 'https://api.github.com/repos/morpheus65535/Bazarr/releases?per_page=100'
@ -62,13 +68,26 @@ def check_if_new_update():
with open(os.path.join(args.config_dir, 'config', 'releases.txt'), 'r') as f:
data = json.load(f)
if not args.no_update:
release = None
if use_prerelease:
release = next((item for item in data), None)
if deprecated_python_version():
release = next((item['name'].lstrip('v') for item in data if
semver.VersionInfo.parse('1.3.1') > semver.VersionInfo.parse(item['name'].lstrip('v'))))
else:
release = next((item for item in data), None)
else:
release = next((item for item in data if not item["prerelease"]), None)
if deprecated_python_version():
next((item['name'].lstrip('v') for item in data if
not item['prerelease'] and semver.VersionInfo.parse('1.3.1') > semver.VersionInfo.parse(
item['name'].lstrip('v'))))
else:
release = next((item for item in data if not item["prerelease"]), None)
if release:
if release and 'name' in release:
logging.debug('BAZARR last release available is {}'.format(release['name']))
if deprecated_python_version():
logging.warning('BAZARR is using a deprecated Python version, you must update Python to get latest '
'version available.')
current_version = None
try:

View File

@ -61,7 +61,7 @@ defaults = {
'ignore_pgs_subs': 'False',
'ignore_vobsub_subs': 'False',
'ignore_ass_subs': 'False',
'adaptive_searching': 'False',
'adaptive_searching': 'True',
'adaptive_searching_delay': '3w',
'adaptive_searching_delta': '1w',
'enabled_providers': '[]',
@ -594,10 +594,8 @@ def save_settings(settings_items):
if audio_tracks_parsing_changed:
from .scheduler import scheduler
if settings.general.getboolean('use_sonarr'):
from sonarr.sync.episodes import sync_episodes
from sonarr.sync.series import update_series
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1)
if settings.general.getboolean('use_radarr'):
from radarr.sync.movies import update_movies
scheduler.add_job(update_movies, kwargs={'send_event': True}, max_instances=1)

View File

@ -4,19 +4,20 @@ import atexit
import json
import logging
import os
import time
from datetime import datetime
import flask_migrate
from dogpile.cache import make_region
from peewee import Model, AutoField, TextField, IntegerField, ForeignKeyField, BlobField, BooleanField, BigIntegerField, \
DateTimeField, OperationalError, PostgresqlDatabase
from playhouse.migrate import PostgresqlMigrator
from playhouse.migrate import SqliteMigrator, migrate
from playhouse.shortcuts import ThreadSafeDatabaseMetadata, ReconnectMixin
from playhouse.sqlite_ext import RowIDField
from playhouse.sqliteq import SqliteQueueDatabase
from datetime import datetime
from sqlalchemy import create_engine, inspect, DateTime, ForeignKey, Integer, LargeBinary, Text, func, text
# importing here to be indirectly imported in other modules later
from sqlalchemy import update, delete, select, func # noqa W0611
from sqlalchemy.orm import scoped_session, sessionmaker, mapped_column
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import NullPool
from flask_sqlalchemy import SQLAlchemy
from utilities.path_mappings import path_mappings
from .config import settings, get_array_from
from .get_args import args
@ -26,11 +27,12 @@ postgresql = (os.getenv("POSTGRES_ENABLED", settings.postgresql.enabled).lower()
region = make_region().configure('dogpile.cache.memory')
migrations_directory = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'migrations')
if postgresql:
class ReconnectPostgresqlDatabase(ReconnectMixin, PostgresqlDatabase):
reconnect_errors = (
(OperationalError, 'server closed the connection unexpectedly'),
)
# insert is different between database types
from sqlalchemy.dialects.postgresql import insert # noqa E402
from sqlalchemy.engine import URL # noqa E402
postgres_database = os.getenv("POSTGRES_DATABASE", settings.postgresql.database)
postgres_username = os.getenv("POSTGRES_USERNAME", settings.postgresql.username)
@ -38,520 +40,285 @@ if postgresql:
postgres_host = os.getenv("POSTGRES_HOST", settings.postgresql.host)
postgres_port = os.getenv("POSTGRES_PORT", settings.postgresql.port)
logger.debug(
f"Connecting to PostgreSQL database: {postgres_host}:{postgres_port}/{postgres_database}")
database = ReconnectPostgresqlDatabase(postgres_database,
user=postgres_username,
password=postgres_password,
host=postgres_host,
port=postgres_port,
autocommit=True,
autorollback=True,
autoconnect=True,
)
migrator = PostgresqlMigrator(database)
logger.debug(f"Connecting to PostgreSQL database: {postgres_host}:{postgres_port}/{postgres_database}")
url = URL.create(
drivername="postgresql",
username=postgres_username,
password=postgres_password,
host=postgres_host,
port=postgres_port,
database=postgres_database
)
engine = create_engine(url, poolclass=NullPool, isolation_level="AUTOCOMMIT")
else:
db_path = os.path.join(args.config_dir, 'db', 'bazarr.db')
logger.debug(f"Connecting to SQLite database: {db_path}")
database = SqliteQueueDatabase(db_path,
use_gevent=False,
autostart=True,
queue_max_size=256)
migrator = SqliteMigrator(database)
# insert is different between database types
from sqlalchemy.dialects.sqlite import insert # noqa E402
url = f'sqlite:///{os.path.join(args.config_dir, "db", "bazarr.db")}'
logger.debug(f"Connecting to SQLite database: {url}")
engine = create_engine(url, poolclass=NullPool, isolation_level="AUTOCOMMIT")
from sqlalchemy.engine import Engine
from sqlalchemy import event
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Enable foreign-key enforcement on every new SQLite connection.

    SQLite keeps ``foreign_keys`` off by default and the setting is
    per-connection, so it must be re-issued from this connect-event hook
    each time the pool hands out a fresh DBAPI connection.
    """
    pragma_cursor = dbapi_connection.cursor()
    pragma_cursor.execute("PRAGMA foreign_keys=ON")
    pragma_cursor.close()
session_factory = sessionmaker(bind=engine)
database = scoped_session(session_factory)
@atexit.register
def _stop_worker_threads():
if not postgresql:
database.stop()
database.remove()
class UnknownField(object):
def __init__(self, *_, **__): pass
Base = declarative_base()
metadata = Base.metadata
class BaseModel(Model):
class Meta:
database = database
model_metadata_class = ThreadSafeDatabaseMetadata
class System(Base):
__tablename__ = 'system'
id = mapped_column(Integer, primary_key=True)
configured = mapped_column(Text)
updated = mapped_column(Text)
class System(BaseModel):
configured = TextField(null=True)
updated = TextField(null=True)
class TableAnnouncements(Base):
__tablename__ = 'table_announcements'
class Meta:
table_name = 'system'
primary_key = False
id = mapped_column(Integer, primary_key=True)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
hash = mapped_column(Text)
text = mapped_column(Text)
class TableBlacklist(BaseModel):
language = TextField(null=True)
provider = TextField(null=True)
sonarr_episode_id = IntegerField(null=True)
sonarr_series_id = IntegerField(null=True)
subs_id = TextField(null=True)
timestamp = DateTimeField(null=True)
class TableBlacklist(Base):
__tablename__ = 'table_blacklist'
class Meta:
table_name = 'table_blacklist'
primary_key = False
id = mapped_column(Integer, primary_key=True)
language = mapped_column(Text)
provider = mapped_column(Text)
sonarr_episode_id = mapped_column(Integer, ForeignKey('table_episodes.sonarrEpisodeId', ondelete='CASCADE'))
sonarr_series_id = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
timestamp = mapped_column(DateTime, default=datetime.now)
class TableBlacklistMovie(BaseModel):
language = TextField(null=True)
provider = TextField(null=True)
radarr_id = IntegerField(null=True)
subs_id = TextField(null=True)
timestamp = DateTimeField(null=True)
class TableBlacklistMovie(Base):
__tablename__ = 'table_blacklist_movie'
class Meta:
table_name = 'table_blacklist_movie'
primary_key = False
id = mapped_column(Integer, primary_key=True)
language = mapped_column(Text)
provider = mapped_column(Text)
radarr_id = mapped_column(Integer, ForeignKey('table_movies.radarrId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
timestamp = mapped_column(DateTime, default=datetime.now)
class TableEpisodes(BaseModel):
rowid = RowIDField()
audio_codec = TextField(null=True)
audio_language = TextField(null=True)
episode = IntegerField()
episode_file_id = IntegerField(null=True)
failedAttempts = TextField(null=True)
ffprobe_cache = BlobField(null=True)
file_size = BigIntegerField(default=0, null=True)
format = TextField(null=True)
missing_subtitles = TextField(null=True)
monitored = TextField(null=True)
path = TextField()
resolution = TextField(null=True)
sceneName = TextField(null=True)
season = IntegerField()
sonarrEpisodeId = IntegerField(unique=True)
sonarrSeriesId = IntegerField()
subtitles = TextField(null=True)
title = TextField()
video_codec = TextField(null=True)
class TableEpisodes(Base):
__tablename__ = 'table_episodes'
class Meta:
table_name = 'table_episodes'
primary_key = False
audio_codec = mapped_column(Text)
audio_language = mapped_column(Text)
episode = mapped_column(Integer, nullable=False)
episode_file_id = mapped_column(Integer)
failedAttempts = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(Integer)
format = mapped_column(Text)
missing_subtitles = mapped_column(Text)
monitored = mapped_column(Text)
path = mapped_column(Text, nullable=False)
resolution = mapped_column(Text)
sceneName = mapped_column(Text)
season = mapped_column(Integer, nullable=False)
sonarrEpisodeId = mapped_column(Integer, primary_key=True)
sonarrSeriesId = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subtitles = mapped_column(Text)
title = mapped_column(Text, nullable=False)
video_codec = mapped_column(Text)
class TableHistory(BaseModel):
action = IntegerField()
description = TextField()
id = AutoField()
language = TextField(null=True)
provider = TextField(null=True)
score = IntegerField(null=True)
sonarrEpisodeId = IntegerField()
sonarrSeriesId = IntegerField()
subs_id = TextField(null=True)
subtitles_path = TextField(null=True)
timestamp = DateTimeField()
video_path = TextField(null=True)
class TableHistory(Base):
__tablename__ = 'table_history'
class Meta:
table_name = 'table_history'
id = mapped_column(Integer, primary_key=True)
action = mapped_column(Integer, nullable=False)
description = mapped_column(Text, nullable=False)
language = mapped_column(Text)
provider = mapped_column(Text)
score = mapped_column(Integer)
sonarrEpisodeId = mapped_column(Integer, ForeignKey('table_episodes.sonarrEpisodeId', ondelete='CASCADE'))
sonarrSeriesId = mapped_column(Integer, ForeignKey('table_shows.sonarrSeriesId', ondelete='CASCADE'))
subs_id = mapped_column(Text)
subtitles_path = mapped_column(Text)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
video_path = mapped_column(Text)
matched = mapped_column(Text)
not_matched = mapped_column(Text)
class TableHistoryMovie(BaseModel):
action = IntegerField()
description = TextField()
id = AutoField()
language = TextField(null=True)
provider = TextField(null=True)
radarrId = IntegerField()
score = IntegerField(null=True)
subs_id = TextField(null=True)
subtitles_path = TextField(null=True)
timestamp = DateTimeField()
video_path = TextField(null=True)
class TableHistoryMovie(Base):
__tablename__ = 'table_history_movie'
class Meta:
table_name = 'table_history_movie'
id = mapped_column(Integer, primary_key=True)
action = mapped_column(Integer, nullable=False)
description = mapped_column(Text, nullable=False)
language = mapped_column(Text)
provider = mapped_column(Text)
radarrId = mapped_column(Integer, ForeignKey('table_movies.radarrId', ondelete='CASCADE'))
score = mapped_column(Integer)
subs_id = mapped_column(Text)
subtitles_path = mapped_column(Text)
timestamp = mapped_column(DateTime, nullable=False, default=datetime.now)
video_path = mapped_column(Text)
matched = mapped_column(Text)
not_matched = mapped_column(Text)
class TableLanguagesProfiles(BaseModel):
cutoff = IntegerField(null=True)
originalFormat = BooleanField(null=True)
items = TextField()
name = TextField()
profileId = AutoField()
mustContain = TextField(null=True)
mustNotContain = TextField(null=True)
class TableLanguagesProfiles(Base):
__tablename__ = 'table_languages_profiles'
class Meta:
table_name = 'table_languages_profiles'
profileId = mapped_column(Integer, primary_key=True)
cutoff = mapped_column(Integer)
originalFormat = mapped_column(Integer)
items = mapped_column(Text, nullable=False)
name = mapped_column(Text, nullable=False)
mustContain = mapped_column(Text)
mustNotContain = mapped_column(Text)
class TableMovies(BaseModel):
rowid = RowIDField()
alternativeTitles = TextField(null=True)
audio_codec = TextField(null=True)
audio_language = TextField(null=True)
failedAttempts = TextField(null=True)
fanart = TextField(null=True)
ffprobe_cache = BlobField(null=True)
file_size = BigIntegerField(default=0, null=True)
format = TextField(null=True)
imdbId = TextField(null=True)
missing_subtitles = TextField(null=True)
monitored = TextField(null=True)
movie_file_id = IntegerField(null=True)
overview = TextField(null=True)
path = TextField(unique=True)
poster = TextField(null=True)
profileId = IntegerField(null=True)
radarrId = IntegerField(unique=True)
resolution = TextField(null=True)
sceneName = TextField(null=True)
sortTitle = TextField(null=True)
subtitles = TextField(null=True)
tags = TextField(null=True)
title = TextField()
tmdbId = TextField(unique=True)
video_codec = TextField(null=True)
year = TextField(null=True)
class TableMovies(Base):
__tablename__ = 'table_movies'
class Meta:
table_name = 'table_movies'
alternativeTitles = mapped_column(Text)
audio_codec = mapped_column(Text)
audio_language = mapped_column(Text)
failedAttempts = mapped_column(Text)
fanart = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(Integer)
format = mapped_column(Text)
imdbId = mapped_column(Text)
missing_subtitles = mapped_column(Text)
monitored = mapped_column(Text)
movie_file_id = mapped_column(Integer)
overview = mapped_column(Text)
path = mapped_column(Text, nullable=False, unique=True)
poster = mapped_column(Text)
profileId = mapped_column(Integer, ForeignKey('table_languages_profiles.profileId', ondelete='SET NULL'))
radarrId = mapped_column(Integer, primary_key=True)
resolution = mapped_column(Text)
sceneName = mapped_column(Text)
sortTitle = mapped_column(Text)
subtitles = mapped_column(Text)
tags = mapped_column(Text)
title = mapped_column(Text, nullable=False)
tmdbId = mapped_column(Text, nullable=False, unique=True)
video_codec = mapped_column(Text)
year = mapped_column(Text)
class TableMoviesRootfolder(BaseModel):
accessible = IntegerField(null=True)
error = TextField(null=True)
id = IntegerField(null=True)
path = TextField(null=True)
class TableMoviesRootfolder(Base):
__tablename__ = 'table_movies_rootfolder'
class Meta:
table_name = 'table_movies_rootfolder'
primary_key = False
accessible = mapped_column(Integer)
error = mapped_column(Text)
id = mapped_column(Integer, primary_key=True)
path = mapped_column(Text)
class TableSettingsLanguages(BaseModel):
code2 = TextField(null=True)
code3 = TextField(primary_key=True)
code3b = TextField(null=True)
enabled = IntegerField(null=True)
name = TextField()
class TableSettingsLanguages(Base):
__tablename__ = 'table_settings_languages'
class Meta:
table_name = 'table_settings_languages'
code3 = mapped_column(Text, primary_key=True)
code2 = mapped_column(Text)
code3b = mapped_column(Text)
enabled = mapped_column(Integer)
name = mapped_column(Text, nullable=False)
class TableSettingsNotifier(BaseModel):
enabled = IntegerField(null=True)
name = TextField(null=True, primary_key=True)
url = TextField(null=True)
class TableSettingsNotifier(Base):
__tablename__ = 'table_settings_notifier'
class Meta:
table_name = 'table_settings_notifier'
name = mapped_column(Text, primary_key=True)
enabled = mapped_column(Integer)
url = mapped_column(Text)
class TableShows(BaseModel):
alternativeTitles = TextField(null=True)
audio_language = TextField(null=True)
fanart = TextField(null=True)
imdbId = TextField(default='""', null=True)
monitored = TextField(null=True)
overview = TextField(null=True)
path = TextField(unique=True)
poster = TextField(null=True)
profileId = IntegerField(null=True)
seriesType = TextField(null=True)
sonarrSeriesId = IntegerField(unique=True)
sortTitle = TextField(null=True)
tags = TextField(null=True)
title = TextField()
tvdbId = AutoField()
year = TextField(null=True)
class TableShows(Base):
__tablename__ = 'table_shows'
class Meta:
table_name = 'table_shows'
tvdbId = mapped_column(Integer)
alternativeTitles = mapped_column(Text)
audio_language = mapped_column(Text)
fanart = mapped_column(Text)
imdbId = mapped_column(Text)
monitored = mapped_column(Text)
overview = mapped_column(Text)
path = mapped_column(Text, nullable=False, unique=True)
poster = mapped_column(Text)
profileId = mapped_column(Integer, ForeignKey('table_languages_profiles.profileId', ondelete='SET NULL'))
seriesType = mapped_column(Text)
sonarrSeriesId = mapped_column(Integer, primary_key=True)
sortTitle = mapped_column(Text)
tags = mapped_column(Text)
title = mapped_column(Text, nullable=False)
year = mapped_column(Text)
class TableShowsRootfolder(BaseModel):
accessible = IntegerField(null=True)
error = TextField(null=True)
id = IntegerField(null=True)
path = TextField(null=True)
class TableShowsRootfolder(Base):
__tablename__ = 'table_shows_rootfolder'
class Meta:
table_name = 'table_shows_rootfolder'
primary_key = False
class TableCustomScoreProfiles(BaseModel):
    # Peewee model for user-defined subtitle scoring profiles.
    id = AutoField()  # auto-incrementing primary key
    name = TextField(null=True)
    media = TextField(null=True)  # presumably 'series' or 'movies' — TODO confirm against writers
    score = IntegerField(null=True)

    class Meta:
        table_name = 'table_custom_score_profiles'
class TableCustomScoreProfileConditions(BaseModel):
    # Peewee model for the individual conditions attached to a custom score profile.
    profile_id = ForeignKeyField(TableCustomScoreProfiles, to_field="id")  # owning profile
    type = TextField(null=True)  # provider, uploader, regex, etc
    value = TextField(null=True)  # opensubtitles, jane_doe, [a-z], etc
    required = BooleanField(default=False)  # condition must match for the profile to apply
    negate = BooleanField(default=False)  # invert the condition's match result

    class Meta:
        table_name = 'table_custom_score_profile_conditions'
class TableAnnouncements(BaseModel):
    # Peewee model storing announcements shown to the user.
    timestamp = DateTimeField()
    hash = TextField(null=True, unique=True)  # de-duplication key for an announcement
    text = TextField(null=True)

    class Meta:
        table_name = 'table_announcements'
accessible = mapped_column(Integer)
error = mapped_column(Text)
id = mapped_column(Integer, primary_key=True)
path = mapped_column(Text)
def init_db():
# Create tables if they don't exists.
database.create_tables([System,
TableBlacklist,
TableBlacklistMovie,
TableEpisodes,
TableHistory,
TableHistoryMovie,
TableLanguagesProfiles,
TableMovies,
TableMoviesRootfolder,
TableSettingsLanguages,
TableSettingsNotifier,
TableShows,
TableShowsRootfolder,
TableCustomScoreProfiles,
TableCustomScoreProfileConditions,
TableAnnouncements])
database.begin()
# Create tables if they don't exist.
metadata.create_all(engine)
def create_db_revision(app):
logging.info("Creating a new database revision for future migration")
app.config["SQLALCHEMY_DATABASE_URI"] = url
db = SQLAlchemy(app, metadata=metadata)
with app.app_context():
flask_migrate.Migrate(app, db, render_as_batch=True)
flask_migrate.migrate(directory=migrations_directory)
db.engine.dispose()
def migrate_db(app):
logging.debug("Upgrading database schema")
app.config["SQLALCHEMY_DATABASE_URI"] = url
db = SQLAlchemy(app, metadata=metadata)
insp = inspect(engine)
alembic_temp_tables_list = [x for x in insp.get_table_names() if x.startswith('_alembic_tmp_')]
for table in alembic_temp_tables_list:
database.execute(text(f"DROP TABLE IF EXISTS {table}"))
with app.app_context():
flask_migrate.Migrate(app, db, render_as_batch=True)
flask_migrate.upgrade(directory=migrations_directory)
db.engine.dispose()
# add the system table single row if it's not existing
# we must retry until the tables are created
tables_created = False
while not tables_created:
try:
if not System.select().count():
System.insert({System.configured: '0', System.updated: '0'}).execute()
except Exception:
time.sleep(0.1)
else:
tables_created = True
def migrate_db():
    """Ad-hoc, in-place schema migration for the Peewee-managed database.

    Each step inspects the live schema first (column present/absent, column
    data type), so the function is idempotent and safe to run at every
    startup: adds columns introduced over time, converts legacy
    epoch-integer timestamps to DATETIME, drops obsolete columns/tables and
    renames columns from older schemas.
    """
    # Snapshot of current column names, one list per table.
    table_shows = [t.name for t in database.get_columns('table_shows')]
    table_episodes = [t.name for t in database.get_columns('table_episodes')]
    table_movies = [t.name for t in database.get_columns('table_movies')]
    table_history = [t.name for t in database.get_columns('table_history')]
    table_history_movie = [t.name for t in database.get_columns('table_history_movie')]
    table_languages_profiles = [t.name for t in database.get_columns('table_languages_profiles')]

    # --- table_shows: add columns expected by newer versions ---
    if "year" not in table_shows:
        migrate(migrator.add_column('table_shows', 'year', TextField(null=True)))
    if "alternativeTitle" not in table_shows:
        migrate(migrator.add_column('table_shows', 'alternativeTitle', TextField(null=True)))
    if "tags" not in table_shows:
        migrate(migrator.add_column('table_shows', 'tags', TextField(default='[]', null=True)))
    if "seriesType" not in table_shows:
        migrate(migrator.add_column('table_shows', 'seriesType', TextField(default='""', null=True)))
    if "imdbId" not in table_shows:
        migrate(migrator.add_column('table_shows', 'imdbId', TextField(default='""', null=True)))
    if "profileId" not in table_shows:
        migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
    # NOTE(review): exact duplicate of the previous check — redundant but harmless.
    if "profileId" not in table_shows:
        migrate(migrator.add_column('table_shows', 'profileId', IntegerField(null=True)))
    if "monitored" not in table_shows:
        migrate(migrator.add_column('table_shows', 'monitored', TextField(null=True)))

    # --- table_episodes ---
    if "format" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'format', TextField(null=True)))
    if "resolution" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'resolution', TextField(null=True)))
    if "video_codec" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'video_codec', TextField(null=True)))
    if "audio_codec" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'audio_codec', TextField(null=True)))
    if "episode_file_id" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'episode_file_id', IntegerField(null=True)))
    if "audio_language" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'audio_language', TextField(null=True)))
    if "file_size" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'file_size', BigIntegerField(default=0, null=True)))
    if "ffprobe_cache" not in table_episodes:
        migrate(migrator.add_column('table_episodes', 'ffprobe_cache', BlobField(null=True)))

    # --- table_movies ---
    if "sortTitle" not in table_movies:
        migrate(migrator.add_column('table_movies', 'sortTitle', TextField(null=True)))
    if "year" not in table_movies:
        migrate(migrator.add_column('table_movies', 'year', TextField(null=True)))
    if "alternativeTitles" not in table_movies:
        migrate(migrator.add_column('table_movies', 'alternativeTitles', TextField(null=True)))
    if "format" not in table_movies:
        migrate(migrator.add_column('table_movies', 'format', TextField(null=True)))
    if "resolution" not in table_movies:
        migrate(migrator.add_column('table_movies', 'resolution', TextField(null=True)))
    if "video_codec" not in table_movies:
        migrate(migrator.add_column('table_movies', 'video_codec', TextField(null=True)))
    if "audio_codec" not in table_movies:
        migrate(migrator.add_column('table_movies', 'audio_codec', TextField(null=True)))
    if "imdbId" not in table_movies:
        migrate(migrator.add_column('table_movies', 'imdbId', TextField(null=True)))
    if "movie_file_id" not in table_movies:
        migrate(migrator.add_column('table_movies', 'movie_file_id', IntegerField(null=True)))
    if "tags" not in table_movies:
        migrate(migrator.add_column('table_movies', 'tags', TextField(default='[]', null=True)))
    if "profileId" not in table_movies:
        migrate(migrator.add_column('table_movies', 'profileId', IntegerField(null=True)))
    if "file_size" not in table_movies:
        migrate(migrator.add_column('table_movies', 'file_size', BigIntegerField(default=0, null=True)))
    if "ffprobe_cache" not in table_movies:
        migrate(migrator.add_column('table_movies', 'ffprobe_cache', BlobField(null=True)))

    # --- table_history / table_history_movie: subtitle download history ---
    if "video_path" not in table_history:
        migrate(migrator.add_column('table_history', 'video_path', TextField(null=True)))
    if "language" not in table_history:
        migrate(migrator.add_column('table_history', 'language', TextField(null=True)))
    if "provider" not in table_history:
        migrate(migrator.add_column('table_history', 'provider', TextField(null=True)))
    if "score" not in table_history:
        migrate(migrator.add_column('table_history', 'score', TextField(null=True)))
    if "subs_id" not in table_history:
        migrate(migrator.add_column('table_history', 'subs_id', TextField(null=True)))
    if "subtitles_path" not in table_history:
        migrate(migrator.add_column('table_history', 'subtitles_path', TextField(null=True)))
    if "video_path" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'video_path', TextField(null=True)))
    if "language" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'language', TextField(null=True)))
    if "provider" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'provider', TextField(null=True)))
    if "score" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'score', TextField(null=True)))
    if "subs_id" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'subs_id', TextField(null=True)))
    if "subtitles_path" not in table_history_movie:
        migrate(migrator.add_column('table_history_movie', 'subtitles_path', TextField(null=True)))

    # --- table_languages_profiles ---
    if "mustContain" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'mustContain', TextField(null=True)))
    if "mustNotContain" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'mustNotContain', TextField(null=True)))
    if "originalFormat" not in table_languages_profiles:
        migrate(migrator.add_column('table_languages_profiles', 'originalFormat', BooleanField(null=True)))

    # Drop obsolete per-media language columns (superseded by language profiles).
    if "languages" in table_shows:
        migrate(migrator.drop_column('table_shows', 'languages'))
    if "hearing_impaired" in table_shows:
        migrate(migrator.drop_column('table_shows', 'hearing_impaired'))
    if "languages" in table_movies:
        migrate(migrator.drop_column('table_movies', 'languages'))
    if "hearing_impaired" in table_movies:
        migrate(migrator.drop_column('table_movies', 'hearing_impaired'))

    # Convert legacy epoch-integer 'timestamp' columns to DATETIME, then
    # rewrite the stored values.  The type names cover SQLite ("DATETIME")
    # and PostgreSQL ("timestamp without time zone").
    if not any(
        x
        for x in database.get_columns('table_blacklist')
        if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_blacklist', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableBlacklist.select()
        for item in update:
            item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()

    if not any(
        x
        for x in database.get_columns('table_blacklist_movie')
        if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_blacklist_movie', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableBlacklistMovie.select()
        for item in update:
            item.update({"timestamp": datetime.fromtimestamp(int(item.timestamp))}).execute()

    # 'score' was historically stored as text: convert to integer when needed.
    if not any(
            x for x in database.get_columns('table_history') if x.name == "score" and x.data_type.lower() == "integer"):
        migrate(migrator.alter_column_type('table_history', 'score', IntegerField(null=True)))
    if not any(
        x
        for x in database.get_columns('table_history')
        if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_history', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableHistory.select()
        list_to_update = []
        for i, item in enumerate(update):
            item.timestamp = datetime.fromtimestamp(int(item.timestamp))
            list_to_update.append(item)
            # Flush in batches of 100 to keep each bulk_update statement small.
            # NOTE(review): the first flush happens at i == 0 with one row.
            if i % 100 == 0:
                TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])
                list_to_update = []
        if list_to_update:
            TableHistory.bulk_update(list_to_update, fields=[TableHistory.timestamp])

    if not any(x for x in database.get_columns('table_history_movie') if
               x.name == "score" and x.data_type.lower() == "integer"):
        migrate(migrator.alter_column_type('table_history_movie', 'score', IntegerField(null=True)))
    if not any(
        x
        for x in database.get_columns('table_history_movie')
        if x.name == "timestamp" and x.data_type in ["DATETIME", "timestamp without time zone"]
    ):
        migrate(migrator.alter_column_type('table_history_movie', 'timestamp', DateTimeField(default=datetime.now)))
        update = TableHistoryMovie.select()
        list_to_update = []
        for i, item in enumerate(update):
            item.timestamp = datetime.fromtimestamp(int(item.timestamp))
            list_to_update.append(item)
            if i % 100 == 0:
                TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])
                list_to_update = []
        if list_to_update:
            TableHistoryMovie.bulk_update(list_to_update, fields=[TableHistoryMovie.timestamp])

    # if not any(x for x in database.get_columns('table_movies') if x.name == "monitored" and x.data_type == "BOOLEAN"):
    #     migrate(migrator.alter_column_type('table_movies', 'monitored', BooleanField(null=True)))

    # Provider settings moved out of the database: drop the old table entirely.
    if database.get_columns('table_settings_providers'):
        database.execute_sql('drop table if exists table_settings_providers;')

    # Column renames from older schemas.
    if "alternateTitles" in table_shows:
        migrate(migrator.rename_column('table_shows', 'alternateTitles', "alternativeTitles"))
    if "scene_name" in table_episodes:
        migrate(migrator.rename_column('table_episodes', 'scene_name', "sceneName"))
class SqliteDictPathMapper:
    """Apply the configured path mappings to 'path' values read from the DB.

    Both methods accept either a list of row dicts, a single row dict, or a
    bare path value.  Dicts are mutated in place (the method then returns
    None); for a bare value the mapped path is returned instead.
    """

    def __init__(self):
        pass

    @staticmethod
    def path_replace(values_dict):
        """Rewrite series/episode paths through path_mappings.path_replace.

        Mutates the 'path' entry of each dict in place; returns the mapped
        value only when given a bare (non-list, non-dict) path.
        """
        # isinstance instead of `type(...) is ...` so dict/list subclasses
        # (e.g. rows from DB helpers) are handled too.
        if isinstance(values_dict, list):
            for item in values_dict:
                item['path'] = path_mappings.path_replace(item['path'])
        elif isinstance(values_dict, dict):
            values_dict['path'] = path_mappings.path_replace(values_dict['path'])
        else:
            return path_mappings.path_replace(values_dict)

    @staticmethod
    def path_replace_movie(values_dict):
        """Same contract as path_replace(), using the movie path mappings."""
        if isinstance(values_dict, list):
            for item in values_dict:
                item['path'] = path_mappings.path_replace_movie(item['path'])
        elif isinstance(values_dict, dict):
            values_dict['path'] = path_mappings.path_replace_movie(values_dict['path'])
        else:
            return path_mappings.path_replace_movie(values_dict)


dict_mapper = SqliteDictPathMapper()
if not database.execute(
select(System)) \
.first():
database.execute(
insert(System)
.values(configured='0', updated='0'))
def get_exclusion_clause(exclusion_type):
@ -568,12 +335,12 @@ def get_exclusion_clause(exclusion_type):
if exclusion_type == 'series':
monitoredOnly = settings.sonarr.getboolean('only_monitored')
if monitoredOnly:
where_clause.append((TableEpisodes.monitored == True)) # noqa E712
where_clause.append((TableShows.monitored == True)) # noqa E712
where_clause.append((TableEpisodes.monitored == 'True')) # noqa E712
where_clause.append((TableShows.monitored == 'True')) # noqa E712
else:
monitoredOnly = settings.radarr.getboolean('only_monitored')
if monitoredOnly:
where_clause.append((TableMovies.monitored == True)) # noqa E712
where_clause.append((TableMovies.monitored == 'True')) # noqa E712
if exclusion_type == 'series':
typesList = get_array_from(settings.sonarr.excluded_series_types)
@ -589,20 +356,24 @@ def get_exclusion_clause(exclusion_type):
@region.cache_on_arguments()
def update_profile_id_list():
profile_id_list = TableLanguagesProfiles.select(TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
TableLanguagesProfiles.cutoff,
TableLanguagesProfiles.items,
TableLanguagesProfiles.mustContain,
TableLanguagesProfiles.mustNotContain,
TableLanguagesProfiles.originalFormat).dicts()
profile_id_list = list(profile_id_list)
for profile in profile_id_list:
profile['items'] = json.loads(profile['items'])
profile['mustContain'] = ast.literal_eval(profile['mustContain']) if profile['mustContain'] else []
profile['mustNotContain'] = ast.literal_eval(profile['mustNotContain']) if profile['mustNotContain'] else []
return profile_id_list
return [{
'profileId': x.profileId,
'name': x.name,
'cutoff': x.cutoff,
'items': json.loads(x.items),
'mustContain': ast.literal_eval(x.mustContain) if x.mustContain else [],
'mustNotContain': ast.literal_eval(x.mustNotContain) if x.mustNotContain else [],
'originalFormat': x.originalFormat,
} for x in database.execute(
select(TableLanguagesProfiles.profileId,
TableLanguagesProfiles.name,
TableLanguagesProfiles.cutoff,
TableLanguagesProfiles.items,
TableLanguagesProfiles.mustContain,
TableLanguagesProfiles.mustNotContain,
TableLanguagesProfiles.originalFormat))
.all()
]
def get_profiles_list(profile_id=None):
@ -617,36 +388,15 @@ def get_profiles_list(profile_id=None):
def get_desired_languages(profile_id):
languages = []
profile_id_list = update_profile_id_list()
if profile_id and profile_id != 'null':
for profile in profile_id_list:
profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat = profile.values()
try:
profile_id_int = int(profile_id)
except ValueError:
continue
else:
if profileId == profile_id_int:
languages = [x['language'] for x in items]
break
return languages
for profile in update_profile_id_list():
if profile['profileId'] == profile_id:
return [x['language'] for x in profile['items']]
def get_profile_id_name(profile_id):
name_from_id = None
profile_id_list = update_profile_id_list()
if profile_id and profile_id != 'null':
for profile in profile_id_list:
profileId, name, cutoff, items, mustContain, mustNotContain, originalFormat = profile.values()
if profileId == int(profile_id):
name_from_id = name
break
return name_from_id
for profile in update_profile_id_list():
if profile['profileId'] == profile_id:
return profile['name']
def get_profile_cutoff(profile_id):
@ -703,23 +453,27 @@ def get_audio_profile_languages(audio_languages_list_str):
def get_profile_id(series_id=None, episode_id=None, movie_id=None):
if series_id:
data = TableShows.select(TableShows.profileId) \
.where(TableShows.sonarrSeriesId == series_id) \
.get_or_none()
data = database.execute(
select(TableShows.profileId)
.where(TableShows.sonarrSeriesId == series_id))\
.first()
if data:
return data.profileId
elif episode_id:
data = TableShows.select(TableShows.profileId) \
.join(TableEpisodes, on=(TableShows.sonarrSeriesId == TableEpisodes.sonarrSeriesId)) \
.where(TableEpisodes.sonarrEpisodeId == episode_id) \
.get_or_none()
data = database.execute(
select(TableShows.profileId)
.select_from(TableShows)
.join(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == episode_id)) \
.first()
if data:
return data.profileId
elif movie_id:
data = TableMovies.select(TableMovies.profileId) \
.where(TableMovies.radarrId == movie_id) \
.get_or_none()
data = database.execute(
select(TableMovies.profileId)
.where(TableMovies.radarrId == movie_id))\
.first()
if data:
return data.profileId

View File

@ -30,6 +30,8 @@ parser.add_argument('--no-tasks', default=False, type=bool, const=True, metavar=
help="Disable all tasks (default: False)")
parser.add_argument('--no-signalr', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Disable SignalR connections to Sonarr and/or Radarr (default: False)")
parser.add_argument('--create-db-revision', default=False, type=bool, const=True, metavar="BOOL", nargs="?",
help="Create a new database revision that will be used to migrate database")
if not no_cli:

View File

@ -10,6 +10,8 @@ import pretty
import time
import socket
import requests
import traceback
import re
from subliminal_patch.exceptions import TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked, \
MustGetBlacklisted, SearchLimitReached
@ -25,6 +27,10 @@ from app.event_handler import event_stream
from utilities.binaries import get_binary
from radarr.blacklist import blacklist_log_movie
from sonarr.blacklist import blacklist_log
from utilities.analytics import event_tracker
_TRACEBACK_RE = re.compile(r'File "(.*?providers/.*?)", line (\d+)')
def time_until_midnight(timezone):
@ -331,7 +337,7 @@ def provider_throttle(name, exception):
throttle_until = datetime.datetime.now() + throttle_delta
if cls_name not in VALID_COUNT_EXCEPTIONS or throttled_count(name):
if cls_name == 'ValueError' and exception.args[0].startswith('unsupported pickle protocol'):
if cls_name == 'ValueError' and isinstance(exception.args, tuple) and len(exception.args) and exception.args[0].startswith('unsupported pickle protocol'):
for fn in subliminal_cache_region.backend.all_filenames:
try:
os.remove(fn)
@ -341,13 +347,36 @@ def provider_throttle(name, exception):
tp[name] = (cls_name, throttle_until, throttle_description)
set_throttled_providers(str(tp))
trac_info = _get_traceback_info(exception)
logging.info("Throttling %s for %s, until %s, because of: %s. Exception info: %r", name,
throttle_description, throttle_until.strftime("%y/%m/%d %H:%M"), cls_name, exception.args[0]
if exception.args else None)
throttle_description, throttle_until.strftime("%y/%m/%d %H:%M"), cls_name, trac_info)
event_tracker.track_throttling(provider=name, exception_name=cls_name, exception_info=trac_info)
update_throttled_provider()
def _get_traceback_info(exc: Exception):
    """Build a short (max 100 chars) description of *exc* for logging/tracking.

    When the traceback passed through a provider module, the last matching
    "file@line" location is appended to the exception message.
    """
    formatted = traceback.format_exception(type(exc), exc, exc.__traceback__)
    message = str(exc).replace("\n", " ").strip()
    locations = _TRACEBACK_RE.findall(" ".join(formatted))

    # Value info max chars len is 100
    if not locations:
        return message[:100]

    src_file, src_line = locations[-1]
    suffix = f"' ~ {os.path.basename(src_file)}@{src_line}"[:90]
    prefix = f"'{message}"[:100 - len(suffix)]
    return prefix + suffix
def throttled_count(name):
global throttle_count
if name in list(throttle_count.keys()):

View File

@ -59,6 +59,7 @@ class NoExceptionFormatter(logging.Formatter):
def configure_logging(debug=False):
warnings.simplefilter('ignore', category=ResourceWarning)
warnings.simplefilter('ignore', category=PytzUsageWarning)
# warnings.simplefilter('ignore', category=SAWarning)
if not debug:
log_level = "INFO"
@ -93,7 +94,7 @@ def configure_logging(debug=False):
logger.addHandler(fh)
if debug:
logging.getLogger("peewee").setLevel(logging.DEBUG)
logging.getLogger("alembic.runtime.migration").setLevel(logging.DEBUG)
logging.getLogger("apscheduler").setLevel(logging.DEBUG)
logging.getLogger("subliminal").setLevel(logging.DEBUG)
logging.getLogger("subliminal_patch").setLevel(logging.DEBUG)
@ -111,7 +112,7 @@ def configure_logging(debug=False):
logging.debug('Operating system: %s', platform.platform())
logging.debug('Python version: %s', platform.python_version())
else:
logging.getLogger("peewee").setLevel(logging.CRITICAL)
logging.getLogger("alembic.runtime.migration").setLevel(logging.CRITICAL)
logging.getLogger("apscheduler").setLevel(logging.WARNING)
logging.getLogger("apprise").setLevel(logging.WARNING)
logging.getLogger("subliminal").setLevel(logging.CRITICAL)

View File

@ -3,99 +3,70 @@
import apprise
import logging
from .database import TableSettingsNotifier, TableEpisodes, TableShows, TableMovies
from .database import TableSettingsNotifier, TableEpisodes, TableShows, TableMovies, database, insert, delete, select
def update_notifier():
# define apprise object
a = apprise.Apprise()
# Retrieve all of the details
# Retrieve all the details
results = a.details()
notifiers_new = []
notifiers_old = []
notifiers_added = []
notifiers_kept = []
notifiers_current_db = TableSettingsNotifier.select(TableSettingsNotifier.name).dicts()
notifiers_current = []
for notifier in notifiers_current_db:
notifiers_current.append([notifier['name']])
notifiers_in_db = [row.name for row in
database.execute(
select(TableSettingsNotifier.name))
.all()]
for x in results['schemas']:
if [str(x['service_name'])] not in notifiers_current:
notifiers_new.append({'name': str(x['service_name']), 'enabled': 0})
if x['service_name'] not in notifiers_in_db:
notifiers_added.append({'name': str(x['service_name']), 'enabled': 0})
logging.debug('Adding new notifier agent: ' + str(x['service_name']))
else:
notifiers_old.append([str(x['service_name'])])
notifiers_kept.append(x['service_name'])
notifiers_to_delete = [item for item in notifiers_current if item not in notifiers_old]
TableSettingsNotifier.insert_many(notifiers_new).execute()
notifiers_to_delete = [item for item in notifiers_in_db if item not in notifiers_kept]
for item in notifiers_to_delete:
TableSettingsNotifier.delete().where(TableSettingsNotifier.name == item).execute()
database.execute(
delete(TableSettingsNotifier)
.where(TableSettingsNotifier.name == item))
database.execute(
insert(TableSettingsNotifier)
.values(notifiers_added))
def get_notifier_providers():
providers = TableSettingsNotifier.select(TableSettingsNotifier.name,
TableSettingsNotifier.url)\
.where(TableSettingsNotifier.enabled == 1)\
.dicts()
return providers
def get_series(sonarr_series_id):
data = TableShows.select(TableShows.title, TableShows.year)\
.where(TableShows.sonarrSeriesId == sonarr_series_id)\
.dicts()\
.get_or_none()
if not data:
return
return {'title': data['title'], 'year': data['year']}
def get_episode_name(sonarr_episode_id):
data = TableEpisodes.select(TableEpisodes.title, TableEpisodes.season, TableEpisodes.episode)\
.where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id)\
.dicts()\
.get_or_none()
if not data:
return
return data['title'], data['season'], data['episode']
def get_movie(radarr_id):
data = TableMovies.select(TableMovies.title, TableMovies.year)\
.where(TableMovies.radarrId == radarr_id)\
.dicts()\
.get_or_none()
if not data:
return
return {'title': data['title'], 'year': data['year']}
return database.execute(
select(TableSettingsNotifier.name, TableSettingsNotifier.url)
.where(TableSettingsNotifier.enabled == 1))\
.all()
def send_notifications(sonarr_series_id, sonarr_episode_id, message):
providers = get_notifier_providers()
if not len(providers):
return
series = get_series(sonarr_series_id)
series = database.execute(
select(TableShows.title, TableShows.year)
.where(TableShows.sonarrSeriesId == sonarr_series_id))\
.first()
if not series:
return
series_title = series['title']
series_year = series['year']
series_title = series.title
series_year = series.year
if series_year not in [None, '', '0']:
series_year = ' ({})'.format(series_year)
else:
series_year = ''
episode = get_episode_name(sonarr_episode_id)
episode = database.execute(
select(TableEpisodes.title, TableEpisodes.season, TableEpisodes.episode)
.where(TableEpisodes.sonarrEpisodeId == sonarr_episode_id))\
.first()
if not episode:
return
@ -104,13 +75,13 @@ def send_notifications(sonarr_series_id, sonarr_episode_id, message):
apobj = apprise.Apprise(asset=asset)
for provider in providers:
if provider['url'] is not None:
apobj.add(provider['url'])
if provider.url is not None:
apobj.add(provider.url)
apobj.notify(
title='Bazarr notification',
body="{}{} - S{:02d}E{:02d} - {} : {}".format(series_title, series_year, episode[1], episode[2], episode[0],
message),
body="{}{} - S{:02d}E{:02d} - {} : {}".format(series_title, series_year, episode.season, episode.episode,
episode.title, message),
)
@ -118,11 +89,14 @@ def send_notifications_movie(radarr_id, message):
providers = get_notifier_providers()
if not len(providers):
return
movie = get_movie(radarr_id)
movie = database.execute(
select(TableMovies.title, TableMovies.year)
.where(TableMovies.radarrId == radarr_id))\
.first()
if not movie:
return
movie_title = movie['title']
movie_year = movie['year']
movie_title = movie.title
movie_year = movie.year
if movie_year not in [None, '', '0']:
movie_year = ' ({})'.format(movie_year)
else:
@ -133,8 +107,8 @@ def send_notifications_movie(radarr_id, message):
apobj = apprise.Apprise(asset=asset)
for provider in providers:
if provider['url'] is not None:
apobj.add(provider['url'])
if provider.url is not None:
apobj.add(provider.url)
apobj.notify(
title='Bazarr notification',

View File

@ -19,7 +19,7 @@ import logging
from app.announcements import get_announcements_to_file
from sonarr.sync.series import update_series
from sonarr.sync.episodes import sync_episodes, update_all_episodes
from sonarr.sync.episodes import update_all_episodes
from radarr.sync.movies import update_movies, update_all_movies
from subtitles.wanted import wanted_search_missing_subtitles_series, wanted_search_missing_subtitles_movies
from subtitles.upgrade import upgrade_subtitles
@ -163,18 +163,14 @@ class Scheduler:
if settings.general.getboolean('use_sonarr'):
self.aps_scheduler.add_job(
update_series, IntervalTrigger(minutes=int(settings.sonarr.series_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_series', name='Update Series list from Sonarr',
replace_existing=True)
self.aps_scheduler.add_job(
sync_episodes, IntervalTrigger(minutes=int(settings.sonarr.episodes_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='sync_episodes', name='Sync episodes with Sonarr',
coalesce=True, misfire_grace_time=15, id='update_series', name='Sync with Sonarr',
replace_existing=True)
def __radarr_update_task(self):
if settings.general.getboolean('use_radarr'):
self.aps_scheduler.add_job(
update_movies, IntervalTrigger(minutes=int(settings.radarr.movies_sync)), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_movies', name='Update Movie list from Radarr',
coalesce=True, misfire_grace_time=15, id='update_movies', name='Sync with Radarr',
replace_existing=True)
def __cache_cleanup_task(self):
@ -210,18 +206,18 @@ class Scheduler:
self.aps_scheduler.add_job(
update_all_episodes, CronTrigger(hour=settings.sonarr.full_update_hour), max_instances=1,
coalesce=True, misfire_grace_time=15, id='update_all_episodes',
name='Update all Episode Subtitles from disk', replace_existing=True)
name='Index all Episode Subtitles from disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_episodes,
CronTrigger(day_of_week=settings.sonarr.full_update_day, hour=settings.sonarr.full_update_hour),
max_instances=1, coalesce=True, misfire_grace_time=15, id='update_all_episodes',
name='Update all Episode Subtitles from disk', replace_existing=True)
name='Index all Episode Subtitles from disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_episodes, CronTrigger(year='2100'), max_instances=1, coalesce=True,
misfire_grace_time=15, id='update_all_episodes',
name='Update all Episode Subtitles from disk', replace_existing=True)
name='Index all Episode Subtitles from disk', replace_existing=True)
def __radarr_full_update_task(self):
if settings.general.getboolean('use_radarr'):
@ -230,17 +226,17 @@ class Scheduler:
self.aps_scheduler.add_job(
update_all_movies, CronTrigger(hour=settings.radarr.full_update_hour), max_instances=1,
coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Update all Movie Subtitles from disk', replace_existing=True)
id='update_all_movies', name='Index all Movie Subtitles from disk', replace_existing=True)
elif full_update == "Weekly":
self.aps_scheduler.add_job(
update_all_movies,
CronTrigger(day_of_week=settings.radarr.full_update_day, hour=settings.radarr.full_update_hour),
max_instances=1, coalesce=True, misfire_grace_time=15, id='update_all_movies',
name='Update all Movie Subtitles from disk', replace_existing=True)
name='Index all Movie Subtitles from disk', replace_existing=True)
elif full_update == "Manually":
self.aps_scheduler.add_job(
update_all_movies, CronTrigger(year='2100'), max_instances=1, coalesce=True, misfire_grace_time=15,
id='update_all_movies', name='Update all Movie Subtitles from disk', replace_existing=True)
id='update_all_movies', name='Index all Movie Subtitles from disk', replace_existing=True)
def __update_bazarr_task(self):
if not args.no_update and os.environ["BAZARR_VERSION"] != '':

View File

@ -19,7 +19,7 @@ from sonarr.sync.series import update_series, update_one_series
from radarr.sync.movies import update_movies, update_one_movie
from sonarr.info import get_sonarr_info, url_sonarr
from radarr.info import url_radarr
from .database import TableShows
from .database import TableShows, TableMovies, database, select
from .config import settings
from .scheduler import scheduler
@ -73,7 +73,6 @@ class SonarrSignalrClientLegacy:
logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1)
def stop(self, log=True):
try:
@ -150,7 +149,6 @@ class SonarrSignalrClient:
logging.info('BAZARR SignalR client for Sonarr is connected and waiting for events.')
if not args.dev:
scheduler.add_job(update_series, kwargs={'send_event': True}, max_instances=1)
scheduler.add_job(sync_episodes, kwargs={'send_event': True}, max_instances=1)
def on_reconnect_handler(self):
self.connected = False
@ -266,28 +264,39 @@ def dispatcher(data):
series_title = data['body']['resource']['series']['title']
series_year = data['body']['resource']['series']['year']
else:
series_metadata = TableShows.select(TableShows.title, TableShows.year)\
.where(TableShows.sonarrSeriesId == data['body']['resource']['seriesId'])\
.dicts()\
.get_or_none()
series_metadata = database.execute(
select(TableShows.title, TableShows.year)
.where(TableShows.sonarrSeriesId == data['body']['resource']['seriesId']))\
.first()
if series_metadata:
series_title = series_metadata['title']
series_year = series_metadata['year']
series_title = series_metadata.title
series_year = series_metadata.year
episode_title = data['body']['resource']['title']
season_number = data['body']['resource']['seasonNumber']
episode_number = data['body']['resource']['episodeNumber']
elif topic == 'movie':
movie_title = data['body']['resource']['title']
movie_year = data['body']['resource']['year']
if action == 'deleted':
existing_movie_details = database.execute(
select(TableMovies.title, TableMovies.year)
.where(TableMovies.radarrId == media_id)) \
.first()
if existing_movie_details:
movie_title = existing_movie_details.title
movie_year = existing_movie_details.year
else:
return
else:
movie_title = data['body']['resource']['title']
movie_year = data['body']['resource']['year']
except KeyError:
return
if topic == 'series':
logging.debug(f'Event received from Sonarr for series: {series_title} ({series_year})')
update_one_series(series_id=media_id, action=action, send_event=False)
update_one_series(series_id=media_id, action=action)
if episodesChanged:
# this will happen if a season monitored status is changed.
sync_episodes(series_id=media_id, send_event=False)
sync_episodes(series_id=media_id, send_event=True)
elif topic == 'episode':
logging.debug(f'Event received from Sonarr for episode: {series_title} ({series_year}) - '
f'S{season_number:0>2}E{episode_number:0>2} - {episode_title}')

View File

@ -49,7 +49,8 @@ def check_login(actual_method):
def wrapper(*args, **kwargs):
if settings.auth.type == 'basic':
auth = request.authorization
if not (auth and check_credentials(request.authorization.username, request.authorization.password)):
if not (auth and
check_credentials(request.authorization.username, request.authorization.password, request)):
return ('Unauthorized', 401, {
'WWW-Authenticate': 'Basic realm="Login Required"'
})
@ -65,12 +66,13 @@ def catch_all(path):
auth = True
if settings.auth.type == 'basic':
auth = request.authorization
if not (auth and check_credentials(request.authorization.username, request.authorization.password)):
if not (auth and check_credentials(request.authorization.username, request.authorization.password, request,
log_success=False)):
return ('Unauthorized', 401, {
'WWW-Authenticate': 'Basic realm="Login Required"'
})
elif settings.auth.type == 'form':
if 'logged_in' not in session:
if 'logged_in' not in session or not session['logged_in']:
auth = False
try:

View File

@ -18,6 +18,8 @@ from utilities.binaries import get_binary, BinaryNotFound
from utilities.path_mappings import path_mappings
from utilities.backup import restore_from_backup
from app.database import init_db
# set start time global variable as epoch
global startTime
startTime = time.time()
@ -233,9 +235,6 @@ def init_binaries():
return exe
# keep this import at the end to prevent peewee.OperationalError: unable to open database file
from app.database import init_db, migrate_db # noqa E402
init_db()
migrate_db()
init_binaries()
path_mappings.update()

View File

@ -5,6 +5,8 @@ import os
from subzero.language import Language
from app.database import database, insert
logger = logging.getLogger(__name__)
@ -46,9 +48,13 @@ class CustomLanguage:
"Register the custom language subclasses in the database."
for sub in cls.__subclasses__():
table.insert(
{table.code3: sub.alpha3, table.code2: sub.alpha2, table.name: sub.name}
).on_conflict(action="IGNORE").execute()
database.execute(
insert(table)
.values(code3=sub.alpha3,
code2=sub.alpha2,
name=sub.name,
enabled=0)
.on_conflict_do_nothing())
@classmethod
def found_external(cls, subtitle, subtitle_path):
@ -61,7 +67,6 @@ class CustomLanguage:
@classmethod
def get_alpha_type(cls, subtitle: str, subtitle_path=None):
assert subtitle_path is not None
extension = str(os.path.splitext(subtitle)[0]).lower()
to_return = None

View File

@ -5,32 +5,29 @@ import pycountry
from subzero.language import Language
from .custom_lang import CustomLanguage
from app.database import TableSettingsLanguages
from app.database import TableSettingsLanguages, database, insert, update, select
def load_language_in_db():
# Get languages list in langs tuple
langs = [[lang.alpha_3, lang.alpha_2, lang.name]
langs = [{'code3': lang.alpha_3, 'code2': lang.alpha_2, 'name': lang.name, 'enabled': 0}
for lang in pycountry.languages
if hasattr(lang, 'alpha_2')]
# Insert standard languages in database table
TableSettingsLanguages.insert_many(langs,
fields=[TableSettingsLanguages.code3, TableSettingsLanguages.code2,
TableSettingsLanguages.name]) \
.on_conflict(action='IGNORE') \
.execute()
database.execute(
insert(TableSettingsLanguages)
.values(langs)
.on_conflict_do_nothing())
# Update standard languages with code3b if available
langs = [[lang.bibliographic, lang.alpha_3]
langs = [{'code3b': lang.bibliographic, 'code3': lang.alpha_3}
for lang in pycountry.languages
if hasattr(lang, 'alpha_2') and hasattr(lang, 'bibliographic')]
# Update languages in database table
for lang in langs:
TableSettingsLanguages.update({TableSettingsLanguages.code3b: lang[0]}) \
.where(TableSettingsLanguages.code3 == lang[1]) \
.execute()
database.execute(
update(TableSettingsLanguages), langs)
# Insert custom languages in database table
CustomLanguage.register(TableSettingsLanguages)
@ -42,52 +39,58 @@ def load_language_in_db():
def create_languages_dict():
global languages_dict
# replace chinese by chinese simplified
TableSettingsLanguages.update({TableSettingsLanguages.name: 'Chinese Simplified'}) \
.where(TableSettingsLanguages.code3 == 'zho') \
.execute()
database.execute(
update(TableSettingsLanguages)
.values(name='Chinese Simplified')
.where(TableSettingsLanguages.code3 == 'zho'))
languages_dict = TableSettingsLanguages.select(TableSettingsLanguages.name,
TableSettingsLanguages.code2,
TableSettingsLanguages.code3,
TableSettingsLanguages.code3b).dicts()
languages_dict = [{
'code3': x.code3,
'code2': x.code2,
'name': x.name,
'code3b': x.code3b,
} for x in database.execute(
select(TableSettingsLanguages.code3, TableSettingsLanguages.code2, TableSettingsLanguages.name,
TableSettingsLanguages.code3b))
.all()]
def language_from_alpha2(lang):
return next((item["name"] for item in languages_dict if item["code2"] == lang[:2]), None)
return next((item['name'] for item in languages_dict if item['code2'] == lang[:2]), None)
def language_from_alpha3(lang):
return next((item["name"] for item in languages_dict if item["code3"] == lang[:3] or item["code3b"] == lang[:3]),
None)
return next((item['name'] for item in languages_dict if lang[:3] in [item['code3'], item['code3b']]), None)
def alpha2_from_alpha3(lang):
return next((item["code2"] for item in languages_dict if item["code3"] == lang[:3] or item["code3b"] == lang[:3]),
None)
return next((item['code2'] for item in languages_dict if lang[:3] in [item['code3'], item['code3b']]), None)
def alpha2_from_language(lang):
return next((item["code2"] for item in languages_dict if item["name"] == lang), None)
return next((item['code2'] for item in languages_dict if item['name'] == lang), None)
def alpha3_from_alpha2(lang):
return next((item["code3"] for item in languages_dict if item["code2"] == lang[:2]), None)
return next((item['code3'] for item in languages_dict if item['code2'] == lang[:2]), None)
def alpha3_from_language(lang):
return next((item["code3"] for item in languages_dict if item["name"] == lang), None)
return next((item['code3'] for item in languages_dict if item['name'] == lang), None)
def get_language_set():
languages = TableSettingsLanguages.select(TableSettingsLanguages.code3) \
.where(TableSettingsLanguages.enabled == 1).dicts()
languages = database.execute(
select(TableSettingsLanguages.code3)
.where(TableSettingsLanguages.enabled == 1))\
.all()
language_set = set()
for lang in languages:
custom = CustomLanguage.from_value(lang["code3"], "alpha3")
custom = CustomLanguage.from_value(lang.code3, "alpha3")
if custom is None:
language_set.add(Language(lang["code3"]))
language_set.add(Language(lang.code3))
else:
language_set.add(custom.subzero_language())

View File

@ -1,6 +1,8 @@
# coding=utf-8
import os
import io
import logging
from threading import Thread
@ -34,19 +36,35 @@ else:
# there's missing embedded packages after a commit
check_if_new_update()
from app.database import System # noqa E402
from app.database import System, database, update, migrate_db, create_db_revision # noqa E402
from app.notifier import update_notifier # noqa E402
from languages.get_languages import load_language_in_db # noqa E402
from app.signalr_client import sonarr_signalr_client, radarr_signalr_client # noqa E402
from app.server import webserver # noqa E402
from app.server import webserver, app # noqa E402
from app.announcements import get_announcements_to_file # noqa E402
if args.create_db_revision:
try:
stop_file = io.open(os.path.join(args.config_dir, "bazarr.stop"), "w", encoding='UTF-8')
except Exception as e:
logging.error('BAZARR Cannot create stop file: ' + repr(e))
else:
create_db_revision(app)
logging.info('Bazarr is being shutdown...')
stop_file.write(str(''))
stop_file.close()
os._exit(0)
else:
migrate_db(app)
configure_proxy_func()
get_announcements_to_file()
# Reset the updated once Bazarr have been restarted after an update
System.update({System.updated: '0'}).execute()
database.execute(
update(System)
.values(updated='0'))
# Load languages in database
load_language_in_db()

View File

@ -2,38 +2,38 @@
from datetime import datetime
from app.database import TableBlacklistMovie
from app.database import TableBlacklistMovie, database, insert, delete, select
from app.event_handler import event_stream
def get_blacklist_movie():
blacklist_db = TableBlacklistMovie.select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id).dicts()
blacklist_list = []
for item in blacklist_db:
blacklist_list.append((item['provider'], item['subs_id']))
return blacklist_list
return [(item.provider, item.subs_id) for item in
database.execute(
select(TableBlacklistMovie.provider, TableBlacklistMovie.subs_id))
.all()]
def blacklist_log_movie(radarr_id, provider, subs_id, language):
TableBlacklistMovie.insert({
TableBlacklistMovie.radarr_id: radarr_id,
TableBlacklistMovie.timestamp: datetime.now(),
TableBlacklistMovie.provider: provider,
TableBlacklistMovie.subs_id: subs_id,
TableBlacklistMovie.language: language
}).execute()
database.execute(
insert(TableBlacklistMovie)
.values(
radarr_id=radarr_id,
timestamp=datetime.now(),
provider=provider,
subs_id=subs_id,
language=language
))
event_stream(type='movie-blacklist')
def blacklist_delete_movie(provider, subs_id):
TableBlacklistMovie.delete().where((TableBlacklistMovie.provider == provider) and
(TableBlacklistMovie.subs_id == subs_id))\
.execute()
database.execute(
delete(TableBlacklistMovie)
.where((TableBlacklistMovie.provider == provider) and (TableBlacklistMovie.subs_id == subs_id)))
event_stream(type='movie-blacklist', action='delete')
def blacklist_delete_all_movie():
TableBlacklistMovie.delete().execute()
database.execute(
delete(TableBlacklistMovie))
event_stream(type='movie-blacklist', action='delete')

View File

@ -2,7 +2,7 @@
from datetime import datetime
from app.database import TableHistoryMovie
from app.database import TableHistoryMovie, database, insert
from app.event_handler import event_stream
@ -14,17 +14,23 @@ def history_log_movie(action, radarr_id, result, fake_provider=None, fake_score=
score = fake_score or result.score
subs_id = result.subs_id
subtitles_path = result.subs_path
matched = result.matched
not_matched = result.not_matched
TableHistoryMovie.insert({
TableHistoryMovie.action: action,
TableHistoryMovie.radarrId: radarr_id,
TableHistoryMovie.timestamp: datetime.now(),
TableHistoryMovie.description: description,
TableHistoryMovie.video_path: video_path,
TableHistoryMovie.language: language,
TableHistoryMovie.provider: provider,
TableHistoryMovie.score: score,
TableHistoryMovie.subs_id: subs_id,
TableHistoryMovie.subtitles_path: subtitles_path
}).execute()
database.execute(
insert(TableHistoryMovie)
.values(
action=action,
radarrId=radarr_id,
timestamp=datetime.now(),
description=description,
video_path=video_path,
language=language,
provider=provider,
score=score,
subs_id=subs_id,
subtitles_path=subtitles_path,
matched=str(matched) if matched else None,
not_matched=str(not_matched) if not_matched else None
))
event_stream(type='movie-history')

View File

@ -6,7 +6,7 @@ import logging
from app.config import settings
from utilities.path_mappings import path_mappings
from app.database import TableMoviesRootfolder, TableMovies
from app.database import TableMoviesRootfolder, TableMovies, database, delete, update, insert, select
from radarr.info import get_radarr_info, url_radarr
from constants import headers
@ -33,52 +33,61 @@ def get_radarr_rootfolder():
logging.exception("BAZARR Error trying to get rootfolder from Radarr.")
return []
else:
radarr_movies_paths = list(TableMovies.select(TableMovies.path).dicts())
for folder in rootfolder.json():
if any(item['path'].startswith(folder['path']) for item in radarr_movies_paths):
if any(item.path.startswith(folder['path']) for item in database.execute(
select(TableMovies.path))
.all()):
radarr_rootfolder.append({'id': folder['id'], 'path': folder['path']})
db_rootfolder = TableMoviesRootfolder.select(TableMoviesRootfolder.id, TableMoviesRootfolder.path).dicts()
db_rootfolder = database.execute(
select(TableMoviesRootfolder.id, TableMoviesRootfolder.path))\
.all()
rootfolder_to_remove = [x for x in db_rootfolder if not
next((item for item in radarr_rootfolder if item['id'] == x['id']), False)]
next((item for item in radarr_rootfolder if item['id'] == x.id), False)]
rootfolder_to_update = [x for x in radarr_rootfolder if
next((item for item in db_rootfolder if item['id'] == x['id']), False)]
next((item for item in db_rootfolder if item.id == x['id']), False)]
rootfolder_to_insert = [x for x in radarr_rootfolder if not
next((item for item in db_rootfolder if item['id'] == x['id']), False)]
next((item for item in db_rootfolder if item.id == x['id']), False)]
for item in rootfolder_to_remove:
TableMoviesRootfolder.delete().where(TableMoviesRootfolder.id == item['id']).execute()
database.execute(
delete(TableMoviesRootfolder)
.where(TableMoviesRootfolder.id == item.id))
for item in rootfolder_to_update:
TableMoviesRootfolder.update({TableMoviesRootfolder.path: item['path']})\
.where(TableMoviesRootfolder.id == item['id']).execute()
database.execute(
update(TableMoviesRootfolder)
.values(path=item['path'])
.where(TableMoviesRootfolder.id == item['id']))
for item in rootfolder_to_insert:
TableMoviesRootfolder.insert({TableMoviesRootfolder.id: item['id'],
TableMoviesRootfolder.path: item['path']}).execute()
database.execute(
insert(TableMoviesRootfolder)
.values(id=item['id'], path=item['path']))
def check_radarr_rootfolder():
get_radarr_rootfolder()
rootfolder = TableMoviesRootfolder.select(TableMoviesRootfolder.id, TableMoviesRootfolder.path).dicts()
rootfolder = database.execute(
select(TableMoviesRootfolder.id, TableMoviesRootfolder.path))\
.all()
for item in rootfolder:
root_path = item['path']
root_path = item.path
if not root_path.endswith(('/', '\\')):
if root_path.startswith('/'):
root_path += '/'
else:
root_path += '\\'
if not os.path.isdir(path_mappings.path_replace_movie(root_path)):
TableMoviesRootfolder.update({TableMoviesRootfolder.accessible: 0,
TableMoviesRootfolder.error: 'This Radarr root directory does not seems to '
'be accessible by Please check path '
'mapping.'}) \
.where(TableMoviesRootfolder.id == item['id']) \
.execute()
database.execute(
update(TableMoviesRootfolder)
.values(accessible=0, error='This Radarr root directory does not seems to be accessible by Please '
'check path mapping.')
.where(TableMoviesRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace_movie(root_path), os.W_OK):
TableMoviesRootfolder.update({TableMoviesRootfolder.accessible: 0,
TableMoviesRootfolder.error: 'Bazarr cannot write to this directory'}) \
.where(TableMoviesRootfolder.id == item['id']) \
.execute()
database.execute(
update(TableMoviesRootfolder)
.values(accessible=0, error='Bazarr cannot write to this directory')
.where(TableMoviesRootfolder.id == item.id))
else:
TableMoviesRootfolder.update({TableMoviesRootfolder.accessible: 1,
TableMoviesRootfolder.error: ''}) \
.where(TableMoviesRootfolder.id == item['id']) \
.execute()
database.execute(
update(TableMoviesRootfolder)
.values(accessible=1, error='')
.where(TableMoviesRootfolder.id == item.id))

View File

@ -3,7 +3,7 @@
import os
import logging
from peewee import IntegrityError
from sqlalchemy.exc import IntegrityError
from app.config import settings
from radarr.info import url_radarr
@ -11,7 +11,7 @@ from utilities.path_mappings import path_mappings
from subtitles.indexer.movies import store_subtitles_movie, movies_full_scan_subtitles
from radarr.rootfolder import check_radarr_rootfolder
from subtitles.mass_download import movies_download_subtitles
from app.database import TableMovies
from app.database import TableMovies, database, insert, update, delete, select
from app.event_handler import event_stream, show_progress, hide_progress
from .utils import get_profile_list, get_tags, get_movies_from_radarr_api
@ -49,9 +49,10 @@ def update_movies(send_event=True):
return
else:
# Get current movies in DB
current_movies_db = TableMovies.select(TableMovies.tmdbId, TableMovies.path, TableMovies.radarrId).dicts()
current_movies_db_list = [x['tmdbId'] for x in current_movies_db]
current_movies_db = [x.tmdbId for x in
database.execute(
select(TableMovies.tmdbId))
.all()]
current_movies_radarr = []
movies_to_update = []
@ -79,7 +80,7 @@ def update_movies(send_event=True):
# Add movies in radarr to current movies list
current_movies_radarr.append(str(movie['tmdbId']))
if str(movie['tmdbId']) in current_movies_db_list:
if str(movie['tmdbId']) in current_movies_db:
movies_to_update.append(movieParser(movie, action='update',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
@ -94,51 +95,25 @@ def update_movies(send_event=True):
hide_progress(id='movies_progress')
# Remove old movies from DB
removed_movies = list(set(current_movies_db_list) - set(current_movies_radarr))
removed_movies = list(set(current_movies_db) - set(current_movies_radarr))
for removed_movie in removed_movies:
try:
TableMovies.delete().where(TableMovies.tmdbId == removed_movie).execute()
except Exception as e:
logging.error(f"BAZARR cannot remove movie tmdbId {removed_movie} because of {e}")
continue
database.execute(
delete(TableMovies)
.where(TableMovies.tmdbId == removed_movie))
# Update movies in DB
movies_in_db_list = []
movies_in_db = TableMovies.select(TableMovies.radarrId,
TableMovies.title,
TableMovies.path,
TableMovies.tmdbId,
TableMovies.overview,
TableMovies.poster,
TableMovies.fanart,
TableMovies.audio_language,
TableMovies.sceneName,
TableMovies.monitored,
TableMovies.sortTitle,
TableMovies.year,
TableMovies.alternativeTitles,
TableMovies.format,
TableMovies.resolution,
TableMovies.video_codec,
TableMovies.audio_codec,
TableMovies.imdbId,
TableMovies.movie_file_id,
TableMovies.tags,
TableMovies.file_size).dicts()
for item in movies_in_db:
movies_in_db_list.append(item)
movies_to_update_list = [i for i in movies_to_update if i not in movies_in_db_list]
for updated_movie in movies_to_update_list:
try:
TableMovies.update(updated_movie).where(TableMovies.tmdbId == updated_movie['tmdbId']).execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot update movie {updated_movie['path']} because of {e}")
for updated_movie in movies_to_update:
if database.execute(
select(TableMovies)
.filter_by(**updated_movie))\
.first():
continue
else:
database.execute(
update(TableMovies).values(updated_movie)
.where(TableMovies.tmdbId == updated_movie['tmdbId']))
altered_movies.append([updated_movie['tmdbId'],
updated_movie['path'],
updated_movie['radarrId'],
@ -147,21 +122,19 @@ def update_movies(send_event=True):
# Insert new movies in DB
for added_movie in movies_to_add:
try:
result = TableMovies.insert(added_movie).on_conflict_ignore().execute()
database.execute(
insert(TableMovies)
.values(added_movie))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert movie {added_movie['path']} because of {e}")
logging.error(f"BAZARR cannot update movie {added_movie['path']} because of {e}")
continue
else:
if result and result > 0:
altered_movies.append([added_movie['tmdbId'],
added_movie['path'],
added_movie['radarrId'],
added_movie['monitored']])
if send_event:
event_stream(type='movie', action='update', payload=int(added_movie['radarrId']))
else:
logging.debug('BAZARR unable to insert this movie into the database:',
path_mappings.path_replace_movie(added_movie['path']))
altered_movies.append([added_movie['tmdbId'],
added_movie['path'],
added_movie['radarrId'],
added_movie['monitored']])
if send_event:
event_stream(type='movie', action='update', payload=int(added_movie['radarrId']))
# Store subtitles for added or modified movies
for i, altered_movie in enumerate(altered_movies, 1):
@ -174,22 +147,21 @@ def update_one_movie(movie_id, action, defer_search=False):
logging.debug('BAZARR syncing this specific movie from Radarr: {}'.format(movie_id))
# Check if there's a row in database for this movie ID
existing_movie = TableMovies.select(TableMovies.path)\
.where(TableMovies.radarrId == movie_id)\
.dicts()\
.get_or_none()
existing_movie = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == movie_id))\
.first()
# Remove movie from DB
if action == 'deleted':
if existing_movie:
try:
TableMovies.delete().where(TableMovies.radarrId == movie_id).execute()
except Exception as e:
logging.error(f"BAZARR cannot delete movie {existing_movie['path']} because of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie['path'])))
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
return
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
@ -228,31 +200,32 @@ def update_one_movie(movie_id, action, defer_search=False):
# Remove movie from DB
if not movie and existing_movie:
try:
TableMovies.delete().where(TableMovies.radarrId == movie_id).execute()
except Exception as e:
logging.error(f"BAZARR cannot insert episode {existing_movie['path']} because of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie['path'])))
return
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
return
# Update existing movie in DB
elif movie and existing_movie:
try:
TableMovies.update(movie).where(TableMovies.radarrId == movie['radarrId']).execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {movie['path']} because of {e}")
else:
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
database.execute(
update(TableMovies)
.values(movie)
.where(TableMovies.radarrId == movie['radarrId']))
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
# Insert new movie in DB
elif movie and not existing_movie:
try:
TableMovies.insert(movie).on_conflict(action='IGNORE').execute()
database.execute(
insert(TableMovies)
.values(movie))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert movie {movie['path']} because of {e}")
else:

View File

@ -2,39 +2,38 @@
from datetime import datetime
from app.database import TableBlacklist
from app.database import TableBlacklist, database, insert, delete, select
from app.event_handler import event_stream
def get_blacklist():
blacklist_db = TableBlacklist.select(TableBlacklist.provider, TableBlacklist.subs_id).dicts()
blacklist_list = []
for item in blacklist_db:
blacklist_list.append((item['provider'], item['subs_id']))
return blacklist_list
return [(item.provider, item.subs_id) for item in
database.execute(
select(TableBlacklist.provider, TableBlacklist.subs_id))
.all()]
def blacklist_log(sonarr_series_id, sonarr_episode_id, provider, subs_id, language):
TableBlacklist.insert({
TableBlacklist.sonarr_series_id: sonarr_series_id,
TableBlacklist.sonarr_episode_id: sonarr_episode_id,
TableBlacklist.timestamp: datetime.now(),
TableBlacklist.provider: provider,
TableBlacklist.subs_id: subs_id,
TableBlacklist.language: language
}).execute()
database.execute(
insert(TableBlacklist)
.values(
sonarr_series_id=sonarr_series_id,
sonarr_episode_id=sonarr_episode_id,
timestamp=datetime.now(),
provider=provider,
subs_id=subs_id,
language=language
))
event_stream(type='episode-blacklist')
def blacklist_delete(provider, subs_id):
TableBlacklist.delete().where((TableBlacklist.provider == provider) and
(TableBlacklist.subs_id == subs_id))\
.execute()
database.execute(
delete(TableBlacklist)
.where((TableBlacklist.provider == provider) and (TableBlacklist.subs_id == subs_id)))
event_stream(type='episode-blacklist', action='delete')
def blacklist_delete_all():
TableBlacklist.delete().execute()
database.execute(delete(TableBlacklist))
event_stream(type='episode-blacklist', action='delete')

View File

@ -2,7 +2,7 @@
from datetime import datetime
from app.database import TableHistory
from app.database import TableHistory, database, insert
from app.event_handler import event_stream
@ -14,18 +14,24 @@ def history_log(action, sonarr_series_id, sonarr_episode_id, result, fake_provid
score = fake_score or result.score
subs_id = result.subs_id
subtitles_path = result.subs_path
matched = result.matched
not_matched = result.not_matched
TableHistory.insert({
TableHistory.action: action,
TableHistory.sonarrSeriesId: sonarr_series_id,
TableHistory.sonarrEpisodeId: sonarr_episode_id,
TableHistory.timestamp: datetime.now(),
TableHistory.description: description,
TableHistory.video_path: video_path,
TableHistory.language: language,
TableHistory.provider: provider,
TableHistory.score: score,
TableHistory.subs_id: subs_id,
TableHistory.subtitles_path: subtitles_path
}).execute()
database.execute(
insert(TableHistory)
.values(
action=action,
sonarrSeriesId=sonarr_series_id,
sonarrEpisodeId=sonarr_episode_id,
timestamp=datetime.now(),
description=description,
video_path=video_path,
language=language,
provider=provider,
score=score,
subs_id=subs_id,
subtitles_path=subtitles_path,
matched=str(matched) if matched else None,
not_matched=str(not_matched) if not_matched else None
))
event_stream(type='episode-history')

View File

@ -5,7 +5,7 @@ import requests
import logging
from app.config import settings
from app.database import TableShowsRootfolder, TableShows
from app.database import TableShowsRootfolder, TableShows, database, insert, update, delete, select
from utilities.path_mappings import path_mappings
from sonarr.info import get_sonarr_info, url_sonarr
from constants import headers
@ -33,53 +33,61 @@ def get_sonarr_rootfolder():
logging.exception("BAZARR Error trying to get rootfolder from Sonarr.")
return []
else:
sonarr_movies_paths = list(TableShows.select(TableShows.path).dicts())
for folder in rootfolder.json():
if any(item['path'].startswith(folder['path']) for item in sonarr_movies_paths):
if any(item.path.startswith(folder['path']) for item in database.execute(
select(TableShows.path))
.all()):
sonarr_rootfolder.append({'id': folder['id'], 'path': folder['path']})
db_rootfolder = TableShowsRootfolder.select(TableShowsRootfolder.id, TableShowsRootfolder.path).dicts()
db_rootfolder = database.execute(
select(TableShowsRootfolder.id, TableShowsRootfolder.path))\
.all()
rootfolder_to_remove = [x for x in db_rootfolder if not
next((item for item in sonarr_rootfolder if item['id'] == x['id']), False)]
next((item for item in sonarr_rootfolder if item['id'] == x.id), False)]
rootfolder_to_update = [x for x in sonarr_rootfolder if
next((item for item in db_rootfolder if item['id'] == x['id']), False)]
next((item for item in db_rootfolder if item.id == x['id']), False)]
rootfolder_to_insert = [x for x in sonarr_rootfolder if not
next((item for item in db_rootfolder if item['id'] == x['id']), False)]
next((item for item in db_rootfolder if item.id == x['id']), False)]
for item in rootfolder_to_remove:
TableShowsRootfolder.delete().where(TableShowsRootfolder.id == item['id']).execute()
database.execute(
delete(TableShowsRootfolder)
.where(TableShowsRootfolder.id == item.id))
for item in rootfolder_to_update:
TableShowsRootfolder.update({TableShowsRootfolder.path: item['path']})\
.where(TableShowsRootfolder.id == item['id'])\
.execute()
database.execute(
update(TableShowsRootfolder)
.values(path=item['path'])
.where(TableShowsRootfolder.id == item['id']))
for item in rootfolder_to_insert:
TableShowsRootfolder.insert({TableShowsRootfolder.id: item['id'], TableShowsRootfolder.path: item['path']})\
.execute()
database.execute(
insert(TableShowsRootfolder)
.values(id=item['id'], path=item['path']))
def check_sonarr_rootfolder():
get_sonarr_rootfolder()
rootfolder = TableShowsRootfolder.select(TableShowsRootfolder.id, TableShowsRootfolder.path).dicts()
rootfolder = database.execute(
select(TableShowsRootfolder.id, TableShowsRootfolder.path))\
.all()
for item in rootfolder:
root_path = item['path']
root_path = item.path
if not root_path.endswith(('/', '\\')):
if root_path.startswith('/'):
root_path += '/'
else:
root_path += '\\'
if not os.path.isdir(path_mappings.path_replace(root_path)):
TableShowsRootfolder.update({TableShowsRootfolder.accessible: 0,
TableShowsRootfolder.error: 'This Sonarr root directory does not seems to '
'be accessible by Please check path '
'mapping.'})\
.where(TableShowsRootfolder.id == item['id'])\
.execute()
database.execute(
update(TableShowsRootfolder)
.values(accessible=0, error='This Sonarr root directory does not seems to be accessible by Bazarr. '
'Please check path mapping.')
.where(TableShowsRootfolder.id == item.id))
elif not os.access(path_mappings.path_replace(root_path), os.W_OK):
TableShowsRootfolder.update({TableShowsRootfolder.accessible: 0,
TableShowsRootfolder.error: 'Bazarr cannot write to this directory.'}) \
.where(TableShowsRootfolder.id == item['id']) \
.execute()
database.execute(
update(TableShowsRootfolder)
.values(accessible=0, error='Bazarr cannot write to this directory.')
.where(TableShowsRootfolder.id == item.id))
else:
TableShowsRootfolder.update({TableShowsRootfolder.accessible: 1,
TableShowsRootfolder.error: ''}) \
.where(TableShowsRootfolder.id == item['id']) \
.execute()
database.execute(
update(TableShowsRootfolder)
.values(accessible=1, error='')
.where(TableShowsRootfolder.id == item.id))

View File

@ -3,18 +3,18 @@
import os
import logging
from peewee import IntegrityError
from sqlalchemy.exc import IntegrityError
from app.database import TableEpisodes
from app.database import database, TableEpisodes, delete, update, insert, select
from app.config import settings
from utilities.path_mappings import path_mappings
from subtitles.indexer.series import store_subtitles, series_full_scan_subtitles
from subtitles.mass_download import episode_download_subtitles
from app.event_handler import event_stream, show_progress, hide_progress
from app.event_handler import event_stream
from sonarr.info import get_sonarr_info, url_sonarr
from .parser import episodeParser
from .utils import get_series_from_sonarr_api, get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api
from .utils import get_episodes_from_sonarr_api, get_episodesFiles_from_sonarr_api
def update_all_episodes():
@ -22,147 +22,118 @@ def update_all_episodes():
logging.info('BAZARR All existing episode subtitles indexed from disk.')
def sync_episodes(series_id=None, send_event=True):
def sync_episodes(series_id, send_event=True):
logging.debug('BAZARR Starting episodes sync from Sonarr.')
apikey_sonarr = settings.sonarr.apikey
# Get current episodes id in DB
current_episodes_db = TableEpisodes.select(TableEpisodes.sonarrEpisodeId,
if series_id:
current_episodes_db_list = [row.sonarrEpisodeId for row in
database.execute(
select(TableEpisodes.sonarrEpisodeId,
TableEpisodes.path,
TableEpisodes.sonarrSeriesId)\
.where((TableEpisodes.sonarrSeriesId == series_id) if series_id else None)\
.dicts()
current_episodes_db_list = [x['sonarrEpisodeId'] for x in current_episodes_db]
TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.sonarrSeriesId == series_id)).all()]
else:
return
current_episodes_sonarr = []
episodes_to_update = []
episodes_to_add = []
altered_episodes = []
# Get sonarrId for each series from database
seriesIdList = get_series_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr, sonarr_series_id=series_id)
series_count = len(seriesIdList)
for i, seriesId in enumerate(seriesIdList):
if send_event:
show_progress(id='episodes_progress',
header='Syncing episodes...',
name=seriesId['title'],
value=i,
count=series_count)
# Get episodes data for a series from Sonarr
episodes = get_episodes_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=seriesId['id'])
if not episodes:
continue
else:
# For Sonarr v3, we need to update episodes to integrate the episodeFile API endpoint results
if not get_sonarr_info.is_legacy():
episodeFiles = get_episodesFiles_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=seriesId['id'])
for episode in episodes:
if episodeFiles and episode['hasFile']:
item = [x for x in episodeFiles if x['id'] == episode['episodeFileId']]
if item:
episode['episodeFile'] = item[0]
# Get episodes data for a series from Sonarr
episodes = get_episodes_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=series_id)
if episodes:
# For Sonarr v3, we need to update episodes to integrate the episodeFile API endpoint results
if not get_sonarr_info.is_legacy():
episodeFiles = get_episodesFiles_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
series_id=series_id)
for episode in episodes:
if 'hasFile' in episode:
if episode['hasFile'] is True:
if 'episodeFile' in episode:
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode['id'])
if episodeFiles and episode['hasFile']:
item = [x for x in episodeFiles if x['id'] == episode['episodeFileId']]
if item:
episode['episodeFile'] = item[0]
# Parse episode data
if episode['id'] in current_episodes_db_list:
episodes_to_update.append(episodeParser(episode))
else:
episodes_to_add.append(episodeParser(episode))
for episode in episodes:
if 'hasFile' in episode:
if episode['hasFile'] is True:
if 'episodeFile' in episode:
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace(episode['episodeFile']['path']))
except OSError:
bazarr_file_size = 0
if episode['episodeFile']['size'] > 20480 or bazarr_file_size > 20480:
# Add episodes in sonarr to current episode list
current_episodes_sonarr.append(episode['id'])
if send_event:
hide_progress(id='episodes_progress')
# Parse episode data
if episode['id'] in current_episodes_db_list:
episodes_to_update.append(episodeParser(episode))
else:
episodes_to_add.append(episodeParser(episode))
# Remove old episodes from DB
removed_episodes = list(set(current_episodes_db_list) - set(current_episodes_sonarr))
stmt = select(TableEpisodes.path,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)
for removed_episode in removed_episodes:
episode_to_delete = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)\
.where(TableEpisodes.sonarrEpisodeId == removed_episode)\
.dicts()\
.get_or_none()
episode_to_delete = database.execute(stmt.where(TableEpisodes.sonarrEpisodeId == removed_episode)).first()
if not episode_to_delete:
continue
try:
TableEpisodes.delete().where(TableEpisodes.sonarrEpisodeId == removed_episode).execute()
database.execute(
delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == removed_episode))
except Exception as e:
logging.error(f"BAZARR cannot delete episode {episode_to_delete['path']} because of {e}")
logging.error(f"BAZARR cannot delete episode {episode_to_delete.path} because of {e}")
continue
else:
if send_event:
event_stream(type='episode', action='delete', payload=episode_to_delete['sonarrEpisodeId'])
event_stream(type='episode', action='delete', payload=episode_to_delete.sonarrEpisodeId)
# Update existing episodes in DB
episode_in_db_list = []
episodes_in_db = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.title,
TableEpisodes.path,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.sceneName,
TableEpisodes.monitored,
TableEpisodes.format,
TableEpisodes.resolution,
TableEpisodes.video_codec,
TableEpisodes.audio_codec,
TableEpisodes.episode_file_id,
TableEpisodes.audio_language,
TableEpisodes.file_size).dicts()
for item in episodes_in_db:
episode_in_db_list.append(item)
episodes_to_update_list = [i for i in episodes_to_update if i not in episode_in_db_list]
for updated_episode in episodes_to_update_list:
try:
TableEpisodes.update(updated_episode).where(TableEpisodes.sonarrEpisodeId ==
updated_episode['sonarrEpisodeId']).execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot update episode {updated_episode['path']} because of {e}")
for updated_episode in episodes_to_update:
if database.execute(
select(TableEpisodes)
.filter_by(**updated_episode))\
.first():
continue
else:
altered_episodes.append([updated_episode['sonarrEpisodeId'],
updated_episode['path'],
updated_episode['sonarrSeriesId']])
try:
database.execute(
update(TableEpisodes)
.values(updated_episode)
.where(TableEpisodes.sonarrEpisodeId == updated_episode['sonarrEpisodeId']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update episode {updated_episode['path']} because of {e}")
continue
else:
altered_episodes.append([updated_episode['sonarrEpisodeId'],
updated_episode['path'],
updated_episode['sonarrSeriesId']])
if send_event:
event_stream(type='episode', action='update', payload=updated_episode['sonarrEpisodeId'])
# Insert new episodes in DB
for added_episode in episodes_to_add:
try:
result = TableEpisodes.insert(added_episode).on_conflict_ignore().execute()
database.execute(
insert(TableEpisodes)
.values(added_episode))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {added_episode['path']} because of {e}")
continue
else:
if result and result > 0:
altered_episodes.append([added_episode['sonarrEpisodeId'],
added_episode['path'],
added_episode['monitored']])
if send_event:
event_stream(type='episode', payload=added_episode['sonarrEpisodeId'])
else:
logging.debug('BAZARR unable to insert this episode into the database:{}'.format(
path_mappings.path_replace(added_episode['path'])))
altered_episodes.append([added_episode['sonarrEpisodeId'],
added_episode['path'],
added_episode['monitored']])
if send_event:
event_stream(type='episode', payload=added_episode['sonarrEpisodeId'])
# Store subtitles for added or modified episodes
for i, altered_episode in enumerate(altered_episodes, 1):
@ -177,10 +148,10 @@ def sync_one_episode(episode_id, defer_search=False):
apikey_sonarr = settings.sonarr.apikey
# Check if there's a row in database for this episode ID
existing_episode = TableEpisodes.select(TableEpisodes.path, TableEpisodes.episode_file_id)\
.where(TableEpisodes.sonarrEpisodeId == episode_id)\
.dicts()\
.get_or_none()
existing_episode = database.execute(
select(TableEpisodes.path, TableEpisodes.episode_file_id)
.where(TableEpisodes.sonarrEpisodeId == episode_id)) \
.first()
try:
# Get episode data from sonarr api
@ -207,20 +178,22 @@ def sync_one_episode(episode_id, defer_search=False):
# Remove episode from DB
if not episode and existing_episode:
try:
TableEpisodes.delete().where(TableEpisodes.sonarrEpisodeId == episode_id).execute()
except Exception as e:
logging.error(f"BAZARR cannot delete episode {existing_episode['path']} because of {e}")
else:
event_stream(type='episode', action='delete', payload=int(episode_id))
logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
existing_episode['path'])))
return
database.execute(
delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == episode_id))
event_stream(type='episode', action='delete', payload=int(episode_id))
logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
existing_episode['path'])))
return
# Update existing episodes in DB
elif episode and existing_episode:
try:
TableEpisodes.update(episode).where(TableEpisodes.sonarrEpisodeId == episode_id).execute()
database.execute(
update(TableEpisodes)
.values(episode)
.where(TableEpisodes.sonarrEpisodeId == episode_id))
except IntegrityError as e:
logging.error(f"BAZARR cannot update episode {episode['path']} because of {e}")
else:
@ -231,7 +204,9 @@ def sync_one_episode(episode_id, defer_search=False):
# Insert new episodes in DB
elif episode and not existing_episode:
try:
TableEpisodes.insert(episode).on_conflict(action='IGNORE').execute()
database.execute(
insert(TableEpisodes)
.values(episode))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {episode['path']} because of {e}")
else:

View File

@ -3,7 +3,7 @@
import os
from app.config import settings
from app.database import TableShows
from app.database import TableShows, database, select
from utilities.path_mappings import path_mappings
from utilities.video_analyzer import embedded_audio_reader
from sonarr.info import get_sonarr_info
@ -118,8 +118,10 @@ def episodeParser(episode):
if 'name' in item:
audio_language.append(item['name'])
else:
audio_language = TableShows.get(
TableShows.sonarrSeriesId == episode['seriesId']).audio_language
audio_language = database.execute(
select(TableShows.audio_language)
.where(TableShows.sonarrSeriesId == episode['seriesId']))\
.first().audio_language
if 'mediaInfo' in episode['episodeFile']:
if 'videoCodec' in episode['episodeFile']['mediaInfo']:

View File

@ -2,13 +2,13 @@
import logging
from peewee import IntegrityError
from sqlalchemy.exc import IntegrityError
from app.config import settings
from sonarr.info import url_sonarr
from subtitles.indexer.series import list_missing_subtitles
from sonarr.rootfolder import check_sonarr_rootfolder
from app.database import TableShows, TableEpisodes
from app.database import TableShows, database, insert, update, delete, select
from utilities.path_mappings import path_mappings
from app.event_handler import event_stream, show_progress, hide_progress
@ -41,12 +41,11 @@ def update_series(send_event=True):
return
else:
# Get current shows in DB
current_shows_db = TableShows.select(TableShows.sonarrSeriesId).dicts()
current_shows_db_list = [x['sonarrSeriesId'] for x in current_shows_db]
current_shows_db = [x.sonarrSeriesId for x in
database.execute(
select(TableShows.sonarrSeriesId))
.all()]
current_shows_sonarr = []
series_to_update = []
series_to_add = []
series_count = len(series)
for i, show in enumerate(series):
@ -60,82 +59,60 @@ def update_series(send_event=True):
# Add shows in Sonarr to current shows list
current_shows_sonarr.append(show['id'])
if show['id'] in current_shows_db_list:
series_to_update.append(seriesParser(show, action='update', tags_dict=tagsDict,
serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles))
if show['id'] in current_shows_db:
updated_series = seriesParser(show, action='update', tags_dict=tagsDict,
serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles)
if not database.execute(
select(TableShows)
.filter_by(**updated_series))\
.first():
try:
database.execute(
update(TableShows)
.values(updated_series)
.where(TableShows.sonarrSeriesId == show['id']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update series {updated_series['path']} because of {e}")
continue
if send_event:
event_stream(type='series', payload=show['id'])
else:
series_to_add.append(seriesParser(show, action='insert', tags_dict=tagsDict,
serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles))
added_series = seriesParser(show, action='insert', tags_dict=tagsDict,
serie_default_profile=serie_default_profile,
audio_profiles=audio_profiles)
try:
database.execute(
insert(TableShows)
.values(added_series))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert series {added_series['path']} because of {e}")
continue
else:
list_missing_subtitles(no=show['id'])
if send_event:
event_stream(type='series', action='update', payload=show['id'])
sync_episodes(series_id=show['id'], send_event=send_event)
# Remove old series from DB
removed_series = list(set(current_shows_db) - set(current_shows_sonarr))
for series in removed_series:
database.execute(
delete(TableShows)
.where(TableShows.sonarrSeriesId == series))
if send_event:
event_stream(type='series', action='delete', payload=series)
if send_event:
hide_progress(id='series_progress')
# Remove old series from DB
removed_series = list(set(current_shows_db_list) - set(current_shows_sonarr))
for series in removed_series:
try:
TableShows.delete().where(TableShows.sonarrSeriesId == series).execute()
except Exception as e:
logging.error(f"BAZARR cannot delete series with sonarrSeriesId {series} because of {e}")
continue
else:
if send_event:
event_stream(type='series', action='delete', payload=series)
# Update existing series in DB
series_in_db_list = []
series_in_db = TableShows.select(TableShows.title,
TableShows.path,
TableShows.tvdbId,
TableShows.sonarrSeriesId,
TableShows.overview,
TableShows.poster,
TableShows.fanart,
TableShows.audio_language,
TableShows.sortTitle,
TableShows.year,
TableShows.alternativeTitles,
TableShows.tags,
TableShows.seriesType,
TableShows.imdbId,
TableShows.monitored).dicts()
for item in series_in_db:
series_in_db_list.append(item)
series_to_update_list = [i for i in series_to_update if i not in series_in_db_list]
for updated_series in series_to_update_list:
try:
TableShows.update(updated_series).where(TableShows.sonarrSeriesId ==
updated_series['sonarrSeriesId']).execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot update series {updated_series['path']} because of {e}")
continue
else:
if send_event:
event_stream(type='series', payload=updated_series['sonarrSeriesId'])
# Insert new series in DB
for added_series in series_to_add:
try:
result = TableShows.insert(added_series).on_conflict(action='IGNORE').execute()
except IntegrityError as e:
logging.error(f"BAZARR cannot insert series {added_series['path']} because of {e}")
continue
else:
if result:
list_missing_subtitles(no=added_series['sonarrSeriesId'])
else:
logging.debug('BAZARR unable to insert this series into the database:',
path_mappings.path_replace(added_series['path']))
if send_event:
event_stream(type='series', action='update', payload=added_series['sonarrSeriesId'])
logging.debug('BAZARR All series synced from Sonarr into database.')
@ -143,21 +120,19 @@ def update_one_series(series_id, action):
logging.debug('BAZARR syncing this specific series from Sonarr: {}'.format(series_id))
# Check if there's a row in database for this series ID
existing_series = TableShows.select(TableShows.path)\
.where(TableShows.sonarrSeriesId == series_id)\
.dicts()\
.get_or_none()
existing_series = database.execute(
select(TableShows)
.where(TableShows.sonarrSeriesId == series_id))\
.first()
# Delete series from DB
if action == 'deleted' and existing_series:
try:
TableShows.delete().where(TableShows.sonarrSeriesId == int(series_id)).execute()
except Exception as e:
logging.error(f"BAZARR cannot delete series with sonarrSeriesId {series_id} because of {e}")
else:
TableEpisodes.delete().where(TableEpisodes.sonarrSeriesId == int(series_id)).execute()
event_stream(type='series', action='delete', payload=int(series_id))
return
database.execute(
delete(TableShows)
.where(TableShows.sonarrSeriesId == int(series_id)))
event_stream(type='series', action='delete', payload=int(series_id))
return
serie_default_enabled = settings.general.getboolean('serie_default_enabled')
@ -196,7 +171,10 @@ def update_one_series(series_id, action):
# Update existing series in DB
if action == 'updated' and existing_series:
try:
TableShows.update(series).where(TableShows.sonarrSeriesId == series['sonarrSeriesId']).execute()
database.execute(
update(TableShows)
.values(series)
.where(TableShows.sonarrSeriesId == series['sonarrSeriesId']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update series {series['path']} because of {e}")
else:
@ -208,7 +186,9 @@ def update_one_series(series_id, action):
# Insert new series in DB
elif action == 'updated' and not existing_series:
try:
TableShows.insert(series).on_conflict(action='IGNORE').execute()
database.execute(
insert(TableShows)
.values(series))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert series {series['path']} because of {e}")
else:

View File

@ -13,7 +13,7 @@ from subliminal_patch.core_persistent import download_best_subtitles
from subliminal_patch.score import ComputeScore
from app.config import settings, get_array_from, get_scores
from app.database import TableEpisodes, TableMovies
from app.database import TableEpisodes, TableMovies, database, select
from utilities.path_mappings import path_mappings
from utilities.helper import get_target_folder, force_unicode
from languages.get_languages import alpha3_from_alpha2
@ -163,15 +163,15 @@ def parse_language_object(language):
def check_missing_languages(path, media_type):
# confirm if language is still missing or if cutoff has been reached
if media_type == 'series':
confirmed_missing_subs = TableEpisodes.select(TableEpisodes.missing_subtitles) \
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)) \
.dicts() \
.get_or_none()
confirmed_missing_subs = database.execute(
select(TableEpisodes.missing_subtitles)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)))\
.first()
else:
confirmed_missing_subs = TableMovies.select(TableMovies.missing_subtitles) \
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)) \
.dicts() \
.get_or_none()
confirmed_missing_subs = database.execute(
select(TableMovies.missing_subtitles)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)))\
.first()
if not confirmed_missing_subs:
reversed_path = path_mappings.path_replace_reverse(path) if media_type == 'series' else \
@ -180,7 +180,7 @@ def check_missing_languages(path, media_type):
return []
languages = []
for language in ast.literal_eval(confirmed_missing_subs['missing_subtitles']):
for language in ast.literal_eval(confirmed_missing_subs.missing_subtitles):
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"

View File

@ -8,7 +8,8 @@ import ast
from subliminal_patch import core, search_external_subtitles
from languages.custom_lang import CustomLanguage
from app.database import get_profiles_list, get_profile_cutoff, TableMovies, get_audio_profile_languages
from app.database import get_profiles_list, get_profile_cutoff, TableMovies, get_audio_profile_languages, database, \
update, select
from languages.get_languages import alpha2_from_alpha3, get_language_set
from app.config import settings
from utilities.helper import get_subtitle_destination_folder
@ -26,17 +27,17 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
logging.debug("BAZARR is trying to index embedded subtitles.")
item = TableMovies.select(TableMovies.movie_file_id, TableMovies.file_size)\
.where(TableMovies.path == original_path)\
.dicts()\
.get_or_none()
item = database.execute(
select(TableMovies.movie_file_id, TableMovies.file_size)
.where(TableMovies.path == original_path)) \
.first()
if not item:
logging.exception(f"BAZARR error when trying to select this movie from database: {reversed_path}")
else:
try:
subtitle_languages = embedded_subs_reader(reversed_path,
file_size=item['file_size'],
movie_file_id=item['movie_file_id'],
file_size=item.file_size,
movie_file_id=item.movie_file_id,
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
@ -56,35 +57,35 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
lang = lang + ':hi'
logging.debug("BAZARR embedded subtitles detected: " + lang)
actual_subtitles.append([lang, None, None])
except Exception:
logging.debug("BAZARR unable to index this unrecognized language: " + subtitle_language)
pass
except Exception as error:
logging.debug("BAZARR unable to index this unrecognized language: %s (%s)",
subtitle_language, error)
except Exception:
logging.exception(
"BAZARR error when trying to analyze this %s file: %s" % (os.path.splitext(reversed_path)[1],
reversed_path))
pass
try:
dest_folder = get_subtitle_destination_folder() or ''
dest_folder = get_subtitle_destination_folder()
core.CUSTOM_PATHS = [dest_folder] if dest_folder else []
# get previously indexed subtitles that haven't changed:
item = TableMovies.select(TableMovies.subtitles) \
.where(TableMovies.path == original_path) \
.dicts() \
.get_or_none()
item = database.execute(
select(TableMovies.subtitles)
.where(TableMovies.path == original_path))\
.first()
if not item:
previously_indexed_subtitles_to_exclude = []
else:
previously_indexed_subtitles = ast.literal_eval(item['subtitles']) if item['subtitles'] else []
previously_indexed_subtitles = ast.literal_eval(item.subtitles) if item.subtitles else []
previously_indexed_subtitles_to_exclude = [x for x in previously_indexed_subtitles
if len(x) == 3 and
x[1] and
os.path.isfile(path_mappings.path_replace(x[1])) and
os.stat(path_mappings.path_replace(x[1])).st_size == x[2]]
subtitles = search_external_subtitles(reversed_path, languages=get_language_set())
subtitles = search_external_subtitles(reversed_path, languages=get_language_set(),
only_one=settings.general.getboolean('single_language'))
full_dest_folder_path = os.path.dirname(reversed_path)
if dest_folder:
if settings.general.subfolder == "absolute":
@ -95,7 +96,6 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
previously_indexed_subtitles_to_exclude)
except Exception:
logging.exception("BAZARR unable to index external subtitles.")
pass
else:
for subtitle, language in subtitles.items():
valid_language = False
@ -114,7 +114,8 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
custom = CustomLanguage.found_external(subtitle, subtitle_path)
if custom is not None:
actual_subtitles.append([custom, path_mappings.path_replace_reverse_movie(subtitle_path)])
actual_subtitles.append([custom, path_mappings.path_replace_reverse_movie(subtitle_path),
os.stat(subtitle_path).st_size])
elif str(language.basename) != 'und':
if language.forced:
@ -127,15 +128,19 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
actual_subtitles.append([language_str, path_mappings.path_replace_reverse_movie(subtitle_path),
os.stat(subtitle_path).st_size])
TableMovies.update({TableMovies.subtitles: str(actual_subtitles)})\
.where(TableMovies.path == original_path)\
.execute()
matching_movies = TableMovies.select(TableMovies.radarrId).where(TableMovies.path == original_path).dicts()
database.execute(
update(TableMovies)
.values(subtitles=str(actual_subtitles))
.where(TableMovies.path == original_path))
matching_movies = database.execute(
select(TableMovies.radarrId)
.where(TableMovies.path == original_path))\
.all()
for movie in matching_movies:
if movie:
logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
list_missing_subtitles_movies(no=movie['radarrId'])
list_missing_subtitles_movies(no=movie.radarrId)
else:
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
else:
@ -147,39 +152,45 @@ def store_subtitles_movie(original_path, reversed_path, use_cache=True):
def list_missing_subtitles_movies(no=None, send_event=True):
movies_subtitles = TableMovies.select(TableMovies.radarrId,
TableMovies.subtitles,
TableMovies.profileId,
TableMovies.audio_language)\
.where((TableMovies.radarrId == no) if no else None)\
.dicts()
if isinstance(movies_subtitles, str):
logging.error("BAZARR list missing subtitles query to DB returned this instead of rows: " + movies_subtitles)
return
if no:
movies_subtitles = database.execute(
select(TableMovies.radarrId,
TableMovies.subtitles,
TableMovies.profileId,
TableMovies.audio_language)
.where(TableMovies.radarrId == no)) \
.all()
else:
movies_subtitles = database.execute(
select(TableMovies.radarrId,
TableMovies.subtitles,
TableMovies.profileId,
TableMovies.audio_language)) \
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
for movie_subtitles in movies_subtitles:
missing_subtitles_text = '[]'
if movie_subtitles['profileId']:
if movie_subtitles.profileId:
# get desired subtitles
desired_subtitles_temp = get_profiles_list(profile_id=movie_subtitles['profileId'])
desired_subtitles_temp = get_profiles_list(profile_id=movie_subtitles.profileId)
desired_subtitles_list = []
if desired_subtitles_temp:
for language in desired_subtitles_temp['items']:
if language['audio_exclude'] == "True":
if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
movie_subtitles['audio_language'])):
movie_subtitles.audio_language)):
continue
desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
# get existing subtitles
actual_subtitles_list = []
if movie_subtitles['subtitles'] is not None:
if movie_subtitles.subtitles is not None:
if use_embedded_subs:
actual_subtitles_temp = ast.literal_eval(movie_subtitles['subtitles'])
actual_subtitles_temp = ast.literal_eval(movie_subtitles.subtitles)
else:
actual_subtitles_temp = [x for x in ast.literal_eval(movie_subtitles['subtitles']) if x[1]]
actual_subtitles_temp = [x for x in ast.literal_eval(movie_subtitles.subtitles) if x[1]]
for subtitles in actual_subtitles_temp:
subtitles = subtitles[0].split(':')
@ -197,14 +208,14 @@ def list_missing_subtitles_movies(no=None, send_event=True):
# check if cutoff is reached and skip any further check
cutoff_met = False
cutoff_temp_list = get_profile_cutoff(profile_id=movie_subtitles['profileId'])
cutoff_temp_list = get_profile_cutoff(profile_id=movie_subtitles.profileId)
if cutoff_temp_list:
for cutoff_temp in cutoff_temp_list:
cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
if cutoff_temp['audio_exclude'] == 'True' and \
any(x['code2'] == cutoff_temp['language'] for x in
get_audio_profile_languages(movie_subtitles['audio_language'])):
get_audio_profile_languages(movie_subtitles.audio_language)):
cutoff_met = True
elif cutoff_language in actual_subtitles_list:
cutoff_met = True
@ -241,19 +252,22 @@ def list_missing_subtitles_movies(no=None, send_event=True):
missing_subtitles_text = str(missing_subtitles_output_list)
TableMovies.update({TableMovies.missing_subtitles: missing_subtitles_text})\
.where(TableMovies.radarrId == movie_subtitles['radarrId'])\
.execute()
database.execute(
update(TableMovies)
.values(missing_subtitles=missing_subtitles_text)
.where(TableMovies.radarrId == movie_subtitles.radarrId))
if send_event:
event_stream(type='movie', payload=movie_subtitles['radarrId'])
event_stream(type='movie-wanted', action='update', payload=movie_subtitles['radarrId'])
event_stream(type='movie', payload=movie_subtitles.radarrId)
event_stream(type='movie-wanted', action='update', payload=movie_subtitles.radarrId)
if send_event:
event_stream(type='badges')
def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe_cache')):
movies = TableMovies.select(TableMovies.path).dicts()
movies = database.execute(
select(TableMovies.path))\
.all()
count_movies = len(movies)
for i, movie in enumerate(movies):
@ -262,7 +276,7 @@ def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe
name='Movies subtitles',
value=i,
count=count_movies)
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']), use_cache=use_cache)
store_subtitles_movie(movie.path, path_mappings.path_replace_movie(movie.path), use_cache=use_cache)
hide_progress(id='movies_disk_scan')
@ -270,10 +284,11 @@ def movies_full_scan_subtitles(use_cache=settings.radarr.getboolean('use_ffprobe
def movies_scan_subtitles(no):
movies = TableMovies.select(TableMovies.path)\
.where(TableMovies.radarrId == no)\
.order_by(TableMovies.radarrId)\
.dicts()
movies = database.execute(
select(TableMovies.path)
.where(TableMovies.radarrId == no)
.order_by(TableMovies.radarrId)) \
.all()
for movie in movies:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']), use_cache=False)
store_subtitles_movie(movie.path, path_mappings.path_replace_movie(movie.path), use_cache=False)

View File

@ -8,7 +8,8 @@ import ast
from subliminal_patch import core, search_external_subtitles
from languages.custom_lang import CustomLanguage
from app.database import get_profiles_list, get_profile_cutoff, TableEpisodes, TableShows, get_audio_profile_languages
from app.database import get_profiles_list, get_profile_cutoff, TableEpisodes, TableShows, \
get_audio_profile_languages, database, update, select
from languages.get_languages import alpha2_from_alpha3, get_language_set
from app.config import settings
from utilities.helper import get_subtitle_destination_folder
@ -26,17 +27,17 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
if os.path.exists(reversed_path):
if settings.general.getboolean('use_embedded_subs'):
logging.debug("BAZARR is trying to index embedded subtitles.")
item = TableEpisodes.select(TableEpisodes.episode_file_id, TableEpisodes.file_size)\
.where(TableEpisodes.path == original_path)\
.dicts()\
.get_or_none()
item = database.execute(
select(TableEpisodes.episode_file_id, TableEpisodes.file_size)
.where(TableEpisodes.path == original_path))\
.first()
if not item:
logging.exception(f"BAZARR error when trying to select this episode from database: {reversed_path}")
else:
try:
subtitle_languages = embedded_subs_reader(reversed_path,
file_size=item['file_size'],
episode_file_id=item['episode_file_id'],
file_size=item.file_size,
episode_file_id=item.episode_file_id,
use_cache=use_cache)
for subtitle_language, subtitle_forced, subtitle_hi, subtitle_codec in subtitle_languages:
try:
@ -68,14 +69,14 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
core.CUSTOM_PATHS = [dest_folder] if dest_folder else []
# get previously indexed subtitles that haven't changed:
item = TableEpisodes.select(TableEpisodes.subtitles) \
.where(TableEpisodes.path == original_path) \
.dicts() \
.get_or_none()
item = database.execute(
select(TableEpisodes.subtitles)
.where(TableEpisodes.path == original_path)) \
.first()
if not item:
previously_indexed_subtitles_to_exclude = []
else:
previously_indexed_subtitles = ast.literal_eval(item['subtitles']) if item['subtitles'] else []
previously_indexed_subtitles = ast.literal_eval(item.subtitles) if item.subtitles else []
previously_indexed_subtitles_to_exclude = [x for x in previously_indexed_subtitles
if len(x) == 3 and
x[1] and
@ -112,9 +113,10 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
custom = CustomLanguage.found_external(subtitle, subtitle_path)
if custom is not None:
actual_subtitles.append([custom, path_mappings.path_replace_reverse(subtitle_path)])
actual_subtitles.append([custom, path_mappings.path_replace_reverse(subtitle_path),
os.stat(subtitle_path).st_size])
elif str(language) != 'und':
elif str(language.basename) != 'und':
if language.forced:
language_str = str(language)
elif language.hi:
@ -125,17 +127,19 @@ def store_subtitles(original_path, reversed_path, use_cache=True):
actual_subtitles.append([language_str, path_mappings.path_replace_reverse(subtitle_path),
os.stat(subtitle_path).st_size])
TableEpisodes.update({TableEpisodes.subtitles: str(actual_subtitles)})\
.where(TableEpisodes.path == original_path)\
.execute()
matching_episodes = TableEpisodes.select(TableEpisodes.sonarrEpisodeId, TableEpisodes.sonarrSeriesId)\
.where(TableEpisodes.path == original_path)\
.dicts()
database.execute(
update(TableEpisodes)
.values(subtitles=str(actual_subtitles))
.where(TableEpisodes.path == original_path))
matching_episodes = database.execute(
select(TableEpisodes.sonarrEpisodeId, TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.path == original_path))\
.all()
for episode in matching_episodes:
if episode:
logging.debug("BAZARR storing those languages to DB: " + str(actual_subtitles))
list_missing_subtitles(epno=episode['sonarrEpisodeId'])
list_missing_subtitles(epno=episode.sonarrEpisodeId)
else:
logging.debug("BAZARR haven't been able to update existing subtitles to DB : " + str(actual_subtitles))
else:
@ -153,41 +157,40 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
episodes_subtitles_clause = (TableEpisodes.sonarrSeriesId == no)
else:
episodes_subtitles_clause = None
episodes_subtitles = TableEpisodes.select(TableShows.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.subtitles,
TableShows.profileId,
TableEpisodes.audio_language)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where(episodes_subtitles_clause)\
.dicts()
if isinstance(episodes_subtitles, str):
logging.error("BAZARR list missing subtitles query to DB returned this instead of rows: " + episodes_subtitles)
return
episodes_subtitles = database.execute(
select(TableShows.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.subtitles,
TableShows.profileId,
TableEpisodes.audio_language)
.select_from(TableEpisodes)
.join(TableShows)
.where(episodes_subtitles_clause))\
.all()
use_embedded_subs = settings.general.getboolean('use_embedded_subs')
for episode_subtitles in episodes_subtitles:
missing_subtitles_text = '[]'
if episode_subtitles['profileId']:
if episode_subtitles.profileId:
# get desired subtitles
desired_subtitles_temp = get_profiles_list(profile_id=episode_subtitles['profileId'])
desired_subtitles_temp = get_profiles_list(profile_id=episode_subtitles.profileId)
desired_subtitles_list = []
if desired_subtitles_temp:
for language in desired_subtitles_temp['items']:
if language['audio_exclude'] == "True":
if any(x['code2'] == language['language'] for x in get_audio_profile_languages(
episode_subtitles['audio_language'])):
episode_subtitles.audio_language)):
continue
desired_subtitles_list.append([language['language'], language['forced'], language['hi']])
# get existing subtitles
actual_subtitles_list = []
if episode_subtitles['subtitles'] is not None:
if episode_subtitles.subtitles is not None:
if use_embedded_subs:
actual_subtitles_temp = ast.literal_eval(episode_subtitles['subtitles'])
actual_subtitles_temp = ast.literal_eval(episode_subtitles.subtitles)
else:
actual_subtitles_temp = [x for x in ast.literal_eval(episode_subtitles['subtitles']) if x[1]]
actual_subtitles_temp = [x for x in ast.literal_eval(episode_subtitles.subtitles) if x[1]]
for subtitles in actual_subtitles_temp:
subtitles = subtitles[0].split(':')
@ -205,14 +208,14 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
# check if cutoff is reached and skip any further check
cutoff_met = False
cutoff_temp_list = get_profile_cutoff(profile_id=episode_subtitles['profileId'])
cutoff_temp_list = get_profile_cutoff(profile_id=episode_subtitles.profileId)
if cutoff_temp_list:
for cutoff_temp in cutoff_temp_list:
cutoff_language = [cutoff_temp['language'], cutoff_temp['forced'], cutoff_temp['hi']]
if cutoff_temp['audio_exclude'] == 'True' and \
any(x['code2'] == cutoff_temp['language'] for x in
get_audio_profile_languages(episode_subtitles['audio_language'])):
get_audio_profile_languages(episode_subtitles.audio_language)):
cutoff_met = True
elif cutoff_language in actual_subtitles_list:
cutoff_met = True
@ -251,19 +254,22 @@ def list_missing_subtitles(no=None, epno=None, send_event=True):
missing_subtitles_text = str(missing_subtitles_output_list)
TableEpisodes.update({TableEpisodes.missing_subtitles: missing_subtitles_text})\
.where(TableEpisodes.sonarrEpisodeId == episode_subtitles['sonarrEpisodeId'])\
.execute()
database.execute(
update(TableEpisodes)
.values(missing_subtitles=missing_subtitles_text)
.where(TableEpisodes.sonarrEpisodeId == episode_subtitles.sonarrEpisodeId))
if send_event:
event_stream(type='episode', payload=episode_subtitles['sonarrEpisodeId'])
event_stream(type='episode-wanted', action='update', payload=episode_subtitles['sonarrEpisodeId'])
event_stream(type='episode', payload=episode_subtitles.sonarrEpisodeId)
event_stream(type='episode-wanted', action='update', payload=episode_subtitles.sonarrEpisodeId)
if send_event:
event_stream(type='badges')
def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe_cache')):
episodes = TableEpisodes.select(TableEpisodes.path).dicts()
episodes = database.execute(
select(TableEpisodes.path))\
.all()
count_episodes = len(episodes)
for i, episode in enumerate(episodes):
@ -272,7 +278,7 @@ def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe
name='Episodes subtitles',
value=i,
count=count_episodes)
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=use_cache)
store_subtitles(episode.path, path_mappings.path_replace(episode.path), use_cache=use_cache)
hide_progress(id='episodes_disk_scan')
@ -280,10 +286,11 @@ def series_full_scan_subtitles(use_cache=settings.sonarr.getboolean('use_ffprobe
def series_scan_subtitles(no):
episodes = TableEpisodes.select(TableEpisodes.path)\
.where(TableEpisodes.sonarrSeriesId == no)\
.order_by(TableEpisodes.sonarrEpisodeId)\
.dicts()
episodes = database.execute(
select(TableEpisodes.path)
.where(TableEpisodes.sonarrSeriesId == no)
.order_by(TableEpisodes.sonarrEpisodeId))\
.all()
for episode in episodes:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']), use_cache=False)
store_subtitles(episode.path, path_mappings.path_replace(episode.path), use_cache=False)

View File

@ -77,7 +77,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
text = f.read()
encoding = detect(text)
if encoding and 'encoding' in encoding:
if encoding and 'encoding' in encoding and encoding['encoding']:
encoding = detect(text)['encoding']
else:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "
@ -129,7 +129,7 @@ def guess_external_subtitles(dest_folder, subtitles, media_type, previously_inde
text = f.read()
encoding = detect(text)
if encoding and 'encoding' in encoding:
if encoding and 'encoding' in encoding and encoding['encoding']:
encoding = detect(text)['encoding']
else:
logging.debug("BAZARR skipping this subtitles because we can't guess the encoding. "

View File

@ -41,7 +41,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
video = get_video(force_unicode(path), title, sceneName, providers=providers, media_type=media_type)
else:
logging.info("BAZARR All providers are throttled")
return None
return 'All providers are throttled'
if video:
try:
if providers:
@ -62,7 +62,7 @@ def manual_search(path, profile_id, providers, sceneName, title, media_type):
else:
subtitles = []
logging.info("BAZARR All providers are throttled")
return None
return 'All providers are throttled'
except Exception:
logging.exception("BAZARR Error trying to get Subtitle list from provider for this file: " + path)
else:
@ -183,14 +183,14 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
else:
logging.info("BAZARR All providers are throttled")
return None
return 'All providers are throttled'
except Exception:
logging.exception('BAZARR Error downloading Subtitles for this file ' + path)
return None
return 'Error downloading Subtitles'
else:
if not subtitle.is_valid():
logging.exception('BAZARR No valid Subtitles file found for this file: ' + path)
return
return 'No valid Subtitles file found'
try:
chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
'win') and settings.general.getboolean('chmod_enabled') else None
@ -203,7 +203,7 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
path_decoder=force_unicode)
except Exception:
logging.exception('BAZARR Error saving Subtitles file to disk for this file:' + path)
return
return 'Error saving Subtitles file to disk'
else:
if saved_subtitles:
_, max_score, _ = _get_scores(media_type)
@ -221,7 +221,7 @@ def manual_download_subtitle(path, audio_language, hi, forced, subtitle, provide
"BAZARR Tried to manually download a Subtitles for file: " + path
+ " but we weren't able to do (probably throttled by " + str(subtitle.provider_name)
+ ". Please retry later or select a Subtitles from another provider.")
return None
return 'Something went wrong, check the logs for error'
subliminal.region.backend.sync()

View File

@ -4,6 +4,7 @@
import ast
import logging
import operator
import os
from functools import reduce
@ -12,7 +13,7 @@ from subtitles.indexer.movies import store_subtitles_movie
from radarr.history import history_log_movie
from app.notifier import send_notifications_movie
from app.get_providers import get_providers
from app.database import get_exclusion_clause, get_audio_profile_languages, TableMovies
from app.database import get_exclusion_clause, get_audio_profile_languages, TableMovies, database, select
from app.event_handler import show_progress, hide_progress
from ..download import generate_subtitles
@ -21,28 +22,32 @@ from ..download import generate_subtitles
def movies_download_subtitles(no):
conditions = [(TableMovies.radarrId == no)]
conditions += get_exclusion_clause('movie')
movies = TableMovies.select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.audio_language,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.title,
TableMovies.tags,
TableMovies.monitored)\
.where(reduce(operator.and_, conditions))\
.dicts()
if not len(movies):
movie = database.execute(
select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.audio_language,
TableMovies.radarrId,
TableMovies.sceneName,
TableMovies.title,
TableMovies.tags,
TableMovies.monitored)
.where(reduce(operator.and_, conditions))) \
.first()
if not len(movie):
logging.debug("BAZARR no movie with that radarrId can be found in database:", str(no))
return
else:
movie = movies[0]
if ast.literal_eval(movie['missing_subtitles']):
count_movie = len(ast.literal_eval(movie['missing_subtitles']))
moviePath = path_mappings.path_replace_movie(movie.path)
if not os.path.exists(moviePath):
raise OSError
if ast.literal_eval(movie.missing_subtitles):
count_movie = len(ast.literal_eval(movie.missing_subtitles))
else:
count_movie = 0
audio_language_list = get_audio_profile_languages(movie['audio_language'])
audio_language_list = get_audio_profile_languages(movie.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -50,7 +55,7 @@ def movies_download_subtitles(no):
languages = []
for language in ast.literal_eval(movie['missing_subtitles']):
for language in ast.literal_eval(movie.missing_subtitles):
providers_list = get_providers()
if providers_list:
@ -64,20 +69,20 @@ def movies_download_subtitles(no):
show_progress(id='movie_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name=movie['title'],
name=movie.title,
value=0,
count=count_movie)
for result in generate_subtitles(path_mappings.path_replace_movie(movie['path']),
for result in generate_subtitles(moviePath,
languages,
audio_language,
str(movie['sceneName']),
movie['title'],
str(movie.sceneName),
movie.title,
'movie',
check_if_still_required=True):
if result:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
store_subtitles_movie(movie.path, moviePath)
history_log_movie(1, no, result)
send_notifications_movie(no, result.message)

View File

@ -4,6 +4,7 @@
import ast
import logging
import operator
import os
from functools import reduce
@ -12,31 +13,41 @@ from subtitles.indexer.series import store_subtitles
from sonarr.history import history_log
from app.notifier import send_notifications
from app.get_providers import get_providers
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes, database, select
from app.event_handler import show_progress, hide_progress
from ..download import generate_subtitles
def series_download_subtitles(no):
series_row = database.execute(
select(TableShows.path)
.where(TableShows.sonarrSeriesId == no))\
.first()
if series_row and not os.path.exists(path_mappings.path_replace(series_row.path)):
raise OSError
conditions = [(TableEpisodes.sonarrSeriesId == no),
(TableEpisodes.missing_subtitles != '[]')]
conditions += get_exclusion_clause('series')
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.seriesType,
TableEpisodes.audio_language,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle')) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, conditions)) \
.dicts()
episodes_details = database.execute(
select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.seriesType,
TableEpisodes.audio_language,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.label('episodeTitle'))
.select_from(TableEpisodes)
.join(TableShows)
.where(reduce(operator.and_, conditions))) \
.all()
if not episodes_details:
logging.debug("BAZARR no episode for that sonarrSeriesId have been found in database or they have all been "
"ignored because of monitored status, series type or series tags: {}".format(no))
@ -50,21 +61,21 @@ def series_download_subtitles(no):
if providers_list:
show_progress(id='series_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
value=i,
count=count_episodes_details)
audio_language_list = get_audio_profile_languages(episode['audio_language'])
audio_language_list = get_audio_profile_languages(episode.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
for language in ast.literal_eval(episode.missing_subtitles):
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
@ -73,17 +84,17 @@ def series_download_subtitles(no):
if not languages:
continue
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
for result in generate_subtitles(path_mappings.path_replace(episode.path),
languages,
audio_language,
str(episode['sceneName']),
episode['title'],
str(episode.sceneName),
episode.title,
'series',
check_if_still_required=True):
if result:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, no, episode['sonarrEpisodeId'], result)
send_notifications(no, episode['sonarrEpisodeId'], result.message)
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, no, episode.sonarrEpisodeId, result)
send_notifications(no, episode.sonarrEpisodeId, result.message)
else:
logging.info("BAZARR All providers are throttled")
break
@ -94,22 +105,24 @@ def series_download_subtitles(no):
def episode_download_subtitles(no, send_progress=False):
conditions = [(TableEpisodes.sonarrEpisodeId == no)]
conditions += get_exclusion_clause('series')
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.title,
TableShows.sonarrSeriesId,
TableEpisodes.audio_language,
TableShows.seriesType,
TableEpisodes.title.alias('episodeTitle'),
TableEpisodes.season,
TableEpisodes.episode) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(reduce(operator.and_, conditions)) \
.dicts()
episodes_details = database.execute(
select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.monitored,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sceneName,
TableShows.tags,
TableShows.title,
TableShows.sonarrSeriesId,
TableEpisodes.audio_language,
TableShows.seriesType,
TableEpisodes.title.label('episodeTitle'),
TableEpisodes.season,
TableEpisodes.episode)
.select_from(TableEpisodes)
.join(TableShows)
.where(reduce(operator.and_, conditions))) \
.all()
if not episodes_details:
logging.debug("BAZARR no episode with that sonarrEpisodeId can be found in database:", str(no))
return
@ -121,21 +134,21 @@ def episode_download_subtitles(no, send_progress=False):
if send_progress:
show_progress(id='episode_search_progress_{}'.format(no),
header='Searching missing subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
value=0,
count=1)
audio_language_list = get_audio_profile_languages(episode['audio_language'])
audio_language_list = get_audio_profile_languages(episode.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
for language in ast.literal_eval(episode.missing_subtitles):
if language is not None:
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
@ -144,17 +157,17 @@ def episode_download_subtitles(no, send_progress=False):
if not languages:
continue
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
for result in generate_subtitles(path_mappings.path_replace(episode.path),
languages,
audio_language,
str(episode['sceneName']),
episode['title'],
str(episode.sceneName),
episode.title,
'series',
check_if_still_required=True):
if result:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, episode.sonarrSeriesId, episode.sonarrEpisodeId, result)
send_notifications(episode.sonarrSeriesId, episode.sonarrEpisodeId, result.message)
if send_progress:
hide_progress(id='episode_search_progress_{}'.format(no))

View File

@ -7,7 +7,7 @@ from app.config import settings
from utilities.path_mappings import path_mappings
from utilities.post_processing import pp_replace, set_chmod
from languages.get_languages import alpha2_from_alpha3, alpha2_from_language, alpha3_from_language, language_from_alpha3
from app.database import TableEpisodes, TableMovies
from app.database import TableEpisodes, TableMovies, database, select
from utilities.analytics import event_tracker
from radarr.notify import notify_radarr
from sonarr.notify import notify_sonarr
@ -15,17 +15,20 @@ from app.event_handler import event_stream
from .utils import _get_download_code3
from .post_processing import postprocessing
from .utils import _get_scores
class ProcessSubtitlesResult:
def __init__(self, message, reversed_path, downloaded_language_code2, downloaded_provider, score, forced,
subtitle_id, reversed_subtitles_path, hearing_impaired):
subtitle_id, reversed_subtitles_path, hearing_impaired, matched=None, not_matched=None):
self.message = message
self.path = reversed_path
self.provider = downloaded_provider
self.score = score
self.subs_id = subtitle_id
self.subs_path = reversed_subtitles_path
self.matched = matched
self.not_matched = not_matched
if hearing_impaired:
self.language_code = downloaded_language_code2 + ":hi"
@ -67,39 +70,38 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
downloaded_provider + " with a score of " + str(percent_score) + "%."
if media_type == 'series':
episode_metadata = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId) \
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)) \
.dicts() \
.get_or_none()
episode_metadata = database.execute(
select(TableEpisodes.sonarrSeriesId, TableEpisodes.sonarrEpisodeId)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)))\
.first()
if not episode_metadata:
return
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
series_id = episode_metadata.sonarrSeriesId
episode_id = episode_metadata.sonarrEpisodeId
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
sonarr_series_id=episode_metadata['sonarrSeriesId'],
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
sonarr_series_id=episode_metadata.sonarrSeriesId,
sonarr_episode_id=episode_metadata.sonarrEpisodeId)
else:
movie_metadata = TableMovies.select(TableMovies.radarrId) \
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)) \
.dicts() \
.get_or_none()
movie_metadata = database.execute(
select(TableMovies.radarrId)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)))\
.first()
if not movie_metadata:
return
series_id = ""
episode_id = movie_metadata['radarrId']
episode_id = movie_metadata.radarrId
from .sync import sync_subtitles
sync_subtitles(video_path=path, srt_path=downloaded_path,
forced=subtitle.language.forced,
srt_lang=downloaded_language_code2, media_type=media_type,
percent_score=percent_score,
radarr_id=movie_metadata['radarrId'])
radarr_id=movie_metadata.radarrId)
if use_postprocessing is True:
command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language, downloaded_language_code2,
@ -124,18 +126,18 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
reversed_subtitles_path = path_mappings.path_replace_reverse(downloaded_path)
notify_sonarr(episode_metadata['sonarrSeriesId'])
event_stream(type='series', action='update', payload=episode_metadata['sonarrSeriesId'])
notify_sonarr(episode_metadata.sonarrSeriesId)
event_stream(type='series', action='update', payload=episode_metadata.sonarrSeriesId)
event_stream(type='episode-wanted', action='delete',
payload=episode_metadata['sonarrEpisodeId'])
payload=episode_metadata.sonarrEpisodeId)
else:
reversed_path = path_mappings.path_replace_reverse_movie(path)
reversed_subtitles_path = path_mappings.path_replace_reverse_movie(downloaded_path)
notify_radarr(movie_metadata['radarrId'])
event_stream(type='movie-wanted', action='delete', payload=movie_metadata['radarrId'])
notify_radarr(movie_metadata.radarrId)
event_stream(type='movie-wanted', action='delete', payload=movie_metadata.radarrId)
event_tracker.track(provider=downloaded_provider, action=action, language=downloaded_language)
event_tracker.track_subtitles(provider=downloaded_provider, action=action, language=downloaded_language)
return ProcessSubtitlesResult(message=message,
reversed_path=reversed_path,
@ -145,4 +147,15 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
forced=subtitle.language.forced,
subtitle_id=subtitle.id,
reversed_subtitles_path=reversed_subtitles_path,
hearing_impaired=subtitle.language.hi)
hearing_impaired=subtitle.language.hi,
matched=list(subtitle.matches or []),
not_matched=_get_not_matched(subtitle, media_type))
def _get_not_matched(subtitle, media_type):
_, _, scores = _get_scores(media_type)
if 'hash' not in subtitle.matches:
return list(set(scores) - set(subtitle.matches))
else:
return []

View File

@ -7,7 +7,7 @@ import re
from subliminal import Episode, Movie
from utilities.path_mappings import path_mappings
from app.database import TableShows, TableEpisodes, TableMovies
from app.database import TableShows, TableEpisodes, TableMovies, database, select
from .utils import convert_to_guessit
@ -17,84 +17,85 @@ _TITLE_RE = re.compile(r'\s(\(\d{4}\))')
def refine_from_db(path, video):
if isinstance(video, Episode):
data = TableEpisodes.select(TableShows.title.alias('seriesTitle'),
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle'),
TableShows.year,
TableShows.tvdbId,
TableShows.alternativeTitles,
TableEpisodes.format,
TableEpisodes.resolution,
TableEpisodes.video_codec,
TableEpisodes.audio_codec,
TableEpisodes.path,
TableShows.imdbId)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where((TableEpisodes.path == path_mappings.path_replace_reverse(path)))\
.dicts()
data = database.execute(
select(TableShows.title.label('seriesTitle'),
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.label('episodeTitle'),
TableShows.year,
TableShows.tvdbId,
TableShows.alternativeTitles,
TableEpisodes.format,
TableEpisodes.resolution,
TableEpisodes.video_codec,
TableEpisodes.audio_codec,
TableEpisodes.path,
TableShows.imdbId)
.select_from(TableEpisodes)
.join(TableShows)
.where((TableEpisodes.path == path_mappings.path_replace_reverse(path)))) \
.first()
if len(data):
data = data[0]
video.series = _TITLE_RE.sub('', data['seriesTitle'])
video.season = int(data['season'])
video.episode = int(data['episode'])
video.title = data['episodeTitle']
if data:
video.series = _TITLE_RE.sub('', data.seriesTitle)
video.season = int(data.season)
video.episode = int(data.episode)
video.title = data.episodeTitle
# Only refine year as a fallback
if not video.year and data['year']:
if int(data['year']) > 0:
video.year = int(data['year'])
if not video.year and data.year:
if int(data.year) > 0:
video.year = int(data.year)
video.series_tvdb_id = int(data['tvdbId'])
video.alternative_series = ast.literal_eval(data['alternativeTitles'])
if data['imdbId'] and not video.series_imdb_id:
video.series_imdb_id = data['imdbId']
video.series_tvdb_id = int(data.tvdbId)
video.alternative_series = ast.literal_eval(data.alternativeTitles)
if data.imdbId and not video.series_imdb_id:
video.series_imdb_id = data.imdbId
if not video.source:
video.source = convert_to_guessit('source', str(data['format']))
video.source = convert_to_guessit('source', str(data.format))
if not video.resolution:
video.resolution = str(data['resolution'])
video.resolution = str(data.resolution)
if not video.video_codec:
if data['video_codec']:
video.video_codec = convert_to_guessit('video_codec', data['video_codec'])
if data.video_codec:
video.video_codec = convert_to_guessit('video_codec', data.video_codec)
if not video.audio_codec:
if data['audio_codec']:
video.audio_codec = convert_to_guessit('audio_codec', data['audio_codec'])
if data.audio_codec:
video.audio_codec = convert_to_guessit('audio_codec', data.audio_codec)
elif isinstance(video, Movie):
data = TableMovies.select(TableMovies.title,
TableMovies.year,
TableMovies.alternativeTitles,
TableMovies.format,
TableMovies.resolution,
TableMovies.video_codec,
TableMovies.audio_codec,
TableMovies.imdbId)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()
data = database.execute(
select(TableMovies.title,
TableMovies.year,
TableMovies.alternativeTitles,
TableMovies.format,
TableMovies.resolution,
TableMovies.video_codec,
TableMovies.audio_codec,
TableMovies.imdbId)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))) \
.first()
if len(data):
data = data[0]
video.title = _TITLE_RE.sub('', data['title'])
if data:
video.title = _TITLE_RE.sub('', data.title)
# Only refine year as a fallback
if not video.year and data['year']:
if int(data['year']) > 0:
video.year = int(data['year'])
if not video.year and data.year:
if int(data.year) > 0:
video.year = int(data.year)
if data['imdbId'] and not video.imdb_id:
video.imdb_id = data['imdbId']
video.alternative_titles = ast.literal_eval(data['alternativeTitles'])
if data.imdbId and not video.imdb_id:
video.imdb_id = data.imdbId
video.alternative_titles = ast.literal_eval(data.alternativeTitles)
if not video.source:
if data['format']:
video.source = convert_to_guessit('source', data['format'])
if data.format:
video.source = convert_to_guessit('source', data.format)
if not video.resolution:
if data['resolution']:
video.resolution = data['resolution']
if data.resolution:
video.resolution = data.resolution
if not video.video_codec:
if data['video_codec']:
video.video_codec = convert_to_guessit('video_codec', data['video_codec'])
if data.video_codec:
video.video_codec = convert_to_guessit('video_codec', data.video_codec)
if not video.audio_codec:
if data['audio_codec']:
video.audio_codec = convert_to_guessit('audio_codec', data['audio_codec'])
if data.audio_codec:
video.audio_codec = convert_to_guessit('audio_codec', data.audio_codec)
return video

View File

@ -6,31 +6,31 @@ import logging
from subliminal import Movie
from utilities.path_mappings import path_mappings
from app.database import TableEpisodes, TableMovies
from app.database import TableEpisodes, TableMovies, database, select
from utilities.video_analyzer import parse_video_metadata
def refine_from_ffprobe(path, video):
if isinstance(video, Movie):
file_id = TableMovies.select(TableMovies.movie_file_id, TableMovies.file_size)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))\
.dicts()\
.get_or_none()
file_id = database.execute(
select(TableMovies.movie_file_id, TableMovies.file_size)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))) \
.first()
else:
file_id = TableEpisodes.select(TableEpisodes.episode_file_id, TableEpisodes.file_size)\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path))\
.dicts()\
.get_or_none()
file_id = database.execute(
select(TableEpisodes.episode_file_id, TableEpisodes.file_size)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)))\
.first()
if not file_id:
return video
if isinstance(video, Movie):
data = parse_video_metadata(file=path, file_size=file_id['file_size'],
movie_file_id=file_id['movie_file_id'])
data = parse_video_metadata(file=path, file_size=file_id.file_size,
movie_file_id=file_id.movie_file_id)
else:
data = parse_video_metadata(file=path, file_size=file_id['file_size'],
episode_file_id=file_id['episode_file_id'])
data = parse_video_metadata(file=path, file_size=file_id.file_size,
episode_file_id=file_id.episode_file_id)
if not data or ('ffprobe' not in data and 'mediainfo' not in data):
logging.debug("No cache available for this file: {}".format(path))

View File

@ -3,145 +3,15 @@
from __future__ import annotations
import logging
import re
from app.config import get_settings
from app.database import TableCustomScoreProfileConditions as conditions_table, TableCustomScoreProfiles as \
profiles_table
logger = logging.getLogger(__name__)
class Condition:
"""Base class for score conditions. Every condition can take the amount
of attributes needed from a subtitle object in order to find a match."""
type = None
against = ()
# {type: provider, value: subdivx, required: False, negate: False}
def __init__(self, value: str, required=False, negate=False, **kwargs):
self._value = str(value)
self._negate = negate
self.required = required
@classmethod
def from_dict(cls, item: dict) -> Condition:
"""A factory method to create a condition object from a database
dictionary."""
try:
new = _registered_conditions[item["type"]]
except IndexError:
raise NotImplementedError(f"{item} condition doesn't have a class.")
return new(**item)
def check(self, subtitle) -> bool:
"""Check if the condition is met against a Subtitle object. **May be implemented
in a subclass**."""
to_match = [str(getattr(subtitle, name, None)) for name in self.against]
met = any(item == self._value for item in to_match)
if met and not self._negate:
return True
return not met and self._negate
def __repr__(self) -> str:
return f"<Condition {self.type}={self._value} (r:{self.required} n:{self._negate})>"
class ProviderCondition(Condition):
type = "provider"
against = ("provider_name",)
class UploaderCondition(Condition):
type = "uploader"
against = ("uploader",)
class LanguageCondition(Condition):
type = "language"
against = ("language",)
class RegexCondition(Condition):
type = "regex"
against = ("release_info", "filename")
def check(self, subtitle):
to_match = [str(getattr(subtitle, name, None)) for name in self.against]
met = re.search(rf"{self._value}", "".join(to_match)) is not None
if met and not self._negate:
return True
return not met and self._negate
class CustomScoreProfile:
table = profiles_table
conditions_table = conditions_table
def __init__(self, id=None, name=None, score=0, media=None):
self.id = id
self.name = name or "N/A"
self.score = score
self.media = media
self._conditions = []
self._conditions_loaded = False
def load_conditions(self):
try:
self._conditions = [
Condition.from_dict(item)
for item in self.conditions_table.select()
.where(self.conditions_table.profile_id == self.id)
.dicts()
]
except self.conditions_table.DoesNotExist:
logger.debug("Conditions not found for %s", self)
self._conditions = []
self._conditions_loaded = True
def check(self, subtitle):
# Avoid calling the database on every score check
if not self._conditions_loaded:
self.load_conditions()
# Always return False if no conditions are set
if not self._conditions:
logger.debug("No conditions found in db for %s", self)
return False
return self._check_conditions(subtitle)
def _check_conditions(self, subtitle):
logger.debug("Checking conditions for %s profile", self)
matches = []
for condition in self._conditions:
matched = condition.check(subtitle)
if matched is True:
logger.debug("%s Condition met", condition)
matches.append(True)
elif condition.required and not matched:
logger.debug("%s not met, discarding profile", condition)
return False
met = True in matches
logger.debug("Profile conditions met? %s", met)
return met
def __repr__(self):
return f"<ScoreProfile {self.name} (score: {self.score})>"
class Score:
media = None
defaults = {}
profiles_table = profiles_table
def __init__(self, load_profiles=False, **kwargs):
self.data = self.defaults.copy()
@ -162,17 +32,11 @@ class Score:
matches.add(profile.name)
def load_profiles(self):
"""Load the profiles associated with the class. This method must be called
after every custom profile creation or update."""
try:
self._profiles = [
CustomScoreProfile(**item)
for item in self.profiles_table.select()
.where(self.profiles_table.media == self.media)
.dicts()
]
self._profiles = []
if self._profiles:
logger.debug("Loaded profiles: %s", self._profiles)
except self.profiles_table.DoesNotExist:
else:
logger.debug("No score profiles found")
self._profiles = []
@ -272,12 +136,5 @@ class MovieScore(Score):
self.data.update(kwargs["movie_scores"])
_registered_conditions = {
"provider": ProviderCondition,
"uploader": UploaderCondition,
"language": LanguageCondition,
"regex": RegexCondition,
}
series_score = SeriesScore.from_config(**get_settings())
movie_score = MovieScore.from_config(**get_settings())

View File

@ -52,24 +52,25 @@ class SubSyncer:
logging.debug('BAZARR FFmpeg used is %s', ffmpeg_exe)
self.ffmpeg_path = os.path.dirname(ffmpeg_exe)
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path, '--vad',
self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.getboolean('force_audio'):
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.getboolean('debug'):
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
if os.path.isfile(self.srtout):
os.remove(self.srtout)
logging.debug('BAZARR deleted the previous subtitles synchronization attempt file.')
try:
unparsed_args = [self.reference, '-i', self.srtin, '-o', self.srtout, '--ffmpegpath', self.ffmpeg_path,
'--vad', self.vad, '--log-dir-path', self.log_dir_path]
if settings.subsync.getboolean('force_audio'):
unparsed_args.append('--no-fix-framerate')
unparsed_args.append('--reference-stream')
unparsed_args.append('a:0')
if settings.subsync.getboolean('debug'):
unparsed_args.append('--make-test-case')
parser = make_parser()
self.args = parser.parse_args(args=unparsed_args)
if os.path.isfile(self.srtout):
os.remove(self.srtout)
logging.debug('BAZARR deleted the previous subtitles synchronization attempt file.')
result = run(self.args)
except Exception:
logging.exception('BAZARR an exception occurs during the synchronization process for this subtitles: '
'{0}'.format(self.srtin))
raise OSError
else:
if settings.subsync.getboolean('debug'):
return result

View File

@ -81,7 +81,11 @@ def translate_subtitles_file(video_path, source_srt_file, from_lang, to_lang, fo
except IndexError:
logging.error(f'BAZARR is unable to translate malformed subtitles: {source_srt_file}')
return False
subs.save(dest_srt_file)
try:
subs.save(dest_srt_file)
except OSError:
logging.error(f'BAZARR is unable to save translated subtitles to {dest_srt_file}')
raise OSError
message = f"{language_from_alpha2(from_lang)} subtitles translated to {language_from_alpha3(to_lang)}."

View File

@ -3,13 +3,15 @@
import logging
import operator
import os
import ast
from datetime import datetime, timedelta
from functools import reduce
from sqlalchemy import and_
from app.config import settings
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes, TableMovies, \
TableHistory, TableHistoryMovie, get_profiles_list
TableHistory, TableHistoryMovie, database, select, func, get_profiles_list
from app.event_handler import show_progress, hide_progress
from app.get_providers import get_providers
from app.notifier import send_notifications, send_notifications_movie
@ -27,9 +29,60 @@ def upgrade_subtitles():
if use_sonarr:
episodes_to_upgrade = get_upgradable_episode_subtitles()
count_episode_to_upgrade = len(episodes_to_upgrade)
episodes_data = [{
'id': x.id,
'seriesTitle': x.seriesTitle,
'season': x.season,
'episode': x.episode,
'title': x.title,
'language': x.language,
'audio_language': x.audio_language,
'video_path': x.video_path,
'sceneName': x.sceneName,
'score': x.score,
'sonarrEpisodeId': x.sonarrEpisodeId,
'sonarrSeriesId': x.sonarrSeriesId,
'subtitles_path': x.subtitles_path,
'path': x.path,
'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
'upgradable': bool(x.upgradable),
} for x in database.execute(
select(TableHistory.id,
TableShows.title.label('seriesTitle'),
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title,
TableHistory.language,
TableEpisodes.audio_language,
TableHistory.video_path,
TableEpisodes.sceneName,
TableHistory.score,
TableHistory.sonarrEpisodeId,
TableHistory.sonarrSeriesId,
TableHistory.subtitles_path,
TableEpisodes.path,
TableShows.profileId,
TableEpisodes.subtitles.label('external_subtitles'),
episodes_to_upgrade.c.id.label('upgradable'))
.select_from(TableHistory)
.join(TableShows, onclause=TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)
.join(TableEpisodes, onclause=TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)
.join(episodes_to_upgrade, onclause=TableHistory.id == episodes_to_upgrade.c.id, isouter=True)
.where(episodes_to_upgrade.c.id.is_not(None)))
.all() if _language_still_desired(x.language, x.profileId)]
for i, episode in enumerate(episodes_to_upgrade):
for item in episodes_data:
if item['upgradable']:
if item['subtitles_path'] not in item['external_subtitles'] or \
not item['video_path'] == item['path']:
item.update({"upgradable": False})
del item['path']
del item['external_subtitles']
count_episode_to_upgrade = len(episodes_data)
for i, episode in enumerate(episodes_data):
providers_list = get_providers()
show_progress(id='upgrade_episodes_progress',
@ -72,9 +125,49 @@ def upgrade_subtitles():
if use_radarr:
movies_to_upgrade = get_upgradable_movies_subtitles()
count_movie_to_upgrade = len(movies_to_upgrade)
movies_data = [{
'title': x.title,
'language': x.language,
'audio_language': x.audio_language,
'video_path': x.video_path,
'sceneName': x.sceneName,
'score': x.score,
'radarrId': x.radarrId,
'path': x.path,
'subtitles_path': x.subtitles_path,
'external_subtitles': [y[1] for y in ast.literal_eval(x.external_subtitles) if y[1]],
'upgradable': bool(x.upgradable),
} for x in database.execute(
select(TableMovies.title,
TableHistoryMovie.language,
TableMovies.audio_language,
TableHistoryMovie.video_path,
TableMovies.sceneName,
TableHistoryMovie.score,
TableHistoryMovie.radarrId,
TableHistoryMovie.subtitles_path,
TableMovies.path,
TableMovies.profileId,
TableMovies.subtitles.label('external_subtitles'),
movies_to_upgrade.c.id.label('upgradable'))
.select_from(TableHistoryMovie)
.join(TableMovies, onclause=TableHistoryMovie.radarrId == TableMovies.radarrId)
.join(movies_to_upgrade, onclause=TableHistoryMovie.id == movies_to_upgrade.c.id, isouter=True)
.where(movies_to_upgrade.c.id.is_not(None)))
.all() if _language_still_desired(x.language, x.profileId)]
for i, movie in enumerate(movies_to_upgrade):
for item in movies_data:
if item['upgradable']:
if item['subtitles_path'] not in item['external_subtitles'] or \
not item['video_path'] == item['path']:
item.update({"upgradable": False})
del item['path']
del item['external_subtitles']
count_movie_to_upgrade = len(movies_data)
for i, movie in enumerate(movies_data):
providers_list = get_providers()
show_progress(id='upgrade_movies_progress',
@ -127,45 +220,6 @@ def get_queries_condition_parameters():
return [minimum_timestamp, query_actions]
def parse_upgradable_list(upgradable_list, perfect_score, media_type):
if media_type == 'series':
path_replace_method = path_mappings.path_replace
else:
path_replace_method = path_mappings.path_replace_movie
items_to_upgrade = []
for item in upgradable_list:
logging.debug(f"Trying to validate eligibility to upgrade for this subtitles: "
f"{item['subtitles_path']}")
if not os.path.exists(path_replace_method(item['subtitles_path'])):
logging.debug("Subtitles file doesn't exists anymore, we skip this one.")
continue
if (item['video_path'], item['language']) in \
[(x['video_path'], x['language']) for x in items_to_upgrade]:
logging.debug("Newer video path and subtitles language combination already in list of subtitles to "
"upgrade, we skip this one.")
continue
if os.path.exists(path_replace_method(item['subtitles_path'])) and \
os.path.exists(path_replace_method(item['video_path'])):
logging.debug("Video and subtitles file are still there, we continue the eligibility validation.")
pass
items_to_upgrade.append(item)
if not settings.general.getboolean('upgrade_manual'):
logging.debug("Removing history items for manually downloaded or translated subtitles.")
items_to_upgrade = [x for x in items_to_upgrade if x['action'] in [2, 4, 6]]
logging.debug("Removing history items for already perfectly scored subtitles.")
items_to_upgrade = [x for x in items_to_upgrade if x['score'] < perfect_score]
logging.debug(f"Bazarr will try to upgrade {len(items_to_upgrade)} subtitles.")
return items_to_upgrade
def parse_language_string(language_string):
if language_string.endswith('forced'):
language = language_string.split(':')[0]
@ -185,84 +239,67 @@ def parse_language_string(language_string):
def get_upgradable_episode_subtitles():
if not settings.general.getboolean('upgrade_subs'):
return []
# return an empty set of rows
return select(TableHistory.id) \
.where(TableHistory.id.is_(None)) \
.subquery()
max_id_timestamp = select(TableHistory.video_path,
TableHistory.language,
func.max(TableHistory.timestamp).label('timestamp')) \
.group_by(TableHistory.video_path, TableHistory.language) \
.distinct() \
.subquery()
minimum_timestamp, query_actions = get_queries_condition_parameters()
upgradable_episodes_conditions = [(TableHistory.action << query_actions),
upgradable_episodes_conditions = [(TableHistory.action.in_(query_actions)),
(TableHistory.timestamp > minimum_timestamp),
(TableHistory.score.is_null(False))]
TableHistory.score.is_not(None),
(TableHistory.score < 357)]
upgradable_episodes_conditions += get_exclusion_clause('series')
upgradable_episodes = TableHistory.select(TableHistory.video_path,
TableHistory.language,
TableHistory.score,
TableShows.tags,
TableShows.profileId,
TableEpisodes.audio_language,
TableEpisodes.sceneName,
TableEpisodes.title,
TableEpisodes.sonarrSeriesId,
TableHistory.action,
TableHistory.subtitles_path,
TableEpisodes.sonarrEpisodeId,
TableHistory.timestamp.alias('timestamp'),
TableEpisodes.monitored,
TableEpisodes.season,
TableEpisodes.episode,
TableShows.title.alias('seriesTitle'),
TableShows.seriesType) \
.join(TableShows, on=(TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.join(TableEpisodes, on=(TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId)) \
return select(TableHistory.id)\
.select_from(TableHistory) \
.join(max_id_timestamp, onclause=and_(TableHistory.video_path == max_id_timestamp.c.video_path,
TableHistory.language == max_id_timestamp.c.language,
max_id_timestamp.c.timestamp == TableHistory.timestamp)) \
.join(TableShows, onclause=TableHistory.sonarrSeriesId == TableShows.sonarrSeriesId) \
.join(TableEpisodes, onclause=TableHistory.sonarrEpisodeId == TableEpisodes.sonarrEpisodeId) \
.where(reduce(operator.and_, upgradable_episodes_conditions)) \
.order_by(TableHistory.timestamp.desc()) \
.dicts()
if not upgradable_episodes:
return []
else:
upgradable_episodes = [x for x in upgradable_episodes if _language_still_desired(x['language'], x['profileId'])]
logging.debug(f"{len(upgradable_episodes)} potentially upgradable episode subtitles have been found, let's "
f"filter them...")
return parse_upgradable_list(upgradable_list=upgradable_episodes, perfect_score=357, media_type='series')
.order_by(TableHistory.timestamp.desc())\
.subquery()
def get_upgradable_movies_subtitles():
if not settings.general.getboolean('upgrade_subs'):
return []
# return an empty set of rows
return select(TableHistoryMovie.id) \
.where(TableHistoryMovie.id.is_(None)) \
.subquery()
max_id_timestamp = select(TableHistoryMovie.video_path,
TableHistoryMovie.language,
func.max(TableHistoryMovie.timestamp).label('timestamp')) \
.group_by(TableHistoryMovie.video_path, TableHistoryMovie.language) \
.distinct() \
.subquery()
minimum_timestamp, query_actions = get_queries_condition_parameters()
upgradable_movies_conditions = [(TableHistoryMovie.action << query_actions),
upgradable_movies_conditions = [(TableHistoryMovie.action.in_(query_actions)),
(TableHistoryMovie.timestamp > minimum_timestamp),
(TableHistoryMovie.score.is_null(False))]
TableHistoryMovie.score.is_not(None),
(TableHistoryMovie.score < 117)]
upgradable_movies_conditions += get_exclusion_clause('movie')
upgradable_movies = TableHistoryMovie.select(TableHistoryMovie.video_path,
TableHistoryMovie.language,
TableHistoryMovie.score,
TableMovies.profileId,
TableHistoryMovie.action,
TableHistoryMovie.subtitles_path,
TableMovies.audio_language,
TableMovies.sceneName,
TableHistoryMovie.timestamp.alias('timestamp'),
TableMovies.monitored,
TableMovies.tags,
TableMovies.radarrId,
TableMovies.title) \
.join(TableMovies, on=(TableHistoryMovie.radarrId == TableMovies.radarrId)) \
return select(TableHistoryMovie.id) \
.select_from(TableHistoryMovie) \
.join(max_id_timestamp, onclause=and_(TableHistoryMovie.video_path == max_id_timestamp.c.video_path,
TableHistoryMovie.language == max_id_timestamp.c.language,
max_id_timestamp.c.timestamp == TableHistoryMovie.timestamp)) \
.join(TableMovies, onclause=TableHistoryMovie.radarrId == TableMovies.radarrId) \
.where(reduce(operator.and_, upgradable_movies_conditions)) \
.order_by(TableHistoryMovie.timestamp.desc()) \
.dicts()
if not upgradable_movies:
return []
else:
upgradable_movies = [x for x in upgradable_movies if _language_still_desired(x['language'], x['profileId'])]
logging.debug(f"{len(upgradable_movies)} potentially upgradable movie subtitles have been found, let's filter "
f"them...")
return parse_upgradable_list(upgradable_list=upgradable_movies, perfect_score=117, media_type='movie')
.subquery()
def _language_still_desired(language, profile_id):

View File

@ -18,7 +18,7 @@ from utilities.path_mappings import path_mappings
from radarr.notify import notify_radarr
from sonarr.notify import notify_sonarr
from languages.custom_lang import CustomLanguage
from app.database import TableEpisodes, TableMovies, TableShows, get_profiles_list
from app.database import TableEpisodes, TableMovies, TableShows, get_profiles_list, database, select
from app.event_handler import event_stream
from subtitles.processing import ProcessSubtitlesResult
@ -52,26 +52,27 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
lang_obj = Language.rebuild(lang_obj, forced=True)
if media_type == 'series':
episode_metadata = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableShows.profileId) \
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId)) \
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path)) \
.dicts() \
.get_or_none()
episode_metadata = database.execute(
select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableShows.profileId)
.select_from(TableEpisodes)
.join(TableShows)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(path))) \
.first()
if episode_metadata:
use_original_format = bool(get_profiles_list(episode_metadata["profileId"])["originalFormat"])
use_original_format = bool(get_profiles_list(episode_metadata.profileId)["originalFormat"])
else:
use_original_format = False
else:
movie_metadata = TableMovies.select(TableMovies.radarrId, TableMovies.profileId) \
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path)) \
.dicts() \
.get_or_none()
movie_metadata = database.execute(
select(TableMovies.radarrId, TableMovies.profileId)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(path))) \
.first()
if movie_metadata:
use_original_format = bool(get_profiles_list(movie_metadata["profileId"])["originalFormat"])
use_original_format = bool(get_profiles_list(movie_metadata.profileId)["originalFormat"])
else:
use_original_format = False
@ -134,18 +135,18 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
if media_type == 'series':
if not episode_metadata:
return
series_id = episode_metadata['sonarrSeriesId']
episode_id = episode_metadata['sonarrEpisodeId']
series_id = episode_metadata.sonarrSeriesId
episode_id = episode_metadata.sonarrEpisodeId
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, sonarr_series_id=episode_metadata['sonarrSeriesId'], forced=forced,
sonarr_episode_id=episode_metadata['sonarrEpisodeId'])
percent_score=100, sonarr_series_id=episode_metadata.sonarrSeriesId, forced=forced,
sonarr_episode_id=episode_metadata.sonarrEpisodeId)
else:
if not movie_metadata:
return
series_id = ""
episode_id = movie_metadata['radarrId']
episode_id = movie_metadata.radarrId
sync_subtitles(video_path=path, srt_path=subtitle_path, srt_lang=uploaded_language_code2, media_type=media_type,
percent_score=100, radarr_id=movie_metadata['radarrId'], forced=forced)
percent_score=100, radarr_id=movie_metadata.radarrId, forced=forced)
if use_postprocessing:
command = pp_replace(postprocessing_cmd, path, subtitle_path, uploaded_language, uploaded_language_code2,
@ -157,15 +158,15 @@ def manual_upload_subtitle(path, language, forced, hi, media_type, subtitle, aud
if media_type == 'series':
reversed_path = path_mappings.path_replace_reverse(path)
reversed_subtitles_path = path_mappings.path_replace_reverse(subtitle_path)
notify_sonarr(episode_metadata['sonarrSeriesId'])
event_stream(type='series', action='update', payload=episode_metadata['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode_metadata['sonarrEpisodeId'])
notify_sonarr(episode_metadata.sonarrSeriesId)
event_stream(type='series', action='update', payload=episode_metadata.sonarrSeriesId)
event_stream(type='episode-wanted', action='delete', payload=episode_metadata.sonarrEpisodeId)
else:
reversed_path = path_mappings.path_replace_reverse_movie(path)
reversed_subtitles_path = path_mappings.path_replace_reverse_movie(subtitle_path)
notify_radarr(movie_metadata['radarrId'])
event_stream(type='movie', action='update', payload=movie_metadata['radarrId'])
event_stream(type='movie-wanted', action='delete', payload=movie_metadata['radarrId'])
notify_radarr(movie_metadata.radarrId)
event_stream(type='movie', action='update', payload=movie_metadata.radarrId)
event_stream(type='movie-wanted', action='delete', payload=movie_metadata.radarrId)
result = ProcessSubtitlesResult(message=language_from_alpha3(language) + modifier_string + " Subtitles manually "
"uploaded.",

View File

@ -12,7 +12,7 @@ from subtitles.indexer.movies import store_subtitles_movie
from radarr.history import history_log_movie
from app.notifier import send_notifications_movie
from app.get_providers import get_providers
from app.database import get_exclusion_clause, get_audio_profile_languages, TableMovies
from app.database import get_exclusion_clause, get_audio_profile_languages, TableMovies, database, update, select
from app.event_handler import event_stream, show_progress, hide_progress
from ..adaptive_searching import is_search_active, updateFailedAttempts
@ -20,7 +20,7 @@ from ..download import generate_subtitles
def _wanted_movie(movie):
audio_language_list = get_audio_profile_languages(movie['audio_language'])
audio_language_list = get_audio_profile_languages(movie.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
@ -28,48 +28,48 @@ def _wanted_movie(movie):
languages = []
for language in ast.literal_eval(movie['missing_subtitles']):
if is_search_active(desired_language=language, attempt_string=movie['failedAttempts']):
TableMovies.update({TableMovies.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=movie['failedAttempts'])}) \
.where(TableMovies.radarrId == movie['radarrId']) \
.execute()
for language in ast.literal_eval(movie.missing_subtitles):
if is_search_active(desired_language=language, attempt_string=movie.failedAttempts):
database.execute(
update(TableMovies)
.values(failedAttempts=updateFailedAttempts(desired_language=language,
attempt_string=movie.failedAttempts))
.where(TableMovies.radarrId == movie.radarrId))
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
languages.append((language.split(":")[0], hi_, forced_))
else:
logging.info(f"BAZARR Search is throttled by adaptive search for this movie {movie['path']} and "
logging.info(f"BAZARR Search is throttled by adaptive search for this movie {movie.path} and "
f"language: {language}")
for result in generate_subtitles(path_mappings.path_replace_movie(movie['path']),
for result in generate_subtitles(path_mappings.path_replace_movie(movie.path),
languages,
audio_language,
str(movie['sceneName']),
movie['title'],
str(movie.sceneName),
movie.title,
'movie',
check_if_still_required=True):
if result:
store_subtitles_movie(movie['path'], path_mappings.path_replace_movie(movie['path']))
history_log_movie(1, movie['radarrId'], result)
event_stream(type='movie-wanted', action='delete', payload=movie['radarrId'])
send_notifications_movie(movie['radarrId'], result.message)
store_subtitles_movie(movie.path, path_mappings.path_replace_movie(movie.path))
history_log_movie(1, movie.radarrId, result)
event_stream(type='movie-wanted', action='delete', payload=movie.radarrId)
send_notifications_movie(movie.radarrId, result.message)
def wanted_download_subtitles_movie(radarr_id):
movies_details = TableMovies.select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.audio_language,
TableMovies.sceneName,
TableMovies.failedAttempts,
TableMovies.title)\
.where((TableMovies.radarrId == radarr_id))\
.dicts()
movies_details = list(movies_details)
movies_details = database.execute(
select(TableMovies.path,
TableMovies.missing_subtitles,
TableMovies.radarrId,
TableMovies.audio_language,
TableMovies.sceneName,
TableMovies.failedAttempts,
TableMovies.title)
.where(TableMovies.radarrId == radarr_id)) \
.all()
for movie in movies_details:
providers_list = get_providers()
@ -84,25 +84,25 @@ def wanted_download_subtitles_movie(radarr_id):
def wanted_search_missing_subtitles_movies():
conditions = [(TableMovies.missing_subtitles != '[]')]
conditions += get_exclusion_clause('movie')
movies = TableMovies.select(TableMovies.radarrId,
TableMovies.tags,
TableMovies.monitored,
TableMovies.title) \
.where(reduce(operator.and_, conditions)) \
.dicts()
movies = list(movies)
movies = database.execute(
select(TableMovies.radarrId,
TableMovies.tags,
TableMovies.monitored,
TableMovies.title)
.where(reduce(operator.and_, conditions))) \
.all()
count_movies = len(movies)
for i, movie in enumerate(movies):
show_progress(id='wanted_movies_progress',
header='Searching subtitles...',
name=movie['title'],
name=movie.title,
value=i,
count=count_movies)
providers = get_providers()
if providers:
wanted_download_subtitles_movie(movie['radarrId'])
wanted_download_subtitles_movie(movie.radarrId)
else:
logging.info("BAZARR All providers are throttled")
break

View File

@ -12,7 +12,8 @@ from subtitles.indexer.series import store_subtitles
from sonarr.history import history_log
from app.notifier import send_notifications
from app.get_providers import get_providers
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes
from app.database import get_exclusion_clause, get_audio_profile_languages, TableShows, TableEpisodes, database, \
update, select
from app.event_handler import event_stream, show_progress, hide_progress
from ..adaptive_searching import is_search_active, updateFailedAttempts
@ -20,20 +21,20 @@ from ..download import generate_subtitles
def _wanted_episode(episode):
audio_language_list = get_audio_profile_languages(episode['audio_language'])
audio_language_list = get_audio_profile_languages(episode.audio_language)
if len(audio_language_list) > 0:
audio_language = audio_language_list[0]['name']
else:
audio_language = 'None'
languages = []
for language in ast.literal_eval(episode['missing_subtitles']):
if is_search_active(desired_language=language, attempt_string=episode['failedAttempts']):
TableEpisodes.update({TableEpisodes.failedAttempts:
updateFailedAttempts(desired_language=language,
attempt_string=episode['failedAttempts'])}) \
.where(TableEpisodes.sonarrEpisodeId == episode['sonarrEpisodeId']) \
.execute()
for language in ast.literal_eval(episode.missing_subtitles):
if is_search_active(desired_language=language, attempt_string=episode.failedAttempts):
database.execute(
update(TableEpisodes)
.values(failedAttempts=updateFailedAttempts(desired_language=language,
attempt_string=episode.failedAttempts))
.where(TableEpisodes.sonarrEpisodeId == episode.sonarrEpisodeId))
hi_ = "True" if language.endswith(':hi') else "False"
forced_ = "True" if language.endswith(':forced') else "False"
@ -41,37 +42,38 @@ def _wanted_episode(episode):
else:
logging.debug(
f"BAZARR Search is throttled by adaptive search for this episode {episode['path']} and "
f"BAZARR Search is throttled by adaptive search for this episode {episode.path} and "
f"language: {language}")
for result in generate_subtitles(path_mappings.path_replace(episode['path']),
for result in generate_subtitles(path_mappings.path_replace(episode.path),
languages,
audio_language,
str(episode['sceneName']),
episode['title'],
str(episode.sceneName),
episode.title,
'series',
check_if_still_required=True):
if result:
store_subtitles(episode['path'], path_mappings.path_replace(episode['path']))
history_log(1, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
event_stream(type='series', action='update', payload=episode['sonarrSeriesId'])
event_stream(type='episode-wanted', action='delete', payload=episode['sonarrEpisodeId'])
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, episode.sonarrSeriesId, episode.sonarrEpisodeId, result)
event_stream(type='series', action='update', payload=episode.sonarrSeriesId)
event_stream(type='episode-wanted', action='delete', payload=episode.sonarrEpisodeId)
send_notifications(episode.sonarrSeriesId, episode.sonarrEpisodeId, result.message)
def wanted_download_subtitles(sonarr_episode_id):
episodes_details = TableEpisodes.select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.audio_language,
TableEpisodes.sceneName,
TableEpisodes.failedAttempts,
TableShows.title)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where((TableEpisodes.sonarrEpisodeId == sonarr_episode_id))\
.dicts()
episodes_details = list(episodes_details)
episodes_details = database.execute(
select(TableEpisodes.path,
TableEpisodes.missing_subtitles,
TableEpisodes.sonarrEpisodeId,
TableEpisodes.sonarrSeriesId,
TableEpisodes.audio_language,
TableEpisodes.sceneName,
TableEpisodes.failedAttempts,
TableShows.title)
.select_from(TableEpisodes)
.join(TableShows)
.where((TableEpisodes.sonarrEpisodeId == sonarr_episode_id))) \
.all()
for episode in episodes_details:
providers_list = get_providers()
@ -86,34 +88,35 @@ def wanted_download_subtitles(sonarr_episode_id):
def wanted_search_missing_subtitles_series():
conditions = [(TableEpisodes.missing_subtitles != '[]')]
conditions += get_exclusion_clause('series')
episodes = TableEpisodes.select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableShows.tags,
TableEpisodes.monitored,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.alias('episodeTitle'),
TableShows.seriesType)\
.join(TableShows, on=(TableEpisodes.sonarrSeriesId == TableShows.sonarrSeriesId))\
.where(reduce(operator.and_, conditions))\
.dicts()
episodes = list(episodes)
episodes = database.execute(
select(TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId,
TableShows.tags,
TableEpisodes.monitored,
TableShows.title,
TableEpisodes.season,
TableEpisodes.episode,
TableEpisodes.title.label('episodeTitle'),
TableShows.seriesType)
.select_from(TableEpisodes)
.join(TableShows)
.where(reduce(operator.and_, conditions))) \
.all()
count_episodes = len(episodes)
for i, episode in enumerate(episodes):
show_progress(id='wanted_episodes_progress',
header='Searching subtitles...',
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode['title'],
episode['season'],
episode['episode'],
episode['episodeTitle']),
name='{0} - S{1:02d}E{2:02d} - {3}'.format(episode.title,
episode.season,
episode.episode,
episode.episodeTitle),
value=i,
count=count_episodes)
providers = get_providers()
if providers:
wanted_download_subtitles(episode['sonarrEpisodeId'])
wanted_download_subtitles(episode.sonarrEpisodeId)
else:
logging.info("BAZARR All providers are throttled")
break

View File

@ -7,6 +7,7 @@ import logging
from ga4mp import GtagMP
from app.get_args import args
from app.config import settings
from radarr.info import get_radarr_info
from sonarr.info import get_sonarr_info
@ -46,7 +47,10 @@ class EventTracker:
self.tracker.store.save()
def track(self, provider, action, language):
def track_subtitles(self, provider, action, language):
if not settings.analytics.getboolean('enabled'):
return
subtitles_event = self.tracker.create_new_event(name="subtitles")
subtitles_event.set_event_param(name="subtitles_provider", value=provider)
@ -60,5 +64,22 @@ class EventTracker:
else:
self.tracker.store.save()
def track_throttling(self, provider, exception_name, exception_info):
if not settings.analytics.getboolean('enabled'):
return
throttling_event = self.tracker.create_new_event(name="throttling")
throttling_event.set_event_param(name="provider", value=provider)
throttling_event.set_event_param(name="exception_name", value=exception_name)
throttling_event.set_event_param(name="exception_info", value=exception_info)
try:
self.tracker.send(events=[throttling_event])
except Exception:
logging.debug("BAZARR unable to track event.")
else:
self.tracker.store.save()
event_tracker = EventTracker()

View File

@ -1,7 +1,7 @@
# coding=utf-8
from app.config import settings
from app.database import TableShowsRootfolder, TableMoviesRootfolder
from app.database import TableShowsRootfolder, TableMoviesRootfolder, database, select
from app.event_handler import event_stream
from .path_mappings import path_mappings
from sonarr.rootfolder import check_sonarr_rootfolder
@ -25,24 +25,26 @@ def get_health_issues():
# get Sonarr rootfolder issues
if settings.general.getboolean('use_sonarr'):
rootfolder = TableShowsRootfolder.select(TableShowsRootfolder.path,
TableShowsRootfolder.accessible,
TableShowsRootfolder.error)\
.where(TableShowsRootfolder.accessible == 0)\
.dicts()
rootfolder = database.execute(
select(TableShowsRootfolder.path,
TableShowsRootfolder.accessible,
TableShowsRootfolder.error)
.where(TableShowsRootfolder.accessible == 0)) \
.all()
for item in rootfolder:
health_issues.append({'object': path_mappings.path_replace(item['path']),
'issue': item['error']})
health_issues.append({'object': path_mappings.path_replace(item.path),
'issue': item.error})
# get Radarr rootfolder issues
if settings.general.getboolean('use_radarr'):
rootfolder = TableMoviesRootfolder.select(TableMoviesRootfolder.path,
TableMoviesRootfolder.accessible,
TableMoviesRootfolder.error)\
.where(TableMoviesRootfolder.accessible == 0)\
.dicts()
rootfolder = database.execute(
select(TableMoviesRootfolder.path,
TableMoviesRootfolder.accessible,
TableMoviesRootfolder.error)
.where(TableMoviesRootfolder.accessible == 0)) \
.all()
for item in rootfolder:
health_issues.append({'object': path_mappings.path_replace_movie(item['path']),
'issue': item['error']})
health_issues.append({'object': path_mappings.path_replace_movie(item.path),
'issue': item.error})
return health_issues

View File

@ -10,10 +10,17 @@ from bs4 import UnicodeDammit
from app.config import settings
def check_credentials(user, pw):
def check_credentials(user, pw, request, log_success=True):
ip_addr = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
username = settings.auth.username
password = settings.auth.password
return hashlib.md5(pw.encode('utf-8')).hexdigest() == password and user == username
if hashlib.md5(pw.encode('utf-8')).hexdigest() == password and user == username:
if log_success:
logging.info(f'Successful authentication from {ip_addr} for user {user}')
return True
else:
logging.info(f'Failed authentication from {ip_addr} for user {user}')
return False
def get_subtitle_destination_folder():

View File

@ -7,7 +7,7 @@ from knowit.api import know, KnowitException
from languages.custom_lang import CustomLanguage
from languages.get_languages import language_from_alpha3, alpha3_from_alpha2
from app.database import TableEpisodes, TableMovies
from app.database import TableEpisodes, TableMovies, database, update, select
from utilities.path_mappings import path_mappings
from app.config import settings
@ -116,22 +116,22 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
if use_cache:
# Get the actual cache value form database
if episode_file_id:
cache_key = TableEpisodes.select(TableEpisodes.ffprobe_cache)\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file))\
.dicts()\
.get_or_none()
cache_key = database.execute(
select(TableEpisodes.ffprobe_cache)
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file))) \
.first()
elif movie_file_id:
cache_key = TableMovies.select(TableMovies.ffprobe_cache)\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file))\
.dicts()\
.get_or_none()
cache_key = database.execute(
select(TableMovies.ffprobe_cache)
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file))) \
.first()
else:
cache_key = None
# check if we have a value for that cache key
try:
# Unpickle ffprobe cache
cached_value = pickle.loads(cache_key['ffprobe_cache'])
cached_value = pickle.loads(cache_key.ffprobe_cache)
except Exception:
pass
else:
@ -144,7 +144,7 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
# no valid cache
pass
else:
# cache mut be renewed
# cache must be renewed
pass
# if not, we retrieve the metadata from the file
@ -180,11 +180,13 @@ def parse_video_metadata(file, file_size, episode_file_id=None, movie_file_id=No
# we write to db the result and return the newly cached ffprobe dict
if episode_file_id:
TableEpisodes.update({TableEpisodes.ffprobe_cache: pickle.dumps(data, pickle.HIGHEST_PROTOCOL)})\
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file))\
.execute()
database.execute(
update(TableEpisodes)
.values(ffprobe_cache=pickle.dumps(data, pickle.HIGHEST_PROTOCOL))
.where(TableEpisodes.path == path_mappings.path_replace_reverse(file)))
elif movie_file_id:
TableMovies.update({TableEpisodes.ffprobe_cache: pickle.dumps(data, pickle.HIGHEST_PROTOCOL)})\
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file))\
.execute()
database.execute(
update(TableMovies)
.values(ffprobe_cache=pickle.dumps(data, pickle.HIGHEST_PROTOCOL))
.where(TableMovies.path == path_mappings.path_replace_reverse_movie(file)))
return data

View File

@ -18,7 +18,7 @@ export function useMoviesProvider(radarrId?: number) {
}
},
{
staleTime: Infinity,
staleTime: 0,
}
);
}
@ -32,7 +32,7 @@ export function useEpisodesProvider(episodeId?: number) {
}
},
{
staleTime: Infinity,
staleTime: 0,
}
);
}

View File

@ -87,6 +87,14 @@ const Search: FunctionComponent = () => {
value={query}
onChange={setQuery}
onBlur={() => setQuery("")}
filter={(value, item) =>
item.value.toLowerCase().includes(value.toLowerCase().trim()) ||
item.value
.normalize("NFD")
.replace(/[\u0300-\u036f]/g, "")
.toLowerCase()
.includes(value.trim())
}
></Autocomplete>
);
};

View File

@ -0,0 +1,78 @@
import { BuildKey } from "@/utilities";
import {
faCheck,
faCheckCircle,
faExclamationCircle,
faListCheck,
faTimes,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { Group, List, Popover, Stack, Text } from "@mantine/core";
import { useHover } from "@mantine/hooks";
import { FunctionComponent } from "react";
interface StateIconProps {
  matches: string[];
  dont: string[];
  isHistory: boolean;
}

/**
 * Icon summarizing subtitle scoring criteria: the lists of matched and
 * unmatched criteria are shown in a two-column popover on hover.  In
 * history views a neutral list icon is shown; elsewhere the icon and its
 * color reflect whether any criteria failed to match.
 */
const StateIcon: FunctionComponent<StateIconProps> = ({
  matches,
  dont,
  isHistory,
}) => {
  const hasIssues = dont.length > 0;

  const { hovered, ref } = useHover();

  // Render the popover target inline rather than declaring a nested
  // `PopoverTarget` component: a component type created inside the render
  // function gets a fresh identity on every render, which forces React to
  // unmount and remount the subtree each time the parent re-renders.
  const target = isHistory ? (
    <FontAwesomeIcon icon={faListCheck} />
  ) : (
    <Text color={hasIssues ? "yellow" : "green"}>
      <FontAwesomeIcon icon={hasIssues ? faExclamationCircle : faCheckCircle} />
    </Text>
  );

  return (
    <Popover opened={hovered} position="top" width={360} withArrow withinPortal>
      <Popover.Target>
        <Text ref={ref}>{target}</Text>
      </Popover.Target>
      <Popover.Dropdown>
        <Group position="left" spacing="xl" noWrap grow>
          {/* Matched criteria */}
          <Stack align="flex-start" justify="flex-start" spacing="xs" mb="auto">
            <Text color="green">
              <FontAwesomeIcon icon={faCheck}></FontAwesomeIcon>
            </Text>
            <List>
              {matches.map((v, idx) => (
                <List.Item key={BuildKey(idx, v, "match")}>{v}</List.Item>
              ))}
            </List>
          </Stack>
          {/* Unmatched criteria */}
          <Stack align="flex-start" justify="flex-start" spacing="xs" mb="auto">
            <Text color="yellow">
              <FontAwesomeIcon icon={faTimes}></FontAwesomeIcon>
            </Text>
            <List>
              {dont.map((v, idx) => (
                <List.Item key={BuildKey(idx, v, "miss")}>{v}</List.Item>
              ))}
            </List>
          </Stack>
        </Group>
      </Popover.Dropdown>
    </Popover>
  );
};

export default StateIcon;

View File

@ -83,12 +83,25 @@ const TimeOffsetForm: FunctionComponent<Props> = ({ selections, onSubmit }) => {
></FontAwesomeIcon>
</Button>
<NumberInput
min={0}
label="hour"
{...form.getInputProps("hour")}
></NumberInput>
<NumberInput label="min" {...form.getInputProps("min")}></NumberInput>
<NumberInput label="sec" {...form.getInputProps("sec")}></NumberInput>
<NumberInput label="ms" {...form.getInputProps("ms")}></NumberInput>
<NumberInput
min={0}
label="min"
{...form.getInputProps("min")}
></NumberInput>
<NumberInput
min={0}
label="sec"
{...form.getInputProps("sec")}
></NumberInput>
<NumberInput
min={0}
label="ms"
{...form.getInputProps("ms")}
></NumberInput>
</Group>
<Divider></Divider>
<Button disabled={!enabled} type="submit">

View File

@ -5,6 +5,7 @@ import {
useMovieAddBlacklist,
useMovieHistory,
} from "@/apis/hooks";
import StateIcon from "@/components/StateIcon";
import { withModal } from "@/modules/modals";
import { faFileExcel, faInfoCircle } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
@ -62,6 +63,23 @@ const MovieHistoryView: FunctionComponent<MovieHistoryViewProps> = ({
Header: "Score",
accessor: "score",
},
{
accessor: "matches",
Cell: (row) => {
const { matches, dont_matches: dont } = row.row.original;
if (matches.length || dont.length) {
return (
<StateIcon
matches={matches}
dont={dont}
isHistory={true}
></StateIcon>
);
} else {
return null;
}
},
},
{
Header: "Date",
accessor: "timestamp",
@ -168,6 +186,23 @@ const EpisodeHistoryView: FunctionComponent<EpisodeHistoryViewProps> = ({
Header: "Score",
accessor: "score",
},
{
accessor: "matches",
Cell: (row) => {
const { matches, dont_matches: dont } = row.row.original;
if (matches.length || dont.length) {
return (
<StateIcon
matches={matches}
dont={dont}
isHistory={true}
></StateIcon>
);
} else {
return null;
}
},
},
{
Header: "Date",
accessor: "timestamp",

View File

@ -1,15 +1,11 @@
import { withModal } from "@/modules/modals";
import { task, TaskGroup } from "@/modules/task";
import { useTableStyles } from "@/styles";
import { BuildKey, GetItemId } from "@/utilities";
import { GetItemId } from "@/utilities";
import {
faCaretDown,
faCheck,
faCheckCircle,
faDownload,
faExclamationCircle,
faInfoCircle,
faTimes,
} from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
@ -20,19 +16,16 @@ import {
Code,
Collapse,
Divider,
Group,
List,
Popover,
Stack,
Text,
} from "@mantine/core";
import { useHover } from "@mantine/hooks";
import { isString } from "lodash";
import { FunctionComponent, useCallback, useMemo, useState } from "react";
import { useCallback, useMemo, useState } from "react";
import { UseQueryResult } from "react-query";
import { Column } from "react-table";
import { Action, PageTable } from "..";
import Language from "../bazarr/Language";
import StateIcon from "../StateIcon";
type SupportType = Item.Movie | Item.Episode;
@ -47,18 +40,18 @@ interface Props<T extends SupportType> {
function ManualSearchView<T extends SupportType>(props: Props<T>) {
const { download, query: useSearch, item } = props;
const itemId = useMemo(() => GetItemId(item ?? {}), [item]);
const [searchStarted, setSearchStarted] = useState(false);
const [id, setId] = useState<number | undefined>(undefined);
const itemId = useMemo(() => GetItemId(item), [item]);
const results = useSearch(id);
const results = useSearch(searchStarted ? itemId : undefined);
const isStale = results.data === undefined;
const haveResult = results.data !== undefined;
const search = useCallback(() => {
setId(itemId);
setSearchStarted(true);
results.refetch();
}, [itemId, results]);
}, [results]);
const columns = useMemo<Column<SearchResultType>[]>(
() => [
@ -155,7 +148,13 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
accessor: "matches",
Cell: (row) => {
const { matches, dont_matches: dont } = row.row.original;
return <StateIcon matches={matches} dont={dont}></StateIcon>;
return (
<StateIcon
matches={matches}
dont={dont}
isHistory={false}
></StateIcon>
);
},
},
{
@ -191,6 +190,14 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
const bSceneNameAvailable =
isString(item.sceneName) && item.sceneName.length !== 0;
const searchButtonText = useMemo(() => {
if (results.isFetching) {
return "Searching";
}
return searchStarted ? "Search Again" : "Search";
}, [results.isFetching, searchStarted]);
return (
<Stack>
<Alert
@ -202,7 +209,7 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
<Divider hidden={!bSceneNameAvailable} my="xs"></Divider>
<Code hidden={!bSceneNameAvailable}>{item?.sceneName}</Code>
</Alert>
<Collapse in={!isStale && !results.isFetching}>
<Collapse in={haveResult && !results.isFetching}>
<PageTable
tableStyles={{ emptyText: "No result", placeholder: 10 }}
columns={columns}
@ -211,7 +218,7 @@ function ManualSearchView<T extends SupportType>(props: Props<T>) {
</Collapse>
<Divider></Divider>
<Button loading={results.isFetching} fullWidth onClick={search}>
{isStale ? "Search" : "Search Again"}
{searchButtonText}
</Button>
</Stack>
);
@ -227,48 +234,3 @@ export const EpisodeSearchModal = withModal<Props<Item.Episode>>(
"episode-manual-search",
{ title: "Search Subtitles", size: "calc(100vw - 4rem)" }
);
const StateIcon: FunctionComponent<{ matches: string[]; dont: string[] }> = ({
matches,
dont,
}) => {
const hasIssues = dont.length > 0;
const { ref, hovered } = useHover();
return (
<Popover opened={hovered} position="top" width={360} withArrow withinPortal>
<Popover.Target>
<Text color={hasIssues ? "yellow" : "green"} ref={ref}>
<FontAwesomeIcon
icon={hasIssues ? faExclamationCircle : faCheckCircle}
></FontAwesomeIcon>
</Text>
</Popover.Target>
<Popover.Dropdown>
<Group position="left" spacing="xl" noWrap grow>
<Stack align="flex-start" justify="flex-start" spacing="xs" mb="auto">
<Text color="green">
<FontAwesomeIcon icon={faCheck}></FontAwesomeIcon>
</Text>
<List>
{matches.map((v, idx) => (
<List.Item key={BuildKey(idx, v, "match")}>{v}</List.Item>
))}
</List>
</Stack>
<Stack align="flex-start" justify="flex-start" spacing="xs" mb="auto">
<Text color="yellow">
<FontAwesomeIcon icon={faTimes}></FontAwesomeIcon>
</Text>
<List>
{dont.map((v, idx) => (
<List.Item key={BuildKey(idx, v, "miss")}>{v}</List.Item>
))}
</List>
</Stack>
</Group>
</Popover.Dropdown>
</Popover>
);
};

View File

@ -3,6 +3,7 @@ import { useMovieAddBlacklist, useMovieHistoryPagination } from "@/apis/hooks";
import { MutateAction } from "@/components/async";
import { HistoryIcon } from "@/components/bazarr";
import Language from "@/components/bazarr/Language";
import StateIcon from "@/components/StateIcon";
import TextPopover from "@/components/TextPopover";
import HistoryView from "@/pages/views/HistoryView";
import { useTableStyles } from "@/styles";
@ -56,6 +57,23 @@ const MoviesHistoryView: FunctionComponent = () => {
Header: "Score",
accessor: "score",
},
{
accessor: "matches",
Cell: (row) => {
const { matches, dont_matches: dont } = row.row.original;
if (matches.length || dont.length) {
return (
<StateIcon
matches={matches}
dont={dont}
isHistory={true}
></StateIcon>
);
} else {
return null;
}
},
},
{
Header: "Date",
accessor: "timestamp",

View File

@ -6,6 +6,7 @@ import {
import { MutateAction } from "@/components/async";
import { HistoryIcon } from "@/components/bazarr";
import Language from "@/components/bazarr/Language";
import StateIcon from "@/components/StateIcon";
import TextPopover from "@/components/TextPopover";
import HistoryView from "@/pages/views/HistoryView";
import { useTableStyles } from "@/styles";
@ -72,6 +73,23 @@ const SeriesHistoryView: FunctionComponent = () => {
Header: "Score",
accessor: "score",
},
{
accessor: "matches",
Cell: (row) => {
const { matches, dont_matches: dont } = row.row.original;
if (matches.length || dont.length) {
return (
<StateIcon
matches={matches}
dont={dont}
isHistory={true}
></StateIcon>
);
} else {
return null;
}
},
},
{
Header: "Date",
accessor: "timestamp",

View File

@ -73,9 +73,14 @@ const NotificationForm: FunctionComponent<Props> = ({
>
<Stack>
<Selector
searchable
disabled={payload !== null}
{...options}
{...form.getInputProps("selection")}
// We also to update the url, so override the default event from getInputProps
onChange={(value) => {
form.setValues({ selection: value, url: value?.url ?? undefined });
}}
></Selector>
<div hidden={form.values.selection === null}>
<Textarea

View File

@ -364,7 +364,6 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
name: "Subtitulamos.tv",
description: "Spanish Subtitles Provider",
},
{ key: "sucha", description: "LATAM Spanish Subtitles Provider" },
{ key: "supersubtitles" },
{
key: "titlovi",

View File

@ -215,8 +215,8 @@ const SettingsSubtitlesView: FunctionComponent = () => {
settingKey="settings-general-adaptive_searching"
></Check>
<Message>
When searching for subtitles, Bazarr will reduce search frequency to
limit call to providers.
When enabled, Bazarr will skip searching providers for subtitles which
have been searched recently.
</Message>
<CollapseBox settingKey="settings-general-adaptive_searching">
<Selector
@ -225,8 +225,9 @@ const SettingsSubtitlesView: FunctionComponent = () => {
options={adaptiveSearchingDelayOption}
></Selector>
<Message>
In order to reduce search frequency, how many weeks must Bazarr wait
after initial search.
The delay from the first search to adaptive searching applying.
During this window Bazarr will continue to search for subtitles,
even if they have been searched for recently.
</Message>
<Selector
settingKey="settings-general-adaptive_searching_delta"
@ -234,8 +235,9 @@ const SettingsSubtitlesView: FunctionComponent = () => {
options={adaptiveSearchingDeltaOption}
></Selector>
<Message>
How often should Bazarr search for subtitles when in adaptive search
mode.
The delay between Bazarr searching for subtitles in adaptive search
mode. If the media has been searched for more recently than this
value, Bazarr will skip searching for subtitles.
</Message>
</CollapseBox>
<Check

View File

@ -126,7 +126,6 @@ declare namespace Item {
type Series = Base &
SeriesIdType & {
hearing_impaired: boolean;
episodeFileCount: number;
episodeMissingCount: number;
seriesType: SonarrSeriesType;
@ -137,12 +136,7 @@ declare namespace Item {
MovieIdType &
SubtitleType &
MissingSubtitleType &
SceneNameType & {
hearing_impaired: boolean;
audio_codec: string;
// movie_file_id: number;
tmdbId: number;
};
SceneNameType;
type Episode = PathType &
TitleType &
@ -152,13 +146,8 @@ declare namespace Item {
MissingSubtitleType &
SceneNameType &
AudioLanguageType & {
audio_codec: string;
video_codec: string;
season: number;
episode: number;
resolution: string;
format: string;
// episode_file_id: number;
};
}
@ -166,7 +155,6 @@ declare namespace Wanted {
type Base = MonitoredType &
TagType &
SceneNameType & {
// failedAttempts?: any;
hearing_impaired: boolean;
missing_subtitles: Subtitle[];
};
@ -202,16 +190,16 @@ declare namespace History {
TagType &
MonitoredType &
Partial<ItemHistoryType> & {
id: number;
action: number;
blacklisted: boolean;
score?: string;
subs_id?: string;
raw_timestamp: number;
parsed_timestamp: string;
timestamp: string;
description: string;
upgradable: boolean;
matches: string[];
dont_matches: string[];
};
type Movie = History.Base & MovieIdType & TitleType;

6
libs/alembic/__init__.py Normal file
View File

@ -0,0 +1,6 @@
import sys
from . import context
from . import op
__version__ = "1.10.3"

4
libs/alembic/__main__.py Normal file
View File

@ -0,0 +1,4 @@
from .config import main
if __name__ == "__main__":
main(prog="alembic")

View File

@ -0,0 +1,10 @@
from .api import _render_migration_diffs
from .api import compare_metadata
from .api import produce_migrations
from .api import render_python_code
from .api import RevisionContext
from .compare import _produce_net_changes
from .compare import comparators
from .render import render_op_text
from .render import renderers
from .rewriter import Rewriter

View File

@ -0,0 +1,605 @@
from __future__ import annotations
import contextlib
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterator
from typing import Optional
from typing import Set
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union
from sqlalchemy import inspect
from . import compare
from . import render
from .. import util
from ..operations import ops
"""Provide the 'autogenerate' feature which can produce migration operations
automatically."""
if TYPE_CHECKING:
from sqlalchemy.engine import Connection
from sqlalchemy.engine import Dialect
from sqlalchemy.engine import Inspector
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.schema import ForeignKeyConstraint
from sqlalchemy.sql.schema import Index
from sqlalchemy.sql.schema import MetaData
from sqlalchemy.sql.schema import Table
from sqlalchemy.sql.schema import UniqueConstraint
from alembic.config import Config
from alembic.operations.ops import MigrationScript
from alembic.operations.ops import UpgradeOps
from alembic.runtime.migration import MigrationContext
from alembic.script.base import Script
from alembic.script.base import ScriptDirectory
def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any:
    """Compare a database schema to that given in a
    :class:`~sqlalchemy.schema.MetaData` instance.

    The database connection is presented in the context of a
    :class:`.MigrationContext` object, which provides database
    connectivity as well as optional comparison functions to use for
    datatypes and server defaults - see the "autogenerate" arguments at
    :meth:`.EnvironmentContext.configure` for details on these.

    The return format is a list of "diff" directives, each representing
    an individual difference, e.g. ``('add_table', Table(...))``,
    ``('remove_table', Table(...))``, ``('add_column', None, 'foo',
    Column(...))``, ``('remove_column', None, 'foo', Column(...))``, or a
    nested list of ``('modify_<attr>', schema, table, column, kw,
    old, new)`` tuples for attribute-level changes on a column.

    :param context: a :class:`.MigrationContext`
     instance.
    :param metadata: a :class:`~sqlalchemy.schema.MetaData`
     instance.

    .. seealso::

        :func:`.produce_migrations` - produces a :class:`.MigrationScript`
        structure based on metadata comparison.

    """
    # Build the full MigrationScript structure, then flatten its upgrade
    # operations into the legacy list-of-diff-tuples format.
    return produce_migrations(context, metadata).upgrade_ops.as_diffs()
def produce_migrations(
    context: MigrationContext, metadata: MetaData
) -> MigrationScript:
    """Produce a :class:`.MigrationScript` structure based on schema
    comparison.

    This function does essentially what :func:`.compare_metadata` does,
    but then runs the resulting list of diffs to produce the full
    :class:`.MigrationScript` object.   For an example of what this looks like,
    see the example in :ref:`customizing_revision`.

    .. seealso::

        :func:`.compare_metadata` - returns more fundamental "diff"
        data from comparing a schema.

    """
    # Start from an empty script; the comparison step below fills in the
    # upgrade/downgrade operation trees in place.
    script = ops.MigrationScript(
        rev_id=None,
        upgrade_ops=ops.UpgradeOps([]),
        downgrade_ops=ops.DowngradeOps([]),
    )
    autogen_ctx = AutogenContext(context, metadata=metadata)
    compare._populate_migration_script(autogen_ctx, script)
    return script
def render_python_code(
    up_or_down_op: UpgradeOps,
    sqlalchemy_module_prefix: str = "sa.",
    alembic_module_prefix: str = "op.",
    render_as_batch: bool = False,
    imports: Tuple[str, ...] = (),
    render_item: None = None,
    migration_context: Optional[MigrationContext] = None,
) -> str:
    """Render Python code given an :class:`.UpgradeOps` or
    :class:`.DowngradeOps` object.

    This is a convenience function that can be used to test the
    autogenerate output of a user-defined :class:`.MigrationScript` structure.

    """
    if migration_context is None:
        # No context supplied: fabricate one against a generic dialect so
        # rendering can proceed without any database connection.
        from ..runtime.migration import MigrationContext
        from sqlalchemy.engine.default import DefaultDialect

        migration_context = MigrationContext.configure(
            dialect=DefaultDialect()
        )

    render_opts = {
        "sqlalchemy_module_prefix": sqlalchemy_module_prefix,
        "alembic_module_prefix": alembic_module_prefix,
        "render_item": render_item,
        "render_as_batch": render_as_batch,
    }

    autogen_context = AutogenContext(migration_context, opts=render_opts)
    autogen_context.imports = set(imports)

    return render._indent(
        render._render_cmd_body(up_or_down_op, autogen_context)
    )
def _render_migration_diffs(
    context: MigrationContext, template_args: Dict[Any, Any]
) -> None:
    """legacy, used by test_autogen_composition at the moment"""

    autogen_ctx = AutogenContext(context)

    # Compute the net schema changes into an (initially empty) upgrade tree.
    upgrade = ops.UpgradeOps([])
    compare._produce_net_changes(autogen_ctx, upgrade)

    # The downgrade tree is simply the reversal of the upgrade tree.
    script = ops.MigrationScript(
        rev_id=None,
        upgrade_ops=upgrade,
        downgrade_ops=upgrade.reverse(),
    )
    render._render_python_into_templatevars(
        autogen_ctx, script, template_args
    )
class AutogenContext:
"""Maintains configuration and state that's specific to an
autogenerate operation."""
metadata: Optional[MetaData] = None
"""The :class:`~sqlalchemy.schema.MetaData` object
representing the destination.
This object is the one that is passed within ``env.py``
to the :paramref:`.EnvironmentContext.configure.target_metadata`
parameter. It represents the structure of :class:`.Table` and other
objects as stated in the current database model, and represents the
destination structure for the database being examined.
While the :class:`~sqlalchemy.schema.MetaData` object is primarily
known as a collection of :class:`~sqlalchemy.schema.Table` objects,
it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary
that may be used by end-user schemes to store additional schema-level
objects that are to be compared in custom autogeneration schemes.
"""
connection: Optional[Connection] = None
"""The :class:`~sqlalchemy.engine.base.Connection` object currently
connected to the database backend being compared.
This is obtained from the :attr:`.MigrationContext.bind` and is
ultimately set up in the ``env.py`` script.
"""
dialect: Optional[Dialect] = None
"""The :class:`~sqlalchemy.engine.Dialect` object currently in use.
This is normally obtained from the
:attr:`~sqlalchemy.engine.base.Connection.dialect` attribute.
"""
imports: Set[str] = None # type: ignore[assignment]
"""A ``set()`` which contains string Python import directives.
The directives are to be rendered into the ``${imports}`` section
of a script template. The set is normally empty and can be modified
within hooks such as the
:paramref:`.EnvironmentContext.configure.render_item` hook.
.. seealso::
:ref:`autogen_render_types`
"""
migration_context: MigrationContext = None # type: ignore[assignment]
"""The :class:`.MigrationContext` established by the ``env.py`` script."""
def __init__(
self,
migration_context: MigrationContext,
metadata: Optional[MetaData] = None,
opts: Optional[dict] = None,
autogenerate: bool = True,
) -> None:
if (
autogenerate
and migration_context is not None
and migration_context.as_sql
):
raise util.CommandError(
"autogenerate can't use as_sql=True as it prevents querying "
"the database for schema information"
)
if opts is None:
opts = migration_context.opts
self.metadata = metadata = (
opts.get("target_metadata", None) if metadata is None else metadata
)
if (
autogenerate
and metadata is None
and migration_context is not None
and migration_context.script is not None
):
raise util.CommandError(
"Can't proceed with --autogenerate option; environment "
"script %s does not provide "
"a MetaData object or sequence of objects to the context."
% (migration_context.script.env_py_location)
)
include_object = opts.get("include_object", None)
include_name = opts.get("include_name", None)
object_filters = []
name_filters = []
if include_object:
object_filters.append(include_object)
if include_name:
name_filters.append(include_name)
self._object_filters = object_filters
self._name_filters = name_filters
self.migration_context = migration_context
if self.migration_context is not None:
self.connection = self.migration_context.bind
self.dialect = self.migration_context.dialect
self.imports = set()
self.opts: Dict[str, Any] = opts
self._has_batch: bool = False
@util.memoized_property
def inspector(self) -> Inspector:
    """Return (and memoize) a SQLAlchemy inspector for the current
    database connection.

    :raises TypeError: if this context has no connection to inspect.
    """
    conn = self.connection
    if conn is None:
        raise TypeError(
            "can't return inspector as this "
            "AutogenContext has no database connection"
        )
    return inspect(conn)
@contextlib.contextmanager
def _within_batch(self) -> Iterator[None]:
self._has_batch = True
yield
self._has_batch = False
def run_name_filters(
    self,
    name: Optional[str],
    type_: str,
    parent_names: Dict[str, Optional[str]],
) -> bool:
    """Apply the configured name filters to a reflected name.

    Called for every name seen on the reflection side of an autogenerate
    run; the filters come from the
    :paramref:`.EnvironmentContext.configure.include_name` parameter.
    As a side effect, when a schema name is present,
    ``parent_names["schema_qualified_table_name"]`` is populated for the
    filters to consult.

    :return: True if every filter accepts the name (or no filters are
     configured), False otherwise.
    """
    if "schema_name" in parent_names:
        # figure out which table this name belongs to: the name itself
        # for a table, otherwise the enclosing table from parent_names
        tname = (
            name if type_ == "table" else parent_names.get("table_name", None)
        )
        if tname:
            sname = parent_names["schema_name"]
            qualified = "%s.%s" % (sname, tname) if sname else tname
            parent_names["schema_qualified_table_name"] = qualified

    return all(fn(name, type_, parent_names) for fn in self._name_filters)
def run_object_filters(
    self,
    object_: Union[
        Table,
        Index,
        Column,
        UniqueConstraint,
        ForeignKeyConstraint,
    ],
    name: Optional[str],
    type_: str,
    reflected: bool,
    compare_to: Optional[Union[Table, Index, Column, UniqueConstraint]],
) -> bool:
    """Apply the configured object filters to a schema object.

    Called for every object encountered during an autogenerate
    comparison; the filters come from the
    :paramref:`.EnvironmentContext.configure.include_object` parameter.

    :return: True if every filter accepts the object (or no filters are
     configured), False otherwise.
    """
    return all(
        fn(object_, name, type_, reflected, compare_to)
        for fn in self._object_filters
    )

# legacy alias kept for backwards compatibility with older callers
run_filters = run_object_filters
@util.memoized_property
def sorted_tables(self):
    """Return an aggregate of the :attr:`.MetaData.sorted_tables`
    collection(s).

    When ``self.metadata`` is a sequence of :class:`.MetaData` objects,
    the individual :attr:`.MetaData.sorted_tables` lists are simply
    concatenated in sequence order; the combined result is **not**
    re-sorted across MetaData boundaries.
    """
    return [
        table
        for metadata in util.to_list(self.metadata)
        for table in metadata.sorted_tables
    ]
@util.memoized_property
def table_key_to_table(self):
    """Return an aggregate of the :attr:`.MetaData.tables` dictionaries.

    Merges the table-key -> :class:`.Table` mapping of every
    :class:`.MetaData` in ``self.metadata`` into a single dictionary.

    :raises ValueError: if two :class:`.MetaData` objects share a table
     key -- duplicates are not supported.
    """
    merged = {}
    for metadata in util.to_list(self.metadata):
        dupes = set(merged).intersection(metadata.tables)
        if dupes:
            formatted = ", ".join('"%s"' % key for key in sorted(dupes))
            raise ValueError(
                "Duplicate table keys across multiple "
                "MetaData objects: %s" % formatted
            )
        merged.update(metadata.tables)
    return merged
class RevisionContext:
    """Maintains configuration and state that's specific to a revision
    file generation operation."""

    def __init__(
        self,
        config: Config,
        script_directory: ScriptDirectory,
        command_args: Dict[str, Any],
        process_revision_directives: Optional[Callable] = None,
    ) -> None:
        """Set up state for generating one or more revision files.

        :param config: the Alembic :class:`.Config` in use.
        :param script_directory: the :class:`.ScriptDirectory` into which
         revision files will be written.
        :param command_args: dictionary of revision-command arguments
         (``rev_id``, ``message``, ``head``, ``splice``, etc.).
        :param process_revision_directives: optional hook invoked with
         the generated revision directives before rendering.
        """
        self.config = config
        self.script_directory = script_directory
        self.command_args = command_args
        self.process_revision_directives = process_revision_directives
        self.template_args = {
            "config": config  # Let templates use config for
            # e.g. multiple databases
        }
        # start with a single empty MigrationScript directive; hooks may
        # replace or extend this list before scripts are written out
        self.generated_revisions = [self._default_revision()]

    def _to_script(
        self, migration_script: MigrationScript
    ) -> Optional[Script]:
        """Render one :class:`.MigrationScript` directive into an actual
        revision file via the script directory, returning the resulting
        :class:`.Script` (or None).
        """
        template_args: Dict[str, Any] = self.template_args.copy()
        if getattr(migration_script, "_needs_render", False):
            autogen_context = self._last_autogen_context
            # clear out existing imports if we are doing multiple
            # renders
            autogen_context.imports = set()
            if migration_script.imports:
                autogen_context.imports.update(migration_script.imports)
            render._render_python_into_templatevars(
                autogen_context, migration_script, template_args
            )
        assert migration_script.rev_id is not None
        return self.script_directory.generate_revision(
            migration_script.rev_id,
            migration_script.message,
            refresh=True,
            head=migration_script.head,
            splice=migration_script.splice,
            branch_labels=migration_script.branch_label,
            version_path=migration_script.version_path,
            depends_on=migration_script.depends_on,
            **template_args,
        )

    def run_autogenerate(
        self, rev: tuple, migration_context: MigrationContext
    ) -> None:
        """Run the environment with autogenerate comparison enabled."""
        self._run_environment(rev, migration_context, True)

    def run_no_autogenerate(
        self, rev: tuple, migration_context: MigrationContext
    ) -> None:
        """Run the environment without autogenerate comparison."""
        self._run_environment(rev, migration_context, False)

    def _run_environment(
        self,
        rev: tuple,
        migration_context: MigrationContext,
        autogenerate: bool,
    ) -> None:
        """Populate the pending revision directives from the environment.

        When ``autogenerate`` is True, validates that ``--sql`` mode is
        not in use and that the target database is at the head
        revision(s), then fills the current :class:`.MigrationScript`
        with operations produced by schema comparison.  Finally invokes
        the ``process_revision_directives`` hook(s), if configured.
        """
        if autogenerate:
            if self.command_args["sql"]:
                raise util.CommandError(
                    "Using --sql with --autogenerate does not make any sense"
                )
            if set(self.script_directory.get_revisions(rev)) != set(
                self.script_directory.get_revisions("heads")
            ):
                raise util.CommandError("Target database is not up to date.")

        upgrade_token = migration_context.opts["upgrade_token"]
        downgrade_token = migration_context.opts["downgrade_token"]

        migration_script = self.generated_revisions[-1]
        if not getattr(migration_script, "_needs_render", False):
            # first run against this directive: tag the existing op
            # containers with the configured template tokens
            migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
            migration_script.downgrade_ops_list[
                -1
            ].downgrade_token = downgrade_token
            migration_script._needs_render = True
        else:
            # subsequent run (e.g. multiple databases): append fresh op
            # containers carrying this run's tokens
            migration_script._upgrade_ops.append(
                ops.UpgradeOps([], upgrade_token=upgrade_token)
            )
            migration_script._downgrade_ops.append(
                ops.DowngradeOps([], downgrade_token=downgrade_token)
            )

        autogen_context = AutogenContext(
            migration_context, autogenerate=autogenerate
        )
        # keep the context around so _to_script can render imports later
        self._last_autogen_context: AutogenContext = autogen_context

        if autogenerate:
            compare._populate_migration_script(
                autogen_context, migration_script
            )

        # constructor-supplied hook runs first, then the one configured
        # via the environment options, if any
        if self.process_revision_directives:
            self.process_revision_directives(
                migration_context, rev, self.generated_revisions
            )

        hook = migration_context.opts["process_revision_directives"]
        if hook:
            hook(migration_context, rev, self.generated_revisions)

        for migration_script in self.generated_revisions:
            migration_script._needs_render = True

    def _default_revision(self) -> MigrationScript:
        """Build an empty :class:`.MigrationScript` directive from the
        revision-command arguments.
        """
        command_args: Dict[str, Any] = self.command_args
        op = ops.MigrationScript(
            rev_id=command_args["rev_id"] or util.rev_id(),
            message=command_args["message"],
            upgrade_ops=ops.UpgradeOps([]),
            downgrade_ops=ops.DowngradeOps([]),
            head=command_args["head"],
            splice=command_args["splice"],
            branch_label=command_args["branch_label"],
            version_path=command_args["version_path"],
            depends_on=command_args["depends_on"],
        )
        return op

    def generate_scripts(self) -> Iterator[Optional[Script]]:
        """Render every generated revision directive into a script file,
        yielding each resulting :class:`.Script` (or None).
        """
        for generated_revision in self.generated_revisions:
            yield self._to_script(generated_revision)

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More