Merge development into master

This commit is contained in:
github-actions[bot] 2023-10-14 12:45:55 +00:00 committed by GitHub
commit a09cc34e09
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 559 additions and 189 deletions

View File

@ -2,7 +2,7 @@
## Tools required
- Python 3.7.x to 3.10.x (3.9.x is highly recommended and 3.11 or greater is proscribed).
- Python 3.7.x to 3.11.x (3.9.x is highly recommended and 3.12 or greater is proscribed).
- Pycharm or Visual Studio code IDE are recommended but if you're happy with VIM, enjoy it!
- Git.
- UI testing must be done using Chrome latest version.

View File

@ -55,6 +55,7 @@ If you need something that is not already part of Bazarr, feel free to create a
- Embedded Subtitles
- Gestdown.info
- GreekSubtitles
- HDBits.org
- Hosszupuska
- LegendasDivx
- Karagarga.in

View File

@ -81,6 +81,8 @@ class EpisodesSubtitles(Resource):
title, 'series', profile_id=get_profile_id(episode_id=sonarrEpisodeId)))
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
history_log(1, sonarrSeriesId, sonarrEpisodeId, result)
send_notifications(sonarrSeriesId, sonarrEpisodeId, result.message)
store_subtitles(result.path, episodePath)
@ -155,6 +157,8 @@ class EpisodesSubtitles(Resource):
if not result:
logging.debug(f"BAZARR unable to process subtitles for this episode: {episodePath}")
else:
if isinstance(result, tuple) and len(result):
result = result[0]
provider = "manual"
score = 360
history_log(4, sonarrSeriesId, sonarrEpisodeId, result, fake_provider=provider, fake_score=score)

View File

@ -79,6 +79,8 @@ class MoviesSubtitles(Resource):
sceneName, title, 'movie', profile_id=get_profile_id(movie_id=radarrId)))
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
history_log_movie(1, radarrId, result)
store_subtitles_movie(result.path, moviePath)
else:
@ -151,6 +153,8 @@ class MoviesSubtitles(Resource):
if not result:
logging.debug(f"BAZARR unable to process subtitles for this movie: {moviePath}")
else:
if isinstance(result, tuple) and len(result):
result = result[0]
provider = "manual"
score = 120
history_log_movie(4, radarrId, result, fake_provider=provider, fake_score=score)

View File

@ -137,6 +137,8 @@ class ProviderEpisodes(Resource):
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, tuple) and len(result):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log(2, sonarrSeriesId, sonarrEpisodeId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):

View File

@ -131,6 +131,8 @@ class ProviderMovies(Resource):
except OSError:
return 'Unable to save subtitles file', 500
else:
if isinstance(result, tuple) and len(result):
result = result[0]
if isinstance(result, ProcessSubtitlesResult):
history_log_movie(2, radarrId, result)
if not settings.general.getboolean('dont_notify_manual_actions'):

View File

@ -15,7 +15,8 @@ subtitles_model = {
"code3": fields.String(),
"path": fields.String(),
"forced": fields.Boolean(),
"hi": fields.Boolean()
"hi": fields.Boolean(),
"file_size": fields.Integer()
}
subtitles_language_model = {

View File

@ -62,12 +62,14 @@ def postprocess(item):
item['subtitles'] = ast.literal_eval(item['subtitles'])
for i, subs in enumerate(item['subtitles']):
language = subs[0].split(':')
file_size = subs[2] if len(subs) > 2 else 0
item['subtitles'][i] = {"path": path_replace(subs[1]),
"name": language_from_alpha2(language[0]),
"code2": language[0],
"code3": alpha3_from_alpha2(language[0]),
"forced": False,
"hi": False}
"hi": False,
"file_size": file_size}
if len(language) > 1:
item['subtitles'][i].update(
{

View File

@ -229,6 +229,10 @@ defaults = {
'timeout': '600',
'unknown_as_english': 'False',
},
'hdbits': {
'username': '',
'passkey': '',
},
'karagarga': {
'username': '',
'password': '',

View File

@ -9,7 +9,7 @@ import flask_migrate
from dogpile.cache import make_region
from datetime import datetime
from sqlalchemy import create_engine, inspect, DateTime, ForeignKey, Integer, LargeBinary, Text, func, text
from sqlalchemy import create_engine, inspect, DateTime, ForeignKey, Integer, LargeBinary, Text, func, text, BigInteger
# importing here to be indirectly imported in other modules later
from sqlalchemy import update, delete, select, func # noqa W0611
from sqlalchemy.orm import scoped_session, sessionmaker, mapped_column
@ -128,7 +128,7 @@ class TableEpisodes(Base):
episode_file_id = mapped_column(Integer)
failedAttempts = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(Integer)
file_size = mapped_column(BigInteger)
format = mapped_column(Text)
missing_subtitles = mapped_column(Text)
monitored = mapped_column(Text)
@ -201,7 +201,7 @@ class TableMovies(Base):
failedAttempts = mapped_column(Text)
fanart = mapped_column(Text)
ffprobe_cache = mapped_column(LargeBinary)
file_size = mapped_column(Integer)
file_size = mapped_column(BigInteger)
format = mapped_column(Text)
imdbId = mapped_column(Text)
missing_subtitles = mapped_column(Text)

View File

@ -30,7 +30,7 @@ from sonarr.blacklist import blacklist_log
from utilities.analytics import event_tracker
_TRACEBACK_RE = re.compile(r'File "(.*?providers/.*?)", line (\d+)')
_TRACEBACK_RE = re.compile(r'File "(.*?providers[\\/].*?)", line (\d+)')
def time_until_midnight(timezone):
@ -80,6 +80,7 @@ def provider_throttle_map():
DownloadLimitExceeded: (datetime.timedelta(hours=6), "6 hours"),
DownloadLimitReached: (datetime.timedelta(hours=6), "6 hours"),
APIThrottled: (datetime.timedelta(seconds=15), "15 seconds"),
ServiceUnavailable: (datetime.timedelta(hours=1), "1 hour"),
},
"opensubtitlescom": {
AuthenticationError: (datetime.timedelta(hours=12), "12 hours"),
@ -108,7 +109,13 @@ def provider_throttle_map():
SearchLimitReached: (
legendasdivx_limit_reset_timedelta(),
f"{legendasdivx_limit_reset_timedelta().seconds // 3600 + 1} hours"),
}
},
"subf2m": {
ConfigurationError: (datetime.timedelta(hours=24), "24 hours"),
},
"whisperai": {
ConnectionError: (datetime.timedelta(hours=24), "24 hours"),
},
}
@ -294,6 +301,10 @@ def get_providers_auth():
'f_username': settings.karagarga.f_username,
'f_password': settings.karagarga.f_password,
},
'hdbits': {
'username': settings.hdbits.username,
'passkey': settings.hdbits.passkey,
},
'subf2m': {
'verify_ssl': settings.subf2m.getboolean('verify_ssl'),
'user_agent': settings.subf2m.user_agent,

View File

@ -143,13 +143,22 @@ def movies_images(url):
@check_login
@ui_bp.route('/system/backup/download/<path:filename>', methods=['GET'])
def backup_download(filename):
return send_file(os.path.join(settings.backup.folder, filename), max_age=0, as_attachment=True)
fullpath = os.path.normpath(os.path.join(settings.backup.folder, filename))
if not fullpath.startswith(settings.backup.folder):
return '', 404
else:
return send_file(fullpath, max_age=0, as_attachment=True)
@ui_bp.route('/api/swaggerui/static/<path:filename>', methods=['GET'])
def swaggerui_static(filename):
return send_file(os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'libs', 'flask_restx',
'static', filename))
basepath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'libs', 'flask_restx',
'static')
fullpath = os.path.join(basepath, filename)
if not fullpath.startswith(basepath):
return '', 404
else:
return send_file(fullpath)
def configured():
@ -160,6 +169,8 @@ def configured():
@ui_bp.route('/test', methods=['GET'])
@ui_bp.route('/test/<protocol>/<path:url>', methods=['GET'])
def proxy(protocol, url):
if protocol.lower() not in ['http', 'https']:
return dict(status=False, error='Unsupported protocol')
url = protocol + '://' + unquote(url)
params = request.args
try:

View File

@ -23,6 +23,46 @@ def update_all_movies():
logging.info('BAZARR All existing movie subtitles indexed from disk.')
def get_movie_file_size_from_db(movie_path):
    """Return the on-disk size (bytes) of a movie file, or 0 if unreadable.

    The Radarr-side path is first mapped to the Bazarr-side path before the
    filesystem is consulted; a missing or inaccessible file yields 0.
    """
    try:
        return os.path.getsize(path_mappings.path_replace_movie(movie_path))
    except OSError:
        # File absent or unreadable from Bazarr's point of view
        return 0
# Update movies in DB
# Update movies in DB
def update_movie(updated_movie, send_event):
    """Persist an updated movie row, then refresh its subtitles index.

    On database integrity failure the error is logged and nothing else runs;
    otherwise subtitles are re-indexed from disk and, when *send_event* is
    true, an update event is streamed to the UI.
    """
    try:
        database.execute(
            update(TableMovies).values(updated_movie)
            .where(TableMovies.tmdbId == updated_movie['tmdbId']))
    except IntegrityError as e:
        logging.error(f"BAZARR cannot update movie {updated_movie['path']} because of {e}")
        return
    store_subtitles_movie(updated_movie['path'],
                          path_mappings.path_replace_movie(updated_movie['path']))
    if send_event:
        event_stream(type='movie', action='update', payload=updated_movie['radarrId'])
# Insert new movies in DB
# Insert new movies in DB
def add_movie(added_movie, send_event):
    """Insert a new movie row, then index its subtitles from disk.

    Mirrors update_movie: an IntegrityError is logged and aborts the rest;
    on success subtitles are stored and an update event is optionally sent.
    """
    try:
        database.execute(
            insert(TableMovies)
            .values(added_movie))
    except IntegrityError as e:
        logging.error(f"BAZARR cannot insert movie {added_movie['path']} because of {e}")
        return
    store_subtitles_movie(added_movie['path'],
                          path_mappings.path_replace_movie(added_movie['path']))
    if send_event:
        event_stream(type='movie', action='update', payload=int(added_movie['radarrId']))
def update_movies(send_event=True):
check_radarr_rootfolder()
logging.debug('BAZARR Starting movie sync from Radarr.')
@ -49,15 +89,35 @@ def update_movies(send_event=True):
return
else:
# Get current movies in DB
current_movies_db = [x.tmdbId for x in
database.execute(
select(TableMovies.tmdbId))
.all()]
current_movies_id_db = [x.tmdbId for x in
database.execute(
select(TableMovies.tmdbId))
.all()]
current_movies_db_kv = [x.items() for x in [y._asdict()['TableMovies'].__dict__ for y in
database.execute(
select(TableMovies))
.all()]]
current_movies_radarr = []
movies_to_update = []
current_movies_radarr = [str(movie['tmdbId']) for movie in movies if movie['hasFile'] and
'movieFile' in movie and
(movie['movieFile']['size'] > 20480 or
get_movie_file_size_from_db(movie['movieFile']['path']) > 20480)]
movies_to_add = []
altered_movies = []
# Remove old movies from DB
movies_to_delete = list(set(current_movies_id_db) - set(current_movies_radarr))
if len(movies_to_delete):
try:
removed_movies = database.execute(delete(TableMovies)
.where(TableMovies.tmdbId.in_(movies_to_delete))
.returning(TableMovies.radarrId))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete movies because of {e}")
else:
for removed_movie in removed_movies:
if send_event:
event_stream(type='movie', action='delete', payload=removed_movie.radarrId)
# Build new and updated movies
movies_count = len(movies)
@ -71,75 +131,26 @@ def update_movies(send_event=True):
if movie['hasFile'] is True:
if 'movieFile' in movie:
try:
bazarr_file_size = \
os.path.getsize(path_mappings.path_replace_movie(movie['movieFile']['path']))
except OSError:
bazarr_file_size = 0
if movie['movieFile']['size'] > 20480 or bazarr_file_size > 20480:
if (movie['movieFile']['size'] > 20480 or
get_movie_file_size_from_db(movie['movieFile']['path']) > 20480):
# Add movies in radarr to current movies list
current_movies_radarr.append(str(movie['tmdbId']))
if str(movie['tmdbId']) in current_movies_db:
movies_to_update.append(movieParser(movie, action='update',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles))
if str(movie['tmdbId']) in current_movies_id_db:
parsed_movie = movieParser(movie, action='update',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles)
if not any([parsed_movie.items() <= x for x in current_movies_db_kv]):
update_movie(parsed_movie, send_event)
else:
movies_to_add.append(movieParser(movie, action='insert',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles))
parsed_movie = movieParser(movie, action='insert',
tags_dict=tagsDict,
movie_default_profile=movie_default_profile,
audio_profiles=audio_profiles)
add_movie(parsed_movie, send_event)
if send_event:
hide_progress(id='movies_progress')
# Remove old movies from DB
removed_movies = list(set(current_movies_db) - set(current_movies_radarr))
for removed_movie in removed_movies:
database.execute(
delete(TableMovies)
.where(TableMovies.tmdbId == removed_movie))
# Update movies in DB
for updated_movie in movies_to_update:
if database.execute(
select(TableMovies)
.filter_by(**updated_movie))\
.first():
continue
else:
database.execute(
update(TableMovies).values(updated_movie)
.where(TableMovies.tmdbId == updated_movie['tmdbId']))
altered_movies.append([updated_movie['tmdbId'],
updated_movie['path'],
updated_movie['radarrId'],
updated_movie['monitored']])
# Insert new movies in DB
for added_movie in movies_to_add:
try:
database.execute(
insert(TableMovies)
.values(added_movie))
except IntegrityError as e:
logging.error(f"BAZARR cannot update movie {added_movie['path']} because of {e}")
continue
altered_movies.append([added_movie['tmdbId'],
added_movie['path'],
added_movie['radarrId'],
added_movie['monitored']])
if send_event:
event_stream(type='movie', action='update', payload=int(added_movie['radarrId']))
# Store subtitles for added or modified movies
for i, altered_movie in enumerate(altered_movies, 1):
store_subtitles_movie(altered_movie[1], path_mappings.path_replace_movie(altered_movie[1]))
logging.debug('BAZARR All movies synced from Radarr into database.')
@ -155,13 +166,17 @@ def update_one_movie(movie_id, action, defer_search=False):
# Remove movie from DB
if action == 'deleted':
if existing_movie:
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
try:
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete movie {path_mappings.path_replace_movie(existing_movie.path)} "
f"because of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
return
movie_default_enabled = settings.general.getboolean('movie_default_enabled')
@ -200,25 +215,33 @@ def update_one_movie(movie_id, action, defer_search=False):
# Remove movie from DB
if not movie and existing_movie:
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
try:
database.execute(
delete(TableMovies)
.where(TableMovies.radarrId == movie_id))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete movie {path_mappings.path_replace_movie(existing_movie.path)} because "
f"of {e}")
else:
event_stream(type='movie', action='delete', payload=int(movie_id))
logging.debug('BAZARR deleted this movie from the database:{}'.format(path_mappings.path_replace_movie(
existing_movie.path)))
return
# Update existing movie in DB
elif movie and existing_movie:
database.execute(
update(TableMovies)
.values(movie)
.where(TableMovies.radarrId == movie['radarrId']))
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
try:
database.execute(
update(TableMovies)
.values(movie)
.where(TableMovies.radarrId == movie['radarrId']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update movie {path_mappings.path_replace_movie(movie['path'])} because "
f"of {e}")
else:
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR updated this movie into the database:{}'.format(path_mappings.path_replace_movie(
movie['path'])))
# Insert new movie in DB
elif movie and not existing_movie:
@ -227,7 +250,8 @@ def update_one_movie(movie_id, action, defer_search=False):
insert(TableMovies)
.values(movie))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert movie {movie['path']} because of {e}")
logging.error(f"BAZARR cannot insert movie {path_mappings.path_replace_movie(movie['path'])} because "
f"of {e}")
else:
event_stream(type='movie', action='update', payload=int(movie_id))
logging.debug('BAZARR inserted this movie into the database:{}'.format(path_mappings.path_replace_movie(

View File

@ -28,19 +28,23 @@ def sync_episodes(series_id, send_event=True):
# Get current episodes id in DB
if series_id:
current_episodes_db_list = [row.sonarrEpisodeId for row in
database.execute(
select(TableEpisodes.sonarrEpisodeId,
TableEpisodes.path,
TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.sonarrSeriesId == series_id)).all()]
current_episodes_id_db_list = [row.sonarrEpisodeId for row in
database.execute(
select(TableEpisodes.sonarrEpisodeId,
TableEpisodes.path,
TableEpisodes.sonarrSeriesId)
.where(TableEpisodes.sonarrSeriesId == series_id)).all()]
current_episodes_db_kv = [x.items() for x in [y._asdict()['TableEpisodes'].__dict__ for y in
database.execute(
select(TableEpisodes)
.where(TableEpisodes.sonarrSeriesId == series_id))
.all()]]
else:
return
current_episodes_sonarr = []
episodes_to_update = []
episodes_to_add = []
altered_episodes = []
# Get episodes data for a series from Sonarr
episodes = get_episodes_from_sonarr_api(url=url_sonarr(), apikey_sonarr=apikey_sonarr,
@ -70,76 +74,59 @@ def sync_episodes(series_id, send_event=True):
current_episodes_sonarr.append(episode['id'])
# Parse episode data
if episode['id'] in current_episodes_db_list:
episodes_to_update.append(episodeParser(episode))
if episode['id'] in current_episodes_id_db_list:
parsed_episode = episodeParser(episode)
if not any([parsed_episode.items() <= x for x in current_episodes_db_kv]):
episodes_to_update.append(parsed_episode)
else:
episodes_to_add.append(episodeParser(episode))
# Remove old episodes from DB
removed_episodes = list(set(current_episodes_db_list) - set(current_episodes_sonarr))
episodes_to_delete = list(set(current_episodes_id_db_list) - set(current_episodes_sonarr))
stmt = select(TableEpisodes.path,
TableEpisodes.sonarrSeriesId,
TableEpisodes.sonarrEpisodeId)
for removed_episode in removed_episodes:
episode_to_delete = database.execute(stmt.where(TableEpisodes.sonarrEpisodeId == removed_episode)).first()
if not episode_to_delete:
continue
if len(episodes_to_delete):
try:
database.execute(
delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == removed_episode))
except Exception as e:
logging.error(f"BAZARR cannot delete episode {episode_to_delete.path} because of {e}")
continue
removed_episodes = database.execute(delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId.in_(episodes_to_delete))
.returning(TableEpisodes.sonarrEpisodeId))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete episodes because of {e}")
else:
if send_event:
event_stream(type='episode', action='delete', payload=episode_to_delete.sonarrEpisodeId)
for removed_episode in removed_episodes:
if send_event:
event_stream(type='episode', action='delete', payload=removed_episode.sonarrEpisodeId)
# Update existing episodes in DB
for updated_episode in episodes_to_update:
if database.execute(
select(TableEpisodes)
.filter_by(**updated_episode))\
.first():
continue
if len(episodes_to_update):
try:
database.execute(update(TableEpisodes), episodes_to_update)
except IntegrityError as e:
logging.error(f"BAZARR cannot update episodes because of {e}")
else:
try:
database.execute(
update(TableEpisodes)
.values(updated_episode)
.where(TableEpisodes.sonarrEpisodeId == updated_episode['sonarrEpisodeId']))
except IntegrityError as e:
logging.error(f"BAZARR cannot update episode {updated_episode['path']} because of {e}")
continue
else:
altered_episodes.append([updated_episode['sonarrEpisodeId'],
updated_episode['path'],
updated_episode['sonarrSeriesId']])
for updated_episode in episodes_to_update:
# not using .returning() because it's not supported on executemany() with SQlite
store_subtitles(updated_episode['path'], path_mappings.path_replace(updated_episode['path']))
if send_event:
event_stream(type='episode', action='update', payload=updated_episode['sonarrEpisodeId'])
# Insert new episodes in DB
for added_episode in episodes_to_add:
if len(episodes_to_add):
try:
database.execute(
added_episodes = database.execute(
insert(TableEpisodes)
.values(added_episode))
.values(episodes_to_add)
.returning(TableEpisodes.sonarrEpisodeId, TableEpisodes.path, TableEpisodes.sonarrSeriesId))
except IntegrityError as e:
logging.error(f"BAZARR cannot insert episode {added_episode['path']} because of {e}")
continue
logging.error(f"BAZARR cannot insert episodes because of {e}")
else:
altered_episodes.append([added_episode['sonarrEpisodeId'],
added_episode['path'],
added_episode['monitored']])
if send_event:
event_stream(type='episode', payload=added_episode['sonarrEpisodeId'])
for added_episode in added_episodes:
store_subtitles(added_episode.path, path_mappings.path_replace(added_episode.path))
# Store subtitles for added or modified episodes
for i, altered_episode in enumerate(altered_episodes, 1):
store_subtitles(altered_episode[1], path_mappings.path_replace(altered_episode[1]))
if send_event:
event_stream(type='episode', payload=added_episode.sonarrEpisodeId)
logging.debug('BAZARR All episodes synced from Sonarr into database.')
logging.debug(f'BAZARR All episodes from series ID {series_id} synced from Sonarr into database.')
def sync_one_episode(episode_id, defer_search=False):
@ -178,13 +165,16 @@ def sync_one_episode(episode_id, defer_search=False):
# Remove episode from DB
if not episode and existing_episode:
database.execute(
delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == episode_id))
event_stream(type='episode', action='delete', payload=int(episode_id))
logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
existing_episode['path'])))
try:
database.execute(
delete(TableEpisodes)
.where(TableEpisodes.sonarrEpisodeId == episode_id))
except IntegrityError as e:
logging.error(f"BAZARR cannot delete episode {existing_episode.path} because of {e}")
else:
event_stream(type='episode', action='delete', payload=int(episode_id))
logging.debug('BAZARR deleted this episode from the database:{}'.format(path_mappings.path_replace(
existing_episode['path'])))
return
# Update existing episodes in DB

View File

@ -33,7 +33,7 @@ def movies_download_subtitles(no):
TableMovies.monitored)
.where(reduce(operator.and_, conditions))) \
.first()
if not len(movie):
if not movie:
logging.debug("BAZARR no movie with that radarrId can be found in database:", str(no))
return
@ -82,6 +82,8 @@ def movies_download_subtitles(no):
check_if_still_required=True):
if result:
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles_movie(movie.path, moviePath)
history_log_movie(1, no, result)
send_notifications_movie(no, result.message)

View File

@ -92,6 +92,8 @@ def series_download_subtitles(no):
'series',
check_if_still_required=True):
if result:
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, no, episode.sonarrEpisodeId, result)
send_notifications(no, episode.sonarrEpisodeId, result.message)
@ -165,6 +167,8 @@ def episode_download_subtitles(no, send_progress=False):
'series',
check_if_still_required=True):
if result:
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, episode.sonarrSeriesId, episode.sonarrEpisodeId, result)
send_notifications(episode.sonarrSeriesId, episode.sonarrEpisodeId, result.message)

View File

@ -149,13 +149,13 @@ def process_subtitle(subtitle, media_type, audio_language, path, max_score, is_u
reversed_subtitles_path=reversed_subtitles_path,
hearing_impaired=subtitle.language.hi,
matched=list(subtitle.matches or []),
not_matched=_get_not_matched(subtitle, media_type))
not_matched=_get_not_matched(subtitle, media_type)),
def _get_not_matched(subtitle, media_type):
_, _, scores = _get_scores(media_type)
if 'hash' not in subtitle.matches:
if subtitle.matches and isinstance(subtitle.matches, set) and 'hash' not in subtitle.matches:
return list(set(scores) - set(subtitle.matches))
else:
return []

View File

@ -116,7 +116,10 @@ def upgrade_subtitles():
is_upgrade=True))
if result:
result = result[0]
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles(episode['video_path'], path_mappings.path_replace(episode['video_path']))
history_log(3, episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result)
send_notifications(episode['sonarrSeriesId'], episode['sonarrEpisodeId'], result.message)
@ -197,7 +200,10 @@ def upgrade_subtitles():
forced_minimum_score=int(movie['score']),
is_upgrade=True))
if result:
result = result[0]
if isinstance(result, list) and len(result):
result = result[0]
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles_movie(movie['video_path'],
path_mappings.path_replace_movie(movie['video_path']))
history_log_movie(3, movie['radarrId'], result)

View File

@ -53,6 +53,8 @@ def _wanted_movie(movie):
check_if_still_required=True):
if result:
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles_movie(movie.path, path_mappings.path_replace_movie(movie.path))
history_log_movie(1, movie.radarrId, result)
event_stream(type='movie-wanted', action='delete', payload=movie.radarrId)

View File

@ -53,6 +53,8 @@ def _wanted_episode(episode):
'series',
check_if_still_required=True):
if result:
if isinstance(result, tuple) and len(result):
result = result[0]
store_subtitles(episode.path, path_mappings.path_replace(episode.path))
history_log(1, episode.sonarrSeriesId, episode.sonarrEpisodeId, result)
event_stream(type='series', action='update', payload=episode.sonarrSeriesId)

View File

@ -93,6 +93,11 @@ def embedded_audio_reader(file, file_size, episode_file_id=None, movie_file_id=N
audio_list.append(None)
continue
if isinstance(detected_language['language'], str):
logging.error(f"Cannot identify audio track language for this file: {file}. Value detected is "
f"{detected_language['language']}.")
continue
alpha3 = _handle_alpha3(detected_language)
language = language_from_alpha3(alpha3)

View File

@ -127,6 +127,24 @@ export const ProviderList: Readonly<ProviderInfo[]> = [
name: "GreekSubtitles",
description: "Greek Subtitles Provider",
},
{
key: "hdbits",
name: "HDBits.org",
description: "Private Tracker Subtitles Provider",
message:
"You must have 2FA enabled and whitelist your IP if you are running from a server.",
inputs: [
{
type: "text",
key: "username",
},
{
type: "password",
key: "passkey",
name: "Your profile's passkey",
},
],
},
{ key: "hosszupuska", description: "Hungarian Subtitles Provider" },
{
key: "legendasdivx",

View File

@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
import functools
import logging
import hashlib
import logging
import os
import re
import shutil
@ -14,10 +14,12 @@ from fese import container
from fese import FFprobeSubtitleStream
from fese import FFprobeVideoContainer
from fese import tags
from fese.exceptions import ExtractionError
from fese.exceptions import InvalidSource
from subliminal_patch.core import Episode
from subliminal_patch.core import Movie
from subliminal_patch.providers import Provider
from subliminal_patch.providers.utils import blacklist_on
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language
@ -185,6 +187,7 @@ class EmbeddedSubtitlesProvider(Provider):
"series" if isinstance(video, Episode) else "movie",
)
@blacklist_on(ExtractionError)
def download_subtitle(self, subtitle: EmbeddedSubtitle):
path = self._get_subtitle_path(subtitle)
@ -328,7 +331,7 @@ def _discard_possible_incomplete_subtitles(streams):
for stream in streams:
# 500 < 1200
if not stream.language.forced and stream.tags.frames < max_frames // 2:
if not stream.language.forced and stream.tags.frames < max_frames // 3:
logger.debug(
"Possible bad subtitle found: %s (%s frames - %s frames)",
stream,

View File

@ -0,0 +1,189 @@
# -*- coding: utf-8 -*-
import functools
from json import JSONDecodeError
import logging
import re
import time
from babelfish import language_converters
from guessit import guessit
from requests import Session
from subliminal_patch.core import Episode
from subliminal_patch.core import Movie
from subliminal_patch.providers import Provider
from subliminal_patch.providers.utils import get_archive_from_bytes
from subliminal_patch.providers.utils import get_subtitle_from_archive
from subliminal_patch.providers.utils import update_matches
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language
logger = logging.getLogger(__name__)
class HDBitsSubtitle(Subtitle):
    """A single subtitle file attached to an HDBits torrent."""
    provider_name = "hdbits"
    # Results come from torrent-id lookups, not media-hash verification
    hash_verifiable = False
    def __init__(self, language, id, name, filename, matches=None, episode=None):
        # language: Language object; its .hi flag drives hearing_impaired
        # id: HDBits subtitle id (used to build the download URL)
        # name: release title shown on the site (used as release_info)
        # filename: original file name (drives archive vs. plain handling)
        # matches: optional pre-computed match set from the search query
        # episode: episode number for series lookups, None for movies
        super().__init__(language, hearing_impaired=language.hi)
        self.item_id = id
        self.release_info = name
        self.filename = filename
        self.episode = episode
        self._matches = matches or set()
    def get_matches(self, video):
        """Return the match set, refined in place against *video*'s metadata."""
        update_matches(self._matches, video, self.release_info)
        return self._matches
    @property
    def id(self):
        # Provider-namespaced unique id, e.g. "hdbits_1234"
        return f"{self.provider_name}_{self.item_id}"
# Site-specific language codes that don't parse as IETF tags.
# NOTE(review): "uk" apparently means United Kingdom (English) on HDBits,
# not Ukrainian — confirm against the site's language list.
_SPECIAL_LANG_MAP = {"uk": ("eng",), "br": ("por", "BR"), "gr": ("ell",)}
# Subtitle extensions we can handle (plain files or supported archives)
_ALLOWED_EXTENSIONS = (".ass", ".srt", ".zip", ".rar")
# Subtitles whose title/filename contains any of these keywords are skipped
_FILTER = re.compile("extra|commentary|lyrics|forced")
def _get_language(code):
    """Translate an HDBits language code into a Language object.

    Site-specific codes are resolved through _SPECIAL_LANG_MAP first; any
    other code is parsed as an IETF tag. Returns None when parsing fails.
    """
    if code in _SPECIAL_LANG_MAP:
        return Language(*_SPECIAL_LANG_MAP[code])
    try:
        parsed = Language.fromietf(code)
    except Exception as error:
        logger.debug("Error [%s] loading language with '%s' code", error, code)
        return None
    return parsed
class HDBitsProvider(Provider):
provider_name = "hdbits"
video_types = (Movie, Episode)
subtitle_class = HDBitsSubtitle
languages = {Language("por", "BR")} | {
Language.fromalpha2(l) for l in language_converters["alpha2"].codes
}
def __init__(self, username, passkey) -> None:
self._session = Session()
self._def_params = {"username": username, "passkey": passkey}
self._session.headers.update({"User-Agent": "Bazarr"})
def initialize(self):
pass
def terminate(self):
self._session.close()
def list_subtitles(self, video, languages):
episode = None
if isinstance(video, Movie):
lookup = {"imdb": {"id": (video.imdb_id or "").lstrip("tt")}}
matches = {"imdb_id", "title", "year"}
else:
lookup = {"tvdb": {"id": video.series_tvdb_id, "season": video.season}}
matches = {"tvdb_id", "imdb_id", "series", "title", "episode", "season"}
episode = video.episode
logger.debug("ID lookup: %s", lookup)
response = self._session.post(
"https://hdbits.org/api/torrents", json={**self._def_params, **lookup}
)
response.raise_for_status()
ids = [item["id"] for item in response.json()["data"]]
subtitles = []
for torrent_id in ids:
subtitles.extend(
self._parse_subtitles(torrent_id, languages, episode, matches)
)
time.sleep(0.5)
return subtitles
def _parse_subtitles(self, torrent_id, languages, episode=None, matches=None):
response = self._session.post(
"https://hdbits.org/api/subtitles",
json={**self._def_params, **{"torrent_id": torrent_id}},
)
try:
subtitles = response.json()["data"]
except JSONDecodeError:
logger.debug("Couldn't get reponse for %s", torrent_id)
return []
parsed_subs = []
for subtitle in subtitles:
if not subtitle["filename"].endswith(_ALLOWED_EXTENSIONS):
logger.debug("Extension not supported: %s", subtitle["filename"])
continue
language = _get_language(subtitle["language"])
if language is None:
continue
if not _is_allowed(subtitle):
continue
if language not in languages:
logger.debug("Ignoring language: %r !~ %r", language, languages)
continue
if episode is not None:
eps = _memoized_episode_guess(subtitle["title"]).get("episode")
if eps is not None and episode not in eps:
logger.debug("Not matched: %s != %s", subtitle["title"], episode)
continue
parsed = HDBitsSubtitle(
language,
subtitle["id"],
subtitle["title"],
subtitle["filename"],
matches,
episode,
)
parsed_subs.append(parsed)
return parsed_subs
def download_subtitle(self, subtitle):
    """Download subtitle content, unpacking zip/rar archives when needed."""
    url = (
        f"https://hdbits.org/getdox.php?id={subtitle.item_id}"
        f"&passkey={self._def_params['passkey']}"
    )
    response = self._session.get(url)
    response.raise_for_status()

    if not subtitle.filename.endswith((".zip", ".rar")):
        # Plain subtitle file: use the payload as-is
        subtitle.content = response.content
        return

    archive = get_archive_from_bytes(response.content)
    subtitle.content = get_subtitle_from_archive(archive, episode=subtitle.episode)
def _is_allowed(subtitle):
    """Return False when the title or filename matches the exclusion filter."""
    fields = (subtitle["title"], subtitle["filename"])
    if any(_FILTER.search(text.lower()) for text in fields):
        logger.debug("Not allowed subtitle: %s", subtitle)
        return False
    return True
@functools.lru_cache(2048)
def _memoized_episode_guess(content):
    """Run guessit on *content* in episode mode, caching results across calls."""
    # Restrict guessit to the relevant keys to save time on unnecessary
    # checks; codec keys are included so tokens like "x264" or "5.1" are
    # not misread as episode info.
    options = {
        "type": "episode",
        "includes": ["season", "episode", "video_codec", "audio_codec"],
        "enforce_list": True,
    }
    return guessit(content, options)

View File

@ -184,13 +184,14 @@ class HosszupuskaProvider(Provider, ProviderSubtitleArchiveMixin):
# sub_date = datas[4].getText()
sub_year = sub_english_name = sub_version = None
# Handle the case when '(' in subtitle
if datas[1].getText().count('(') == 1:
sub_english_name = _SUB_ENGLISH_NAME_RE.split(datas[1].getText())[3]
if datas[1].getText().count('(') == 2:
sub_year = _SUB_YEAR_RE.findall(datas[1].getText().strip())[0]
sub_year_search = _SUB_YEAR_RE.findall(datas[1].getText().strip())
if sub_year_search and len(sub_year_search):
sub_year = sub_year_search[0]
sub_english_name = _SUB_ENGLISH_NAME_RE.split(datas[1].getText().split('(')[0])[0]
if not sub_english_name:
@ -203,7 +204,7 @@ class HosszupuskaProvider(Provider, ProviderSubtitleArchiveMixin):
if sub_season == season and sub_episode == episode:
sub_language = self.get_language(datas[2].find_all('img')[0]['src'].split('/')[1])
sub_downloadlink = datas[6].find_all('a')[1]['href']
sub_downloadlink = datas[6].find_all('a')[0]['href']
sub_id = sub_downloadlink.split('=')[1].split('.')[0]
if datas[1].getText().count('(') == 1:

View File

@ -473,7 +473,16 @@ class OpenSubtitlesComProvider(ProviderRetryMixin, Provider):
except Exception:
status_code = None
else:
if status_code == 401:
if status_code == 400:
try:
json_response = response.json()
message = json_response['message']
except JSONDecodeError:
raise ProviderError('Invalid JSON returned by provider')
else:
log_request_response(response)
raise ConfigurationError(message)
elif status_code == 401:
log_request_response(response)
self.reset_token()
if is_retry:

View File

@ -29,6 +29,7 @@ _CLEAN_TITLE_RES = [
_SPANISH_RE = re.compile(r"españa|ib[eé]rico|castellano|gallego|castilla")
_YEAR_RE = re.compile(r"(\(\d{4}\))")
_YEAR_RE_INT = re.compile(r"\((\d{4})\)")
_SERIES_RE = re.compile(
@ -351,7 +352,14 @@ def _check_episode(video, title):
def _check_movie(video, title):
if str(video.year) not in title:
try:
year = int(_YEAR_RE_INT.search(title).group(1)) # type: ignore
except (AttributeError, ValueError):
logger.debug("Year not found in title (%s). Discarding movie", title)
return False
if video.year and abs(year - video.year) > 1:
logger.debug("Year not matching: %s -> %s", year, video.year)
return False
aka_split = re.split("aka", title, flags=re.IGNORECASE)

View File

@ -12,6 +12,7 @@ from guessit import guessit
import pysubs2
import rarfile
from subliminal.subtitle import fix_line_ending
from subliminal_patch.exceptions import MustGetBlacklisted
from subliminal_patch.core import Episode
from subliminal_patch.subtitle import guess_matches
@ -23,6 +24,22 @@ logger = logging.getLogger(__name__)
_MatchingSub = namedtuple("_MatchingSub", ("file", "priority", "context"))
def blacklist_on(*exc_types):
"Raise MustGetBlacklisted if any of the exc_types are raised."
def decorator(method):
def wrapper(self, subtitle):
try:
return method(self, subtitle)
except exc_types:
logger.error("Sending blacklist exception", exc_info=True)
raise MustGetBlacklisted(subtitle.id, subtitle.media_type)
return wrapper
return decorator
def _get_matching_sub(
sub_names, forced=False, episode=None, episode_title=None, **kwargs
):
@ -169,11 +186,12 @@ def update_matches(
video,
release_info: Union[str, Iterable[str]],
split="\n",
**guessit_options
**guessit_options,
):
"""Update matches set from release info string or Iterable.
Use the split parameter to iterate over the set delimiter; set None to avoid split."""
Use the split parameter to iterate over the set delimiter; set None to avoid split.
"""
guessit_options["type"] = "episode" if isinstance(video, Episode) else "movie"

View File

@ -0,0 +1,35 @@
"""empty message
Revision ID: cee6a710cb71
Revises: 195144da1f7e
Create Date: 2023-09-20 23:11:15.678439
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'cee6a710cb71'
down_revision = '195144da1f7e'
branch_labels = None
depends_on = None
bind = op.get_context().bind
def upgrade():
    """Alter the ``file_size`` columns to BigInteger on PostgreSQL.

    Only PostgreSQL is touched here; other engines are left unchanged
    (presumably their integer storage already accommodates large values
    — confirm against the project's supported databases).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if bind.engine.name == 'postgresql':
        with op.batch_alter_table('table_episodes') as batch_op:
            batch_op.alter_column('file_size', type_=sa.BigInteger())
        with op.batch_alter_table('table_movies') as batch_op:
            batch_op.alter_column('file_size', type_=sa.BigInteger())
    # ### end Alembic commands ###
def downgrade():
    """No-op: the BigInteger column widening is not reverted."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###

View File

@ -14,7 +14,7 @@ except ImportError:
pass
from app.database import TableHistory, TableHistoryMovie, TableBlacklist, TableBlacklistMovie, TableEpisodes, \
TableShows, TableMovies, TableLanguagesProfiles
TableShows, TableMovies, TableLanguagesProfiles, TableShowsRootfolder, TableMoviesRootfolder
# revision identifiers, used by Alembic.
revision = 'dc09994b7e65'
@ -72,7 +72,7 @@ def upgrade():
# Update series table
with op.batch_alter_table('table_shows', recreate=should_recreate) as batch_op:
if bind.engine.name == 'postgresql':
batch_op.execute('ALTER TABLE table_shows DROP CONSTRAINT IF EXISTS table_shows_pkey;')
batch_op.execute('ALTER TABLE table_shows DROP CONSTRAINT IF EXISTS table_shows_pkey CASCADE;')
batch_op.execute(sa.update(TableShows)
.values({TableShows.profileId: None})
.where(TableShows.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
@ -101,7 +101,7 @@ def upgrade():
# Update episodes table
with op.batch_alter_table('table_episodes') as batch_op:
if bind.engine.name == 'postgresql':
batch_op.execute('ALTER TABLE table_episodes DROP CONSTRAINT IF EXISTS table_episodes_pkey;')
batch_op.execute('ALTER TABLE table_episodes DROP CONSTRAINT IF EXISTS table_episodes_pkey CASCADE;')
batch_op.execute(sa.delete(TableEpisodes).where(TableEpisodes.sonarrSeriesId.not_in(
sa.select(TableShows.sonarrSeriesId))))
batch_op.alter_column(column_name='sonarrSeriesId', existing_type=sa.INTEGER(), nullable=True)
@ -178,16 +178,17 @@ def upgrade():
# Update series rootfolder table
with op.batch_alter_table('table_shows_rootfolder') as batch_op:
batch_op.execute(sa.delete(TableShowsRootfolder))
if bind.engine.name == 'postgresql':
batch_op.execute('ALTER TABLE table_shows_rootfolder DROP CONSTRAINT IF EXISTS '
'table_shows_rootfolder_pkey;')
'table_shows_rootfolder_pkey CASCADE;')
batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
batch_op.create_primary_key(constraint_name='pk_table_shows_rootfolder', columns=['id'])
# Update movies table
with op.batch_alter_table('table_movies', recreate=should_recreate) as batch_op:
if bind.engine.name == 'postgresql':
batch_op.execute('ALTER TABLE table_movies DROP CONSTRAINT IF EXISTS table_movies_pkey;')
batch_op.execute('ALTER TABLE table_movies DROP CONSTRAINT IF EXISTS table_movies_pkey CASCADE;')
batch_op.execute(sa.update(TableMovies)
.values({TableMovies.profileId: None})
.where(TableMovies.profileId.not_in(sa.select(TableLanguagesProfiles.profileId))))
@ -259,9 +260,10 @@ def upgrade():
# Update movies rootfolder table
with op.batch_alter_table('table_movies_rootfolder') as batch_op:
batch_op.execute(sa.delete(TableMoviesRootfolder))
if bind.engine.name == 'postgresql':
batch_op.execute('ALTER TABLE table_movies_rootfolder DROP CONSTRAINT IF EXISTS '
'table_movies_rootfolder_pkey;')
'table_movies_rootfolder_pkey CASCADE;')
batch_op.alter_column(column_name='id', existing_type=sa.INTEGER(), nullable=False, autoincrement=True)
batch_op.create_primary_key(constraint_name='pk_table_movies_rootfolder', columns=['id'])
# ### end Alembic commands ###

View File

@ -26,6 +26,15 @@ def test_list_subtitles_movie_with_year_fallback(movies):
assert provider.list_subtitles(item, {Language("spa", "MX")})
def test_list_subtitles_movie_with_one_difference_year(movies):
item = list(movies.values())[0]
item.title = "Sisu"
item.year = 2023
with SubdivxSubtitlesProvider() as provider:
assert provider.list_subtitles(item, {Language("spa", "MX")})
def test_handle_multi_page_search(episodes):
with SubdivxSubtitlesProvider() as provider:
for _ in provider._handle_multi_page_search(
@ -74,6 +83,7 @@ def test_list_subtitles_episode_with_title_only_fallback(episodes):
subtitles = provider.list_subtitles(item, {Language("spa", "MX")})
assert len(subtitles) > 2
def test_list_subtitles_episode_with_episode_title_fallback(episodes):
item = list(episodes.values())[0]
item.series = "30 for 30"