2018-10-31 19:34:40 +00:00
|
|
|
# coding=utf-8
|
2018-08-16 02:01:49 +00:00
|
|
|
|
2017-09-17 00:11:47 +00:00
|
|
|
import os
|
2019-01-24 20:39:23 +00:00
|
|
|
import sys
|
2017-10-16 23:27:19 +00:00
|
|
|
import ast
|
2017-12-06 04:07:37 +00:00
|
|
|
import logging
|
2018-03-24 00:00:50 +00:00
|
|
|
import subprocess
|
2018-08-27 00:53:02 +00:00
|
|
|
import time
|
2018-10-31 19:34:40 +00:00
|
|
|
import cPickle as pickle
|
|
|
|
import codecs
|
2018-11-29 13:03:44 +00:00
|
|
|
import types
|
2019-02-05 11:57:45 +00:00
|
|
|
import re
|
2018-10-31 16:33:19 +00:00
|
|
|
import subliminal
|
2019-08-13 17:46:29 +00:00
|
|
|
import platform
|
2019-08-16 01:07:40 +00:00
|
|
|
import operator
|
2018-08-27 00:53:02 +00:00
|
|
|
from datetime import datetime, timedelta
|
2018-10-31 17:10:04 +00:00
|
|
|
from subzero.language import Language
|
2019-02-05 03:57:15 +00:00
|
|
|
from subzero.video import parse_video
|
2018-11-29 14:01:22 +00:00
|
|
|
from subliminal import region, score as subliminal_scores, \
|
2019-02-05 03:57:15 +00:00
|
|
|
list_subtitles, Episode, Movie
|
2019-03-31 03:09:52 +00:00
|
|
|
from subliminal_patch.core import SZAsyncProviderPool, download_best_subtitles, save_subtitles, download_subtitles, \
|
|
|
|
list_all_subtitles
|
2018-10-31 17:10:04 +00:00
|
|
|
from subliminal_patch.score import compute_score
|
2019-03-20 21:20:10 +00:00
|
|
|
from subliminal.refiners.tvdb import series_re
|
2019-01-01 03:37:25 +00:00
|
|
|
from get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2, language_from_alpha2
|
2018-12-15 00:36:28 +00:00
|
|
|
from config import settings
|
|
|
|
from helper import path_replace, path_replace_movie, path_replace_reverse, \
|
2019-01-24 20:39:23 +00:00
|
|
|
path_replace_reverse_movie, pp_replace, get_target_folder, force_unicode
|
2018-08-15 20:51:46 +00:00
|
|
|
from list_subtitles import store_subtitles, list_missing_subtitles, store_subtitles_movie, list_missing_subtitles_movies
|
2019-07-10 13:36:49 +00:00
|
|
|
from utils import history_log, history_log_movie, get_binary
|
2018-04-24 14:48:52 +00:00
|
|
|
from notifier import send_notifications, send_notifications_movie
|
2019-01-24 14:00:03 +00:00
|
|
|
from get_providers import get_providers, get_providers_auth, provider_throttle, provider_pool
|
2018-10-31 19:34:40 +00:00
|
|
|
from get_args import args
|
2019-02-21 04:30:25 +00:00
|
|
|
from queueconfig import notifications
|
2019-07-10 13:36:49 +00:00
|
|
|
from pymediainfo import MediaInfo
|
2019-08-19 22:13:29 +00:00
|
|
|
from database import TableShows, TableEpisodes, TableMovies, TableHistory, TableHistoryMovie
|
2019-08-17 14:24:55 +00:00
|
|
|
from peewee import fn, JOIN
|
2017-09-17 00:11:47 +00:00
|
|
|
|
2018-10-31 17:10:04 +00:00
|
|
|
|
2019-07-10 13:36:49 +00:00
|
|
|
def get_video(path, title, sceneName, use_scenename, use_mediainfo, providers=None, media_type="movie"):
    """
    Construct `Video` instance

    :param path: path to video
    :param title: series/movie title
    :param sceneName: sceneName
    :param use_scenename: use sceneName
    :param use_mediainfo: use media info to refine the video
    :param providers: provider list for selective hashing
    :param media_type: movie/series
    :return: `Video` instance, or None when parsing/refining fails
    """
    hints = {"title": title, "type": "movie" if media_type == "movie" else "episode"}
    used_scene_name = False
    original_path = path
    original_name = os.path.basename(path)
    hash_from = None
    # NOTE: sceneName arrives as the literal string "None" when absent — TODO confirm with callers
    if sceneName != "None" and use_scenename:
        # use the sceneName but keep the folder structure for better guessing
        path = os.path.join(os.path.dirname(path), sceneName + os.path.splitext(path)[1])
        used_scene_name = True
        hash_from = original_path

    try:
        video = parse_video(path, hints=hints, providers=providers, dry_run=used_scene_name,
                            hash_from=hash_from)
        # keep the real on-disk identity around even when the scene name was used for guessing
        video.used_scene_name = used_scene_name
        video.original_name = original_name
        video.original_path = original_path
        refine_from_db(original_path, video)

        # mediainfo refining is only attempted off-Linux when the setting is enabled
        if platform.system() != "Linux" and use_mediainfo:
            refine_from_mediainfo(original_path, video)

        logging.debug('BAZARR is using those video object properties: %s', vars(video))
        return video

    except Exception:
        # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt;
        # narrowed to Exception so process-control exceptions still propagate
        logging.exception("BAZARR Error trying to get video information for this file: " + path)
        return None
|
2018-11-29 13:03:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_scores(video, media_type, min_score_movie_perc=60 * 100 / 120.0, min_score_series_perc=240 * 100 / 360.0,
               min_score_special_ep=180 * 100 / 360.0):
    """
    Get score range for a video.

    :param video: `Video` instance
    :param media_type: movie/series
    :param min_score_movie_perc: Percentage of max score for min score of movies
    :param min_score_series_perc: Percentage of max score for min score of series
    :param min_score_special_ep: Percentage of max score for min score of series special episode
    :return: tuple(min_score, max_score, set(scores))
    """
    if media_type == "series":
        max_score = 360.0
        # special episodes get a lower acceptance bar than regular episodes
        percentage = min_score_special_ep if video.is_special else min_score_series_perc
        relevant_scores = subliminal_scores.episode_scores.keys()
    else:
        max_score = 120.0
        percentage = min_score_movie_perc
        relevant_scores = subliminal_scores.movie_scores.keys()

    return max_score * percentage / 100.0, max_score, set(relevant_scores)
|
|
|
|
|
|
|
|
|
2019-06-11 18:45:48 +00:00
|
|
|
def download_subtitle(path, language, hi, forced, providers, providers_auth, sceneName, title, media_type,
                      forced_minimum_score=None, is_upgrade=False):
    """
    Search providers for the best subtitles for one file, save them to disk,
    optionally run the post-processing command, and return a result tuple.

    :param path: mapped path to the media file
    :param language: language code ('pob' or alpha3) or a list of them
    :param hi: "True"/"False" string — hearing impaired preference
    :param forced: "True"/"False" string — forced-subtitles preference
    :param providers: enabled (non-throttled) provider names
    :param providers_auth: per-provider configuration dict (mutated below for only_foreign)
    :param sceneName: scene release name or the string "None"
    :param title: series/movie title used as a guessing hint
    :param media_type: 'movie' or 'series'
    :param forced_minimum_score: when set, overrides the computed minimum score
    :param is_upgrade: True when replacing an existing subtitles file
    :return: (message, reversed_path, language_code2, provider, score, forced) on success, None otherwise
    """
    # fixme: supply all missing languages, not only one, to hit providers only once who support multiple languages in
    # one query

    # subliminal reads SZ_KEEP_ENCODING from the environment; empty string means re-encode to UTF-8
    if settings.general.getboolean('utf8_encode'):
        os.environ["SZ_KEEP_ENCODING"] = ""
    else:
        os.environ["SZ_KEEP_ENCODING"] = "True"

    logging.debug('BAZARR Searching subtitles for this file: ' + path)
    # translate the boolean string into the mode string expected downstream
    if hi == "True":
        hi = "force HI"
    else:
        hi = "force non-HI"
    language_set = set()

    # normalize single language value into a list (Python 2 types.ListType)
    if not isinstance(language, types.ListType):
        language = [language]

    if forced == "True":
        providers_auth['podnapisi']['only_foreign'] = True  ## fixme: This is also in get_providers_auth()
        providers_auth['subscene']['only_foreign'] = True  ## fixme: This is also in get_providers_auth()
        providers_auth['opensubtitles']['only_foreign'] = True  ## fixme: This is also in get_providers_auth()
    else:
        providers_auth['podnapisi']['only_foreign'] = False
        providers_auth['subscene']['only_foreign'] = False
        providers_auth['opensubtitles']['only_foreign'] = False

    # build the Language objects; 'pob' is Brazilian Portuguese which needs an explicit country
    for l in language:
        if l == 'pob':
            lang_obj = Language('por', 'BR')
            if forced == "True":
                lang_obj = Language.rebuild(lang_obj, forced=True)
        else:
            lang_obj = Language(l)
            if forced == "True":
                lang_obj = Language.rebuild(lang_obj, forced=True)
        language_set.add(lang_obj)

    use_scenename = settings.general.getboolean('use_scenename')
    use_mediainfo = settings.general.getboolean('use_mediainfo')
    minimum_score = settings.general.minimum_score
    minimum_score_movie = settings.general.minimum_score_movie
    use_postprocessing = settings.general.getboolean('use_postprocessing')
    postprocessing_cmd = settings.general.postprocessing_cmd
    single = settings.general.getboolean('single_language')

    # todo:
    """
    AsyncProviderPool:
    implement:
        blacklist=None,
        pre_download_hook=None,
        post_download_hook=None,
        language_hook=None
    """
    video = get_video(force_unicode(path), title, sceneName, use_scenename, use_mediainfo, providers=providers,
                      media_type=media_type)
    if video:
        min_score, max_score, scores = get_scores(video, media_type, min_score_movie_perc=int(minimum_score_movie),
                                                  min_score_series_perc=int(minimum_score))

        if providers:
            # +1 so an upgrade must strictly beat the previous subtitles' score
            if forced_minimum_score:
                min_score = int(forced_minimum_score) + 1
            downloaded_subtitles = download_best_subtitles({video}, language_set, int(min_score), hi,
                                                           providers=providers,
                                                           provider_configs=providers_auth,
                                                           pool_class=provider_pool(),
                                                           compute_score=compute_score,
                                                           throttle_time=None,  # fixme
                                                           blacklist=None,  # fixme
                                                           throttle_callback=provider_throttle,
                                                           pre_download_hook=None,  # fixme
                                                           post_download_hook=None,  # fixme
                                                           language_hook=None)  # fixme
        else:
            downloaded_subtitles = None
            logging.info("BAZARR All providers are throttled")
            return None

        saved_any = False
        if downloaded_subtitles:
            # downloaded_subtitles maps each video to its subtitles list
            for video, subtitles in downloaded_subtitles.iteritems():
                if not subtitles:
                    continue

                try:
                    fld = get_target_folder(path)
                    # chmod only applies off-Windows and when explicitly enabled; value is octal
                    chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
                        'win') and settings.general.getboolean('chmod_enabled') else None
                    saved_subtitles = save_subtitles(video.original_path, subtitles, single=single,
                                                     tags=None,  # fixme
                                                     directory=fld,
                                                     chmod=chmod,
                                                     # formats=("srt", "vtt")
                                                     path_decoder=force_unicode
                                                     )
                except Exception as e:
                    logging.exception('BAZARR Error saving subtitles file to disk for this file:' + path)
                    pass
                else:
                    saved_any = True
                    for subtitle in saved_subtitles:
                        downloaded_provider = subtitle.provider_name
                        # Brazilian Portuguese is stored internally as 'pob'
                        if subtitle.language == 'pt-BR':
                            downloaded_language_code3 = 'pob'
                        else:
                            downloaded_language_code3 = subtitle.language.alpha3
                        downloaded_language = language_from_alpha3(downloaded_language_code3)
                        downloaded_language_code2 = alpha2_from_alpha3(downloaded_language_code3)
                        downloaded_path = subtitle.storage_path
                        is_forced_string = " forced" if subtitle.language.forced else ""
                        logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
                        if is_upgrade:
                            action = "upgraded"
                        else:
                            action = "downloaded"
                        # history message; score is rescaled to a percentage of max_score
                        if video.used_scene_name:
                            message = downloaded_language + is_forced_string + " subtitles " + action + " from " + downloaded_provider + " with a score of " + unicode(
                                round(subtitle.score * 100 / max_score, 2)) + "% using this scene name: " + sceneName
                        else:
                            message = downloaded_language + is_forced_string + " subtitles " + action + " from " + downloaded_provider + " with a score of " + unicode(
                                round(subtitle.score * 100 / max_score, 2)) + "% using filename guessing."

                        if use_postprocessing is True:
                            command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
                                                 downloaded_language_code2, downloaded_language_code3,
                                                 subtitle.language.forced)
                            try:
                                # on Windows, query the console codepage first so we can decode command output
                                if os.name == 'nt':
                                    codepage = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE,
                                                                stderr=subprocess.PIPE)
                                    # wait for the process to terminate
                                    out_codepage, err_codepage = codepage.communicate()
                                    encoding = out_codepage.split(':')[-1].strip()

                                process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                                           stderr=subprocess.PIPE)
                                # wait for the process to terminate
                                out, err = process.communicate()

                                if os.name == 'nt':
                                    out = out.decode(encoding)

                            except:
                                # NOTE(review): `out` may be unbound here if Popen itself raised — TODO confirm
                                if out == "":
                                    logging.error(
                                        'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
                                else:
                                    logging.error('BAZARR Post-processing result for file ' + path + ' : ' + out)
                            else:
                                if out == "":
                                    logging.info(
                                        'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
                                else:
                                    logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)

                        # fixme: support multiple languages at once
                        if media_type == 'series':
                            reversed_path = path_replace_reverse(path)
                        else:
                            reversed_path = path_replace_reverse_movie(path)

                        # NOTE: returns on the first saved subtitle — remaining saved_subtitles are not reported
                        return message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score, subtitle.language.forced

        if not saved_any:
            logging.debug('BAZARR No subtitles were found for this file: ' + path)
            return None

    # flush the subliminal cache region to its backing store
    subliminal.region.backend.sync()

    logging.debug('BAZARR Ended searching subtitles for file: ' + path)
|
2017-10-16 23:27:19 +00:00
|
|
|
|
2018-10-31 19:34:40 +00:00
|
|
|
|
2019-03-31 03:09:52 +00:00
|
|
|
def manual_search(path, language, hi, forced, providers, providers_auth, sceneName, title, media_type):
    """
    List all candidate subtitles from every enabled provider for manual selection.

    :param path: mapped path to the media file
    :param language: string repr of a list of alpha2 codes (parsed with ast.literal_eval)
    :param hi: "True"/"False" string — hearing impaired preference
    :param forced: "True"/"False" string — forced-subtitles preference
    :param providers: enabled (non-throttled) provider names
    :param providers_auth: per-provider configuration dict (mutated below for only_foreign)
    :param sceneName: scene release name or the string "None"
    :param title: series/movie title used as a guessing hint
    :param media_type: 'movie' or 'series'
    :return: list of dicts sorted by score desc, None when all providers are throttled
    """
    logging.debug('BAZARR Manually searching subtitles for this file: ' + path)

    # default result; also what is returned if the provider query raises
    final_subtitles = []

    # translate the boolean string into the mode string expected downstream
    if hi == "True":
        hi = "force HI"
    else:
        hi = "force non-HI"
    language_set = set()

    if forced == "True":
        providers_auth['podnapisi']['only_foreign'] = True
        providers_auth['subscene']['only_foreign'] = True
        providers_auth['opensubtitles']['only_foreign'] = True
    else:
        providers_auth['podnapisi']['only_foreign'] = False
        providers_auth['subscene']['only_foreign'] = False
        providers_auth['opensubtitles']['only_foreign'] = False

    # language arrives as the textual repr of a list of alpha2 codes
    for lang in ast.literal_eval(language):
        lang = alpha3_from_alpha2(lang)
        # 'pob' is Brazilian Portuguese which needs an explicit country
        if lang == 'pob':
            lang_obj = Language('por', 'BR')
            if forced == "True":
                lang_obj = Language.rebuild(lang_obj, forced=True)
        else:
            lang_obj = Language(lang)
            if forced == "True":
                lang_obj = Language.rebuild(lang_obj, forced=True)
        language_set.add(lang_obj)

    use_scenename = settings.general.getboolean('use_scenename')
    use_mediainfo = settings.general.getboolean('use_mediainfo')
    minimum_score = settings.general.minimum_score
    minimum_score_movie = settings.general.minimum_score_movie
    use_postprocessing = settings.general.getboolean('use_postprocessing')
    postprocessing_cmd = settings.general.postprocessing_cmd
    if providers:
        video = get_video(force_unicode(path), title, sceneName, use_scenename, use_mediainfo, providers=providers,
                          media_type=media_type)
    else:
        logging.info("BAZARR All providers are throttled")
        return None
    if video:
        min_score, max_score, scores = get_scores(video, media_type, min_score_movie_perc=int(minimum_score_movie),
                                                  min_score_series_perc=int(minimum_score))

        try:
            if providers:
                subtitles = list_all_subtitles([video], language_set,
                                               providers=providers,
                                               provider_configs=providers_auth,
                                               throttle_callback=provider_throttle,
                                               language_hook=None)  # fixme
            else:
                subtitles = []
                logging.info("BAZARR All providers are throttled")
                return None
        except Exception as e:
            # on failure we fall through and return the empty final_subtitles list
            logging.exception("BAZARR Error trying to get subtitle list from provider for this file: " + path)
        else:
            subtitles_list = []

            for s in subtitles[video]:
                try:
                    matches = s.get_matches(video)
                except AttributeError:
                    # provider returned an object without matches support; skip it
                    continue

                # skip wrong season/episodes
                if media_type == "series":
                    can_verify_series = True
                    # an unverifiable hash match alone is not trusted for series identity
                    if not s.hash_verifiable and "hash" in matches:
                        can_verify_series = False

                    if can_verify_series and not {"series", "season", "episode"}.issubset(matches):
                        logging.debug(u"BAZARR Skipping %s, because it doesn't match our series/episode", s)
                        continue

                score = compute_score(matches, s, video, hearing_impaired=hi)
                not_matched = scores - matches
                s.score = score

                # subtitle object is pickled+base64 encoded so the UI can send it back for download
                subtitles_list.append(
                    dict(score=round((score / max_score * 100), 2),
                         language=str(s.language), hearing_impaired=str(s.hearing_impaired),
                         provider=s.provider_name,
                         subtitle=codecs.encode(pickle.dumps(s.make_picklable()), "base64").decode(),
                         url=s.page_link, matches=list(matches), dont_matches=list(not_matched)))

            final_subtitles = sorted(subtitles_list, key=lambda x: x['score'], reverse=True)
            logging.debug('BAZARR ' + str(len(final_subtitles)) + " subtitles have been found for this file: " + path)
            logging.debug('BAZARR Ended searching subtitles for this file: ' + path)

    # flush the subliminal cache region to its backing store
    subliminal.region.backend.sync()

    return final_subtitles
|
2018-10-31 19:34:40 +00:00
|
|
|
|
2018-09-10 21:10:33 +00:00
|
|
|
|
2019-06-11 18:45:48 +00:00
|
|
|
def manual_download_subtitle(path, language, hi, forced, subtitle, provider, providers_auth, sceneName, title,
                             media_type):
    """
    Download one specific subtitle previously selected via manual_search.

    :param path: mapped path to the media file
    :param language: requested language (unused here; the pickled subtitle carries its own)
    :param hi: hearing impaired preference (unused here)
    :param forced: forced-subtitles preference (unused here)
    :param subtitle: base64-encoded pickled subtitle object produced by manual_search
    :param provider: provider name the subtitle came from
    :param providers_auth: per-provider configuration dict
    :param sceneName: scene release name or the string "None"
    :param title: series/movie title used as a guessing hint
    :param media_type: 'movie' or 'series'
    :return: (message, reversed_path, language_code2, provider, score, forced) on success, None otherwise
    """
    logging.debug('BAZARR Manually downloading subtitles for this file: ' + path)

    # subliminal reads SZ_KEEP_ENCODING from the environment; empty string means re-encode to UTF-8
    if settings.general.getboolean('utf8_encode'):
        os.environ["SZ_KEEP_ENCODING"] = ""
    else:
        os.environ["SZ_KEEP_ENCODING"] = "True"

    # rebuild the subtitle object sent back by the UI (trusted internal round-trip, not external input)
    subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
    use_scenename = settings.general.getboolean('use_scenename')
    use_mediainfo = settings.general.getboolean('use_mediainfo')
    use_postprocessing = settings.general.getboolean('use_postprocessing')
    postprocessing_cmd = settings.general.postprocessing_cmd
    single = settings.general.getboolean('single_language')
    video = get_video(force_unicode(path), title, sceneName, use_scenename, use_mediainfo, providers={provider},
                      media_type=media_type)
    if video:
        min_score, max_score, scores = get_scores(video, media_type)
        try:
            if provider:
                download_subtitles([subtitle], providers={provider}, provider_configs=providers_auth,
                                   pool_class=provider_pool(), throttle_callback=provider_throttle)
                logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
            else:
                logging.info("BAZARR All providers are throttled")
                return None
        except Exception as e:
            logging.exception('BAZARR Error downloading subtitles for this file ' + path)
            return None
        else:
            if not subtitle.is_valid():
                # NOTE(review): logging.exception outside an except block logs "None" as traceback
                logging.exception('BAZARR No valid subtitles file found for this file: ' + path)
                return
            logging.debug('BAZARR Subtitles file downloaded for this file:' + path)
            try:
                score = round(subtitle.score / max_score * 100, 2)
                fld = get_target_folder(path)
                # chmod only applies off-Windows and when explicitly enabled; value is octal
                chmod = int(settings.general.chmod, 8) if not sys.platform.startswith(
                    'win') and settings.general.getboolean('chmod_enabled') else None
                saved_subtitles = save_subtitles(video.original_path, [subtitle], single=single,
                                                 tags=None,  # fixme
                                                 directory=fld,
                                                 chmod=chmod,
                                                 # formats=("srt", "vtt")
                                                 path_decoder=force_unicode)

            except Exception as e:
                logging.exception('BAZARR Error saving subtitles file to disk for this file:' + path)
                return
            else:
                if saved_subtitles:
                    for saved_subtitle in saved_subtitles:
                        downloaded_provider = saved_subtitle.provider_name
                        # Brazilian Portuguese is stored internally as 'pob'
                        if saved_subtitle.language == 'pt-BR':
                            downloaded_language_code3 = 'pob'
                        else:
                            downloaded_language_code3 = subtitle.language.alpha3
                        downloaded_language = language_from_alpha3(downloaded_language_code3)
                        downloaded_language_code2 = alpha2_from_alpha3(downloaded_language_code3)
                        downloaded_path = saved_subtitle.storage_path
                        logging.debug('BAZARR Subtitles file saved to disk: ' + downloaded_path)
                        is_forced_string = " forced" if subtitle.language.forced else ""
                        message = downloaded_language + is_forced_string + " subtitles downloaded from " + downloaded_provider + " with a score of " + unicode(
                            score) + "% using manual search."

                        if use_postprocessing is True:
                            command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language,
                                                 downloaded_language_code2, downloaded_language_code3,
                                                 subtitle.language.forced)
                            try:
                                # on Windows, query the console codepage first so we can decode command output
                                if os.name == 'nt':
                                    codepage = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE,
                                                                stderr=subprocess.PIPE)
                                    # wait for the process to terminate
                                    out_codepage, err_codepage = codepage.communicate()
                                    encoding = out_codepage.split(':')[-1].strip()

                                process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                                           stderr=subprocess.PIPE)
                                # wait for the process to terminate
                                out, err = process.communicate()

                                if os.name == 'nt':
                                    out = out.decode(encoding)

                            except:
                                # NOTE(review): `out` may be unbound here if Popen itself raised — TODO confirm
                                if out == "":
                                    logging.error(
                                        'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
                                else:
                                    logging.error('BAZARR Post-processing result for file ' + path + ' : ' + out)
                            else:
                                if out == "":
                                    logging.info(
                                        'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
                                else:
                                    logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)

                        if media_type == 'series':
                            reversed_path = path_replace_reverse(path)
                        else:
                            reversed_path = path_replace_reverse_movie(path)

                        # NOTE: returns on the first saved subtitle
                        return message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score, subtitle.language.forced
                else:
                    logging.error(
                        "BAZARR Tried to manually download a subtitles for file: " + path + " but we weren't able to do (probably throttled by " + str(
                            subtitle.provider_name) + ". Please retry later or select a subtitles from another provider.")
                    return None

    # flush the subliminal cache region to its backing store
    subliminal.region.backend.sync()

    logging.debug('BAZARR Ended manually downloading subtitles for file: ' + path)
|
2018-09-10 21:10:33 +00:00
|
|
|
|
2018-10-31 19:34:40 +00:00
|
|
|
|
2017-10-16 23:27:19 +00:00
|
|
|
def series_download_subtitles(no):
    """
    Download all missing subtitles for every episode of one Sonarr series.

    :param no: Sonarr series id
    """
    # restrict to this series' episodes that still report missing subtitles
    episodes_details_clause = [
        (TableEpisodes.sonarr_series_id == no),
        (TableEpisodes.missing_subtitles != '[]')
    ]
    if settings.sonarr.getboolean('only_monitored'):
        episodes_details_clause.append(
            (TableEpisodes.monitored == 'True')
        )

    episodes_details = TableEpisodes.select(
        TableEpisodes.path,
        TableEpisodes.missing_subtitles,
        TableEpisodes.sonarr_episode_id,
        TableEpisodes.scene_name
    ).where(
        reduce(operator.and_, episodes_details_clause)
    )

    series_details = TableShows.select(
        TableShows.hearing_impaired,
        TableShows.title,
        TableShows.forced
    ).where(
        TableShows.sonarr_series_id == no
    ).first()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    count_episodes_details = episodes_details.count()

    for i, episode in enumerate(episodes_details, 1):
        if providers_list:
            # missing_subtitles is stored as the textual repr of a list, e.g. "['en', 'fr:forced']"
            for language in ast.literal_eval(episode.missing_subtitles):
                if language is not None:
                    notifications.write(msg='Searching for series subtitles...', queue='get_subtitle', item=i,
                                        length=count_episodes_details)
                    # bugfix: take only the code before ':' — the original passed the whole
                    # split() list to alpha3_from_alpha2; episode_download_subtitles already
                    # uses language.split(':')[0]
                    result = download_subtitle(path_replace(episode.path),
                                               str(alpha3_from_alpha2(language.split(':')[0])),
                                               series_details.hearing_impaired,
                                               "True" if len(language.split(':')) > 1 else "False",
                                               providers_list,
                                               providers_auth,
                                               str(episode.scene_name),
                                               series_details.title,
                                               'series')
                    if result is not None:
                        message = result[0]
                        path = result[1]
                        forced = result[5]
                        # re-append the forced marker so history stores the full language code
                        language_code = result[2] + ":forced" if forced else result[2]
                        provider = result[3]
                        score = result[4]
                        store_subtitles(path_replace(episode.path))
                        history_log(1, no, episode.sonarr_episode_id, message, path, language_code, provider, score)
                        send_notifications(no, episode.sonarr_episode_id, message)
        else:
            notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
            logging.info("BAZARR All providers are throttled")
            break

    # refresh the missing-subtitles cache for this series
    list_missing_subtitles(no)

    if count_episodes_details:
        notifications.write(msg='Searching completed. Please reload the page.', type='success', duration='permanent',
                            button='refresh', queue='get_subtitle')
|
2019-02-21 04:30:25 +00:00
|
|
|
|
2018-04-24 14:48:52 +00:00
|
|
|
|
2019-04-14 03:00:53 +00:00
|
|
|
def episode_download_subtitles(no):
    """Search and download all missing subtitles for every episode of one series.

    :param no: Sonarr series id whose episodes should be processed.

    Side effects: downloads subtitle files to disk, updates the stored
    subtitles list, writes history rows and fires notifications.
    """
    # Base filter: all episodes belonging to this series; optionally restrict
    # to monitored episodes depending on configuration.
    episodes_details_clause = [
        (TableEpisodes.sonarr_series_id == no)
    ]
    if settings.sonarr.getboolean('only_monitored'):
        episodes_details_clause.append(
            (TableEpisodes.monitored == 'True')
        )

    episodes_details = TableEpisodes.select(
        TableEpisodes.path,
        TableEpisodes.missing_subtitles,
        TableEpisodes.sonarr_episode_id,
        TableEpisodes.scene_name,
        TableShows.hearing_impaired,
        TableShows.title,
        TableShows.sonarr_series_id,
        TableShows.forced
    ).join_from(
        TableEpisodes, TableShows, JOIN.LEFT_OUTER
    ).where(
        reduce(operator.and_, episodes_details_clause)
    )

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for episode in episodes_details:
        if providers_list:
            # missing_subtitles is stored as a stringified Python list,
            # e.g. "['en', 'fr:forced']".
            for language in ast.literal_eval(episode.missing_subtitles):
                if language is not None:
                    notifications.write(msg='Searching for ' + str(
                        language_from_alpha2(language)) + ' subtitles for this episode: ' + path_replace(episode.path),
                                        queue='get_subtitle')
                    # A ':' suffix on the language code (e.g. 'fr:forced')
                    # flags a forced-subtitles request.
                    result = download_subtitle(path_replace(episode.path),
                                               str(alpha3_from_alpha2(language.split(':')[0])),
                                               episode.hearing_impaired,
                                               "True" if len(language.split(':')) > 1 else "False",
                                               providers_list,
                                               providers_auth,
                                               str(episode.scene_name),
                                               episode.title,
                                               'series')
                    if result is not None:
                        # result layout: (message, path, language, provider,
                        # score, forced)
                        message = result[0]
                        path = result[1]
                        forced = result[5]
                        language_code = result[2] + ":forced" if forced else result[2]
                        provider = result[3]
                        score = result[4]
                        store_subtitles(path_replace(episode.path))
                        history_log(1, episode.sonarr_series_id, episode.sonarr_episode_id, message, path, language_code, provider, score)
                        send_notifications(episode.sonarr_series_id, episode.sonarr_episode_id, message)
                        list_missing_subtitles(episode.sonarr_series_id)
        else:
            # No usable provider at all: report once and stop processing.
            notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
            logging.info("BAZARR All providers are throttled")
            break
|
2019-02-21 04:30:25 +00:00
|
|
|
|
2018-04-24 14:48:52 +00:00
|
|
|
|
|
|
|
def movies_download_subtitles(no):
    """Search and download all missing subtitles for a single movie.

    :param no: Radarr movie id.

    NOTE(review): assumes *no* exists in the movies table; ``movie`` would be
    ``None`` otherwise and the attribute accesses below would raise -- confirm
    callers always pass a known id.
    """
    movie = TableMovies.select(
        TableMovies.path,
        TableMovies.missing_subtitles,
        TableMovies.radarr_id,
        TableMovies.scene_name,
        TableMovies.hearing_impaired,
        TableMovies.title,
        TableMovies.forced
    ).where(
        TableMovies.radarr_id == no
    ).first()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    # missing_subtitles is a stringified list, e.g. "['en', 'fr:forced']".
    count_movie = len(ast.literal_eval(movie.missing_subtitles))

    for i, language in enumerate(ast.literal_eval(movie.missing_subtitles), 1):
        if providers_list:
            if language is not None:
                notifications.write(msg='Searching for movies subtitles', queue='get_subtitle', item=i,
                                    length=count_movie)
                # 'xx:forced' language codes request forced subtitles only.
                result = download_subtitle(path_replace_movie(movie.path),
                                           str(alpha3_from_alpha2(language.split(':')[0])),
                                           movie.hearing_impaired,
                                           "True" if len(language.split(':')) > 1 else "False",
                                           providers_list,
                                           providers_auth,
                                           str(movie.scene_name),
                                           movie.title,
                                           'movie')
                if result is not None:
                    # result layout: (message, path, language, provider,
                    # score, forced)
                    message = result[0]
                    path = result[1]
                    forced = result[5]
                    language_code = result[2] + ":forced" if forced else result[2]
                    provider = result[3]
                    score = result[4]
                    store_subtitles_movie(path_replace_movie(movie.path))
                    history_log_movie(1, no, message, path, language_code, provider, score)
                    send_notifications_movie(no, message)
        else:
            notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
            logging.info("BAZARR All providers are throttled")
            break
    # Refresh the missing-subtitles list once, after all languages were tried.
    list_missing_subtitles_movies(no)

    if count_movie:
        notifications.write(msg='Searching completed. Please reload the page.', type='success', duration='permanent',
                            button='refresh', queue='get_subtitle')
|
2019-02-21 04:30:25 +00:00
|
|
|
|
2018-04-24 14:48:52 +00:00
|
|
|
|
2019-04-04 22:33:49 +00:00
|
|
|
def wanted_download_subtitles(path, l, count_episodes):
    """Try to download missing subtitles for one wanted episode file.

    :param path: on-disk path of the episode file (already path-mapped).
    :param l: 1-based index of this episode in the overall wanted run,
        used only for progress notifications.
    :param count_episodes: total number of wanted episodes in the run.

    Keeps a per-episode ``failed_attempts`` list of ``[language, timestamp]``
    pairs so that adaptive searching (see ``search_active``) can back off.
    """
    episodes_details = TableEpisodes.select(
        TableEpisodes.path,
        TableEpisodes.missing_subtitles,
        TableEpisodes.sonarr_episode_id,
        TableEpisodes.sonarr_series_id,
        TableShows.hearing_impaired,
        TableEpisodes.scene_name,
        TableEpisodes.failed_attempts,
        TableShows.title,
        TableShows.forced
    ).join_from(
        TableEpisodes, TableShows, JOIN.LEFT_OUTER
    ).where(
        (TableEpisodes.path == path_replace_reverse(path)) &
        (TableEpisodes.missing_subtitles != '[]')
    ).objects()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for episode in episodes_details:
        # failed_attempts is persisted as a unicode repr of a list; decode it.
        attempt = episode.failed_attempts
        if type(attempt) == unicode:
            attempt = ast.literal_eval(attempt)
        for language in ast.literal_eval(episode.missing_subtitles):
            if attempt is None:
                attempt = []
                attempt.append([language, time.time()])
            else:
                # Python 2: zip() returns a list, so [0] yields the tuple of
                # already-attempted language codes.
                att = zip(*attempt)[0]
                if language not in att:
                    attempt.append([language, time.time()])

            # Persist the (possibly extended) attempt list before searching.
            TableEpisodes.update(
                {
                    TableEpisodes.failed_attempts: unicode(attempt)
                }
            ).where(
                TableEpisodes.sonarr_episode_id == episode.sonarr_episode_id
            ).execute()

            for i in range(len(attempt)):
                if attempt[i][0] == language:
                    # Only search when the adaptive back-off allows it.
                    if search_active(attempt[i][1]):
                        notifications.write(msg='Searching for series subtitles...', queue='get_subtitle', item=l,
                                            length=count_episodes)
                        result = download_subtitle(path_replace(episode.path),
                                                   str(alpha3_from_alpha2(language.split(':')[0])),
                                                   episode.hearing_impaired,
                                                   "True" if len(language.split(':')) > 1 else "False",
                                                   providers_list,
                                                   providers_auth,
                                                   str(episode.scene_name),
                                                   episode.title,
                                                   'series')
                        if result is not None:
                            # result layout: (message, path, language,
                            # provider, score, forced).  NOTE: `path` below
                            # shadows the function parameter, which is not
                            # used again afterwards.
                            message = result[0]
                            path = result[1]
                            forced = result[5]
                            language_code = result[2] + ":forced" if forced else result[2]
                            provider = result[3]
                            score = result[4]
                            store_subtitles(path_replace(episode.path))
                            # sonarr_series_id is a FK object here; its own
                            # sonarr_series_id attribute holds the raw id.
                            list_missing_subtitles(episode.sonarr_series_id.sonarr_series_id)
                            history_log(1, episode.sonarr_series_id.sonarr_series_id, episode.sonarr_episode_id, message, path, language_code, provider, score)
                            send_notifications(episode.sonarr_series_id.sonarr_series_id, episode.sonarr_episode_id, message)
                    else:
                        logging.debug(
                            'BAZARR Search is not active for episode ' + episode.path + ' Language: ' + attempt[i][0])
|
2017-11-16 17:04:20 +00:00
|
|
|
|
2018-06-06 00:06:00 +00:00
|
|
|
|
2019-04-04 22:33:49 +00:00
|
|
|
def wanted_download_subtitles_movie(path, l, count_movies):
    """Try to download missing subtitles for one wanted movie file.

    :param path: on-disk path of the movie file (already path-mapped).
    :param l: 1-based index of this movie in the overall wanted run,
        used only for progress notifications.
    :param count_movies: total number of wanted movies in the run.

    Keeps a per-movie ``failed_attempts`` list of ``[language, timestamp]``
    pairs so that adaptive searching (see ``search_active``) can back off.

    Fixes vs. previous revision (consistency with the episode counterpart
    ``wanted_download_subtitles``): the ``search_active(...) is True``
    identity-style test is replaced by a plain truthiness test, and the
    routine "search is not active" message is logged at DEBUG level instead
    of INFO.
    """
    movies_details = TableMovies.select(
        TableMovies.path,
        TableMovies.missing_subtitles,
        TableMovies.radarr_id,
        TableMovies.hearing_impaired,
        TableMovies.scene_name,
        TableMovies.failed_attempts,
        TableMovies.title,
        TableMovies.forced
    ).where(
        (TableMovies.path == path_replace_reverse_movie(path)) &
        (TableMovies.missing_subtitles != '[]')
    )

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for movie in movies_details:
        # failed_attempts is persisted as a unicode repr of a list; decode it.
        attempt = movie.failed_attempts
        if type(attempt) == unicode:
            attempt = ast.literal_eval(attempt)
        for language in ast.literal_eval(movie.missing_subtitles):
            if attempt is None:
                attempt = []
                attempt.append([language, time.time()])
            else:
                # Python 2: zip() returns a list, so [0] yields the tuple of
                # already-attempted language codes.
                att = zip(*attempt)[0]
                if language not in att:
                    attempt.append([language, time.time()])

            # Persist the (possibly extended) attempt list before searching.
            TableMovies.update(
                {
                    TableMovies.failed_attempts: unicode(attempt)
                }
            ).where(
                TableMovies.radarr_id == movie.radarr_id
            ).execute()

            for i in range(len(attempt)):
                if attempt[i][0] == language:
                    # Only search when the adaptive back-off allows it.
                    if search_active(attempt[i][1]):
                        notifications.write(msg='Searching for movies subtitles...', queue='get_subtitle', item=l,
                                            length=count_movies)
                        result = download_subtitle(path_replace_movie(movie.path),
                                                   str(alpha3_from_alpha2(language.split(':')[0])),
                                                   movie.hearing_impaired,
                                                   "True" if len(language.split(':')) > 1 else "False",
                                                   providers_list,
                                                   providers_auth,
                                                   str(movie.scene_name),
                                                   movie.title,
                                                   'movie')
                        if result is not None:
                            # result layout: (message, path, language,
                            # provider, score, forced)
                            message = result[0]
                            path = result[1]
                            forced = result[5]
                            language_code = result[2] + ":forced" if forced else result[2]
                            provider = result[3]
                            score = result[4]
                            store_subtitles_movie(path_replace_movie(movie.path))
                            list_missing_subtitles_movies(movie.radarr_id)
                            history_log_movie(1, movie.radarr_id, message, path, language_code, provider, score)
                            send_notifications_movie(movie.radarr_id, message)
                    else:
                        # Routine back-off, not noteworthy: DEBUG to match the
                        # episode counterpart.
                        logging.debug(
                            'BAZARR Search is not active for movie ' + movie.path + ' Language: ' + attempt[i][0])
|
2018-06-06 00:06:00 +00:00
|
|
|
|
|
|
|
|
2017-10-23 03:00:11 +00:00
|
|
|
def wanted_search_missing_subtitles():
    """Scan every wanted episode and movie and try to fetch their subtitles.

    Iterates the episodes/movies that still have missing subtitles (optionally
    restricted to monitored items) and delegates each one to
    ``wanted_download_subtitles`` / ``wanted_download_subtitles_movie``.
    Aborts early if all providers become throttled.
    """
    if settings.general.getboolean('use_sonarr'):
        episodes_clause = [
            (TableEpisodes.missing_subtitles != '[]')
        ]
        if settings.sonarr.getboolean('only_monitored'):
            episodes_clause.append(
                (TableEpisodes.monitored == 'True')
            )

        # path_substitution is a DB-side function aliasing the mapped path.
        episodes = TableEpisodes.select(
            fn.path_substitution(TableEpisodes.path).alias('path')
        ).where(
            reduce(operator.and_, episodes_clause)
        )

        count_episodes = episodes.count()
        for i, episode in enumerate(episodes, 1):
            # Re-check providers on each item: throttling can kick in mid-run.
            providers = get_providers()
            if providers:
                wanted_download_subtitles(episode.path, i, count_episodes)
            else:
                notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
                logging.info("BAZARR All providers are throttled")
                return

    if settings.general.getboolean('use_radarr'):
        movies_clause = [
            (TableMovies.missing_subtitles != '[]')
        ]
        if settings.radarr.getboolean('only_monitored'):
            movies_clause.append(
                (TableMovies.monitored == 'True')
            )
        movies = TableMovies.select(
            fn.path_substitution_movie(TableMovies.path).alias('path')
        ).where(
            reduce(operator.and_, movies_clause)
        )

        count_movies = movies.count()
        for i, movie in enumerate(movies, 1):
            providers = get_providers()
            if providers:
                wanted_download_subtitles_movie(movie.path, i, count_movies)
            else:
                notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
                logging.info("BAZARR All providers are throttled")
                return

    logging.info('BAZARR Finished searching for missing subtitles. Check histories for more information.')

    notifications.write(msg='Searching completed. Please reload the page.', type='success', duration='permanent',
                        button='refresh', queue='get_subtitle')
|
2019-02-21 04:30:25 +00:00
|
|
|
|
2018-08-27 00:53:02 +00:00
|
|
|
|
|
|
|
def search_active(timestamp):
    """Return True when a new subtitle search is due for a past attempt.

    With adaptive searching enabled, an attempt made at *timestamp* is
    retried freely for three weeks; after that deadline it is retried only
    on each weekly anniversary.  With adaptive searching disabled, every
    call returns True.

    :param timestamp: POSIX timestamp (float or numeric string) of the
        previous attempt.
    :return: bool

    Fix vs. previous revision: microseconds were stripped via an expensive
    and fragile ``str()``/``strptime()`` round-trip; ``replace(microsecond=0)``
    produces the identical datetime directly.
    """
    if settings.general.getboolean('adaptive_searching'):
        search_deadline = timedelta(weeks=3)
        search_delta = timedelta(weeks=1)
        # Truncate to whole seconds so day arithmetic is stable.
        attempt_datetime = datetime.fromtimestamp(float(timestamp)).replace(microsecond=0)
        attempt_search_deadline = attempt_datetime + search_deadline
        today = datetime.today()
        attempt_age_in_days = (today.date() - attempt_search_deadline.date()).days
        if today.date() <= attempt_search_deadline.date():
            # Still inside the three-week window: always search.
            return True
        # Past the deadline: search only on the weekly anniversary.
        return attempt_age_in_days % search_delta.days == 0
    else:
        return True
|
2019-02-05 03:57:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
def refine_from_db(path, video):
    """Complete a parsed ``video`` object with metadata stored in the DB.

    :param path: on-disk path of the media file (mapped form).
    :param video: a subliminal ``Episode`` or ``Movie`` instance; attributes
        that guessit could not fill are populated from the database.
    :return: the same ``video`` object, mutated in place.

    Fix vs. previous revision: the episode branch unconditionally did
    ``video.format = str(data.format)`` (same for resolution), writing the
    literal string ``"None"`` when the DB column was NULL and thereby
    corrupting subtitle scoring; it now guards on the column value exactly
    like the movie branch already did.
    """
    if isinstance(video, Episode):
        data = TableEpisodes.select(
            TableShows.title.alias('seriesTitle'),
            TableEpisodes.season,
            TableEpisodes.episode,
            TableEpisodes.title.alias('episodeTitle'),
            TableShows.year,
            TableShows.tvdb_id,
            TableShows.alternate_titles,
            TableEpisodes.format,
            TableEpisodes.resolution,
            TableEpisodes.video_codec,
            TableEpisodes.audio_codec,
            TableEpisodes.path
        ).join_from(
            TableEpisodes, TableShows, JOIN.LEFT_OUTER
        ).where(
            TableEpisodes.path == path_replace_reverse(path)
        ).objects().first()

        if data:
            # series_re splits "Title (2019) (US)"-style names into
            # (series, year, country).
            # NOTE(review): assumes seriesTitle always matches series_re;
            # a non-match would raise AttributeError -- confirm upstream.
            video.series, year, country = series_re.match(data.seriesTitle).groups()
            video.season = int(data.season)
            video.episode = int(data.episode)
            video.title = data.episodeTitle
            if data.year:
                if int(data.year) > 0: video.year = int(data.year)
            video.series_tvdb_id = int(data.tvdb_id)
            video.alternative_series = ast.literal_eval(data.alternate_titles)
            # Only fill attributes guessit left empty, and only from non-NULL
            # columns (avoids the "None" string bug).
            if not video.format:
                if data.format: video.format = str(data.format)
            if not video.resolution:
                if data.resolution: video.resolution = str(data.resolution)
            if not video.video_codec:
                if data.video_codec: video.video_codec = data.video_codec
            if not video.audio_codec:
                if data.audio_codec: video.audio_codec = data.audio_codec
    elif isinstance(video, Movie):
        data = TableMovies.select(
            TableMovies.title,
            TableMovies.year,
            TableMovies.alternative_titles,
            TableMovies.format,
            TableMovies.resolution,
            TableMovies.video_codec,
            TableMovies.audio_codec,
            TableMovies.imdb_id
        ).where(
            TableMovies.path == unicode(path_replace_reverse_movie(path))
        ).first()

        if data:
            # Strip a trailing "(YYYY)" year suffix from the stored title.
            video.title = re.sub(r'(\(\d\d\d\d\))', '', data.title)
            if data.year:
                if int(data.year) > 0: video.year = int(data.year)
            if data.imdb_id: video.imdb_id = data.imdb_id
            video.alternative_titles = ast.literal_eval(data.alternative_titles)
            if not video.format:
                if data.format: video.format = data.format
            if not video.resolution:
                if data.resolution: video.resolution = data.resolution
            if not video.video_codec:
                if data.video_codec: video.video_codec = data.video_codec
            if not video.audio_codec:
                if data.audio_codec: video.audio_codec = data.audio_codec

    return video
|
2019-03-15 18:28:57 +00:00
|
|
|
|
|
|
|
|
2019-07-10 13:36:49 +00:00
|
|
|
def refine_from_mediainfo(path, video):
    """Fill ``video.fps`` from MediaInfo when the parser did not provide it.

    :param path: on-disk path of the media file to probe.
    :param video: parsed video object; only its ``fps`` attribute is updated.
    """
    if video.fps:
        # Already known -- nothing to refine.
        return

    mediainfo_binary = get_binary('mediainfo')
    if not mediainfo_binary:
        logging.debug('BAZARR MediaInfo library not found!')
        return
    logging.debug('BAZARR MediaInfo library used is %s', mediainfo_binary)

    parsed = MediaInfo.parse(path, library_file=mediainfo_binary)

    # Pick the first video track, if any.
    track = None
    for candidate in parsed.tracks:
        if candidate.track_type == 'Video':
            track = candidate
            break
    if track is None:
        logging.debug('BAZARR MediaInfo was unable to find video tracks in the file!')
        return

    logging.debug('MediaInfo found: %s', track.to_data())

    if not video.fps:
        if track.frame_rate:
            video.fps = float(track.frame_rate)
        elif track.framerate_num and track.framerate_den:
            # Derive fps from the rational numerator/denominator pair.
            video.fps = round(float(track.framerate_num) / float(track.framerate_den), 3)
|
|
|
|
|
|
|
|
|
2019-03-15 18:28:57 +00:00
|
|
|
def upgrade_subtitles():
    """Re-download recently fetched subtitles when a better-scored one may exist.

    Looks at history entries newer than the configured upgrade window whose
    score is below "perfect" (357/360 for episodes, 117 for movies) and, for
    each, asks ``download_subtitle`` for a strictly better result
    (``is_upgrade=True`` with the old score as the minimum).

    Fix vs. previous revision: ``episodes_to_upgrade`` / ``movies_to_upgrade``
    were only assigned inside the ``use_sonarr`` / ``use_radarr`` blocks but
    read unconditionally below, raising ``NameError`` when either integration
    was disabled.  They are now initialized up front.
    """
    days_to_upgrade_subs = settings.general.days_to_upgrade_subs
    # Oldest history timestamp (POSIX seconds) still eligible for upgrade.
    minimum_timestamp = ((datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                         datetime(1970, 1, 1)).total_seconds()

    # History actions considered: 1=downloaded, 2=manual, 3=upgraded.
    if settings.general.getboolean('upgrade_manual'):
        query_actions = [1, 2, 3]
    else:
        query_actions = [1, 3]

    # Always defined, even when Sonarr/Radarr support is disabled.
    episodes_to_upgrade = []
    movies_to_upgrade = []

    if settings.general.getboolean('use_sonarr'):
        upgradable_episodes = TableHistory.select(
            TableHistory.video_path,
            TableHistory.language,
            TableHistory.score,
            TableShows.hearing_impaired,
            TableEpisodes.scene_name,
            TableEpisodes.title,
            TableEpisodes.sonarr_series_id,
            TableEpisodes.sonarr_episode_id,
            fn.MAX(TableHistory.timestamp).alias('timestamp'),
            TableShows.languages,
            TableShows.forced
        ).join_from(
            TableHistory, TableShows, JOIN.LEFT_OUTER
        ).join_from(
            TableHistory, TableEpisodes, JOIN.LEFT_OUTER
        ).where(
            (TableHistory.action.in_(query_actions)) &
            (TableHistory.score.is_null(False))
        ).group_by(
            TableHistory.video_path,
            TableHistory.language
        ).objects()

        upgradable_episodes_not_perfect = []
        for upgradable_episode in upgradable_episodes.dicts():
            if upgradable_episode['timestamp'] > minimum_timestamp:
                # Skip rows whose score is not numeric.
                try:
                    int(upgradable_episode['score'])
                except ValueError:
                    pass
                else:
                    if int(upgradable_episode['score']) < 360:
                        upgradable_episodes_not_perfect.append(upgradable_episode)

        for episode in upgradable_episodes_not_perfect:
            # Only upgrade files that still exist and score below threshold.
            if os.path.exists(path_replace(episode['video_path'])) and int(episode['score']) < 357:
                episodes_to_upgrade.append(episode)

    if settings.general.getboolean('use_radarr'):
        upgradable_movies = TableHistoryMovie.select(
            TableHistoryMovie.video_path,
            TableHistoryMovie.language,
            TableHistoryMovie.score,
            TableMovies.hearing_impaired,
            TableMovies.scene_name,
            TableMovies.title,
            TableMovies.radarr_id,
            fn.MAX(TableHistoryMovie.timestamp).alias('timestamp'),
            TableMovies.languages,
            TableMovies.forced
        ).join_from(
            TableHistoryMovie, TableMovies, JOIN.LEFT_OUTER
        ).where(
            (TableHistoryMovie.action.in_(query_actions)) &
            (TableHistoryMovie.score.is_null(False))
        ).group_by(
            TableHistoryMovie.video_path,
            TableHistoryMovie.language
        ).objects()

        upgradable_movies_not_perfect = []
        for upgradable_movie in upgradable_movies.dicts():
            if upgradable_movie['timestamp'] > minimum_timestamp:
                try:
                    int(upgradable_movie['score'])
                except ValueError:
                    pass
                else:
                    if int(upgradable_movie['score']) < 360:
                        upgradable_movies_not_perfect.append(upgradable_movie)

        for movie in upgradable_movies_not_perfect:
            if os.path.exists(path_replace_movie(movie['video_path'])) and int(movie['score']) < 117:
                movies_to_upgrade.append(movie)

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    count_episode_to_upgrade = len(episodes_to_upgrade)
    count_movie_to_upgrade = len(movies_to_upgrade)

    if settings.general.getboolean('use_sonarr'):
        for i, episode in enumerate(episodes_to_upgrade, 1):
            # Abort the whole run if every provider became throttled.
            providers = get_providers()
            if not providers:
                notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
                logging.info("BAZARR All providers are throttled")
                return
            if episode['languages'] != "None":
                desired_languages = ast.literal_eval(str(episode['languages']))
                # Expand the desired-language list according to the series'
                # forced-subtitles setting.
                if episode['forced'] == "True":
                    forced_languages = [l + ":forced" for l in desired_languages]
                elif episode['forced'] == "Both":
                    forced_languages = [l + ":forced" for l in desired_languages] + desired_languages
                else:
                    forced_languages = desired_languages

                if episode['language'] in forced_languages:
                    notifications.write(msg='Upgrading series subtitles...',
                                        queue='upgrade_subtitle', item=i, length=count_episode_to_upgrade)

                    if episode['language'].endswith('forced'):
                        language = episode['language'].split(':')[0]
                        is_forced = "True"
                    else:
                        language = episode['language']
                        is_forced = "False"

                    result = download_subtitle(path_replace(episode['video_path']),
                                               str(alpha3_from_alpha2(language)),
                                               episode['hearing_impaired'],
                                               is_forced,
                                               providers_list,
                                               providers_auth,
                                               str(episode['scene_name']),
                                               episode['title'],
                                               'series',
                                               forced_minimum_score=int(episode['score']),
                                               is_upgrade=True)
                    if result is not None:
                        # result layout: (message, path, language, provider,
                        # score, forced)
                        message = result[0]
                        path = result[1]
                        forced = result[5]
                        language_code = result[2] + ":forced" if forced else result[2]
                        provider = result[3]
                        score = result[4]
                        store_subtitles(path_replace(episode['video_path']))
                        history_log(3, episode['sonarr_series_id'], episode['sonarr_episode_id'], message, path, language_code, provider, score)
                        send_notifications(episode['sonarr_series_id'], episode['sonarr_episode_id'], message)

    if settings.general.getboolean('use_radarr'):
        for i, movie in enumerate(movies_to_upgrade, 1):
            providers = get_providers()
            if not providers:
                notifications.write(msg='BAZARR All providers are throttled', queue='get_subtitle', duration='long')
                logging.info("BAZARR All providers are throttled")
                return
            if movie['languages'] != "None":
                desired_languages = ast.literal_eval(str(movie['languages']))
                if movie['forced'] == "True":
                    forced_languages = [l + ":forced" for l in desired_languages]
                elif movie['forced'] == "Both":
                    forced_languages = [l + ":forced" for l in desired_languages] + desired_languages
                else:
                    forced_languages = desired_languages

                if movie['language'] in forced_languages:
                    notifications.write(msg='Upgrading movie subtitles...',
                                        queue='upgrade_subtitle', item=i, length=count_movie_to_upgrade)

                    if movie['language'].endswith('forced'):
                        language = movie['language'].split(':')[0]
                        is_forced = "True"
                    else:
                        language = movie['language']
                        is_forced = "False"

                    result = download_subtitle(path_replace_movie(movie['video_path']),
                                               str(alpha3_from_alpha2(language)),
                                               movie['hearing_impaired'],
                                               is_forced,
                                               providers_list,
                                               providers_auth,
                                               str(movie['scene_name']),
                                               movie['title'],
                                               'movie',
                                               forced_minimum_score=int(movie['score']),
                                               is_upgrade=True)
                    if result is not None:
                        message = result[0]
                        path = result[1]
                        forced = result[5]
                        language_code = result[2] + ":forced" if forced else result[2]
                        provider = result[3]
                        score = result[4]
                        store_subtitles_movie(path_replace_movie(movie['video_path']))
                        history_log_movie(3, movie['radarr_id'], message, path, language_code, provider, score)
                        send_notifications_movie(movie['radarr_id'], message)
|