# bazarr/get_subtitle.py
from get_argv import config_dir

import os
import sqlite3
import ast
import logging
import subprocess
import time
from datetime import datetime, timedelta

from babelfish import Language
from subliminal import region, scan_video, Video, download_best_subtitles, compute_score, save_subtitles, AsyncProviderPool, score, list_subtitles, download_subtitles
from subliminal.subtitle import get_subtitle_path
from get_languages import language_from_alpha3, alpha2_from_alpha3, alpha3_from_alpha2
from bs4 import UnicodeDammit
from get_settings import get_general_settings, pp_replace, path_replace, path_replace_movie, path_replace_reverse, path_replace_reverse_movie
from list_subtitles import store_subtitles, list_missing_subtitles, store_subtitles_movie, list_missing_subtitles_movies
from utils import history_log, history_log_movie
from notifier import send_notifications, send_notifications_movie
import cPickle as pickle
import codecs
from get_providers import get_providers, get_providers_auth

# configure the cache
region.configure('dogpile.cache.memory')
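
# Download the best matching subtitle for a single media file.
# `language` is an alpha3 code ('pob' is treated as Brazilian Portuguese), `hi`
# arrives as the string "True"/"False", and `media_type` is 'series' or 'movie'.
# Returns a human-readable result message on success, or None on any failure.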
def download_subtitle(path, language, hi, providers, providers_auth, sceneName, media_type):
    if hi == "True":
        hi = True
    else:
        hi = False
    if media_type == 'series':
        type_of_score = 360
        minimum_score = float(get_general_settings()[8]) / 100 * type_of_score
    elif media_type == 'movie':
        type_of_score = 120
        minimum_score = float(get_general_settings()[22]) / 100 * type_of_score
    use_scenename = get_general_settings()[9]
    use_postprocessing = get_general_settings()[10]
    postprocessing_cmd = get_general_settings()[11]
    if language == 'pob':
        lang_obj = Language('por', 'BR')
    else:
        lang_obj = Language(language)

    try:
        if sceneName is None or use_scenename is False:
            used_sceneName = False
            video = scan_video(path)
        else:
            used_sceneName = True
            video = Video.fromname(sceneName)
    except Exception as e:
        logging.exception('Error trying to extract information from this filename: ' + path)
        return None
    else:
        try:
            best_subtitles = download_best_subtitles([video], {lang_obj}, providers=providers, min_score=minimum_score, hearing_impaired=hi, provider_configs=providers_auth)
        except Exception as e:
            logging.exception('Error trying to get the best subtitles for this file: ' + path)
            return None
        else:
            try:
                best_subtitle = best_subtitles[video][0]
            except:
                logging.debug('No subtitles found for ' + path)
                return None
            else:
                single = get_general_settings()[7]
                try:
                    score = round(float(compute_score(best_subtitle, video, hearing_impaired=hi)) / type_of_score * 100, 2)
                    if used_sceneName == True:
                        video = scan_video(path)
                    if single is True:
                        result = save_subtitles(video, [best_subtitle], single=True, encoding='utf-8')
                    else:
                        result = save_subtitles(video, [best_subtitle], encoding='utf-8')
                except:
                    logging.error('Error saving subtitles file to disk.')
                    return None
                else:
                    downloaded_provider = str(result[0]).strip('<>').split(' ')[0][:-8]
                    downloaded_language = language_from_alpha3(language)
                    downloaded_language_code2 = alpha2_from_alpha3(language)
                    downloaded_language_code3 = language
                    downloaded_path = get_subtitle_path(path, language=lang_obj)
                    if used_sceneName == True:
                        message = downloaded_language + " subtitles downloaded from " + downloaded_provider + " with a score of " + unicode(score) + "% using this scene name: " + sceneName
                    else:
                        message = downloaded_language + " subtitles downloaded from " + downloaded_provider + " with a score of " + unicode(score) + "% using filename guessing."

                    if use_postprocessing is True:
                        command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language, downloaded_language_code2, downloaded_language_code3)
                        try:
                            if os.name == 'nt':
                                codepage = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                                # wait for the process to terminate
                                out_codepage, err_codepage = codepage.communicate()
                                encoding = out_codepage.split(':')[-1].strip()

                            process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                            # wait for the process to terminate
                            out, err = process.communicate()

                            if os.name == 'nt':
                                out = out.decode(encoding)

                        except:
                            if out == "":
                                logging.error('Post-processing result for file ' + path + ' : Nothing returned from command execution')
                            else:
                                logging.error('Post-processing result for file ' + path + ' : ' + out)
                        else:
                            if out == "":
                                logging.info('Post-processing result for file ' + path + ' : Nothing returned from command execution')
                            else:
                                logging.info('Post-processing result for file ' + path + ' : ' + out)

                    return message
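
# List candidate subtitles for one file from every enabled provider so that a
# user can pick one manually. Returns a list of dicts (score as a percentage of
# the maximum subliminal score, language, provider, a base64-pickled subtitle
# object, page URL, matched and unmatched criteria) sorted by descending score.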
def manual_search(path, language, hi, providers, providers_auth, sceneName, media_type):
    if hi == "True":
        hi = True
    else:
        hi = False
    language_set = set()
    for lang in ast.literal_eval(language):
        if lang == 'pb':
            language_set.add(Language('por', 'BR'))
        else:
            language_set.add(Language(alpha3_from_alpha2(lang)))
    try:
        if sceneName != "None":
            video = Video.fromname(sceneName)
        else:
            video = scan_video(path)
    except:
        logging.error("Error trying to get video information.")
    else:
        if media_type == "movie":
            max_score = 120.0
        elif media_type == "series":
            max_score = 360.0
        try:
            with AsyncProviderPool(max_workers=None, providers=providers, provider_configs=providers_auth) as p:
                subtitles = p.list_subtitles(video, language_set)
        except Exception as e:
            logging.exception("Error trying to get subtitle list from provider")
        else:
            subtitles_list = []
            for s in subtitles:
                if media_type == "movie":
                    matched = set(s.get_matches(video))
                    if hi == s.hearing_impaired:
                        matched.add('hearing_impaired')
                    not_matched = set(score.movie_scores.keys()) - matched
                    if "title" in not_matched:
                        continue
                elif media_type == "series":
                    matched = set(s.get_matches(video))
                    if hi == s.hearing_impaired:
                        matched.add('hearing_impaired')
                    not_matched = set(score.episode_scores.keys()) - matched
                    if "series" in not_matched or "season" in not_matched or "episode" in not_matched:
                        continue
                subtitles_list.append(
                    dict(score=round((compute_score(s, video, hearing_impaired=hi) / max_score * 100), 2),
                         language=alpha2_from_alpha3(s.language.alpha3),
                         hearing_impaired=str(s.hearing_impaired),
                         provider=s.provider_name,
                         subtitle=codecs.encode(pickle.dumps(s), "base64").decode(),
                         url=s.page_link,
                         matches=list(matched),
                         dont_matches=list(not_matched)))
            subtitles_dict = sorted(subtitles_list, key=lambda x: x['score'], reverse=True)
            return subtitles_dict
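
# Download a specific subtitle previously selected through manual_search().
# `subtitle` is the base64-pickled subtitle object produced by manual_search()
# and `provider` is the provider it came from. Returns a result message on
# success, or None on failure.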
def manual_download_subtitle(path, language, hi, subtitle, provider, providers_auth, sceneName, media_type):
    if hi == "True":
        hi = True
    else:
        hi = False
    subtitle = pickle.loads(codecs.decode(subtitle.encode(), "base64"))
    if media_type == 'series':
        type_of_score = 360
    elif media_type == 'movie':
        type_of_score = 120
    use_scenename = get_general_settings()[9]
    use_postprocessing = get_general_settings()[10]
    postprocessing_cmd = get_general_settings()[11]
    if language == 'pb':
        language = alpha3_from_alpha2(language)
        lang_obj = Language('por', 'BR')
    else:
        language = alpha3_from_alpha2(language)
        lang_obj = Language(language)

    try:
        if sceneName is None or use_scenename is False:
            used_sceneName = False
            video = scan_video(path)
        else:
            used_sceneName = True
            video = Video.fromname(sceneName)
    except Exception as e:
        logging.exception('Error trying to extract information from this filename: ' + path)
        return None
    else:
        try:
            best_subtitle = subtitle
            download_subtitles([best_subtitle], providers=provider, provider_configs=providers_auth)
        except Exception as e:
            logging.exception('Error downloading subtitles for ' + path)
            return None
        else:
            single = get_general_settings()[7]
            try:
                score = round(float(compute_score(best_subtitle, video, hearing_impaired=hi)) / type_of_score * 100, 2)
                if used_sceneName == True:
                    video = scan_video(path)
                if single is True:
                    result = save_subtitles(video, [best_subtitle], single=True, encoding='utf-8')
                else:
                    result = save_subtitles(video, [best_subtitle], encoding='utf-8')
            except Exception as e:
                logging.exception('Error saving subtitles file to disk.')
                return None
            else:
                downloaded_provider = str(result[0]).strip('<>').split(' ')[0][:-8]
                downloaded_language = language_from_alpha3(language)
                downloaded_language_code2 = alpha2_from_alpha3(language)
                downloaded_language_code3 = language
                downloaded_path = get_subtitle_path(path, language=lang_obj)
                message = downloaded_language + " subtitles downloaded from " + downloaded_provider + " with a score of " + unicode(score) + "% using manual search."

                if use_postprocessing is True:
                    command = pp_replace(postprocessing_cmd, path, downloaded_path, downloaded_language, downloaded_language_code2, downloaded_language_code3)
                    try:
                        if os.name == 'nt':
                            codepage = subprocess.Popen("chcp", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                            # wait for the process to terminate
                            out_codepage, err_codepage = codepage.communicate()
                            encoding = out_codepage.split(':')[-1].strip()

                        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                        # wait for the process to terminate
                        out, err = process.communicate()

                        if os.name == 'nt':
                            out = out.decode(encoding)

                    except:
                        if out == "":
                            logging.error('Post-processing result for file ' + path + ' : Nothing returned from command execution')
                        else:
                            logging.error('Post-processing result for file ' + path + ' : ' + out)
                    else:
                        if out == "":
                            logging.info('Post-processing result for file ' + path + ' : Nothing returned from command execution')
                        else:
                            logging.info('Post-processing result for file ' + path + ' : ' + out)

                return message
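
# Download every missing subtitle for all episodes of a Sonarr series
# (identified by its sonarrSeriesId), then refresh the stored and missing
# subtitle lists, log history and send notifications for each download.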
def series_download_subtitles(no):
    if get_general_settings()[24] is True:
        monitored_only_query_string = ' AND monitored = "True"'
    else:
        monitored_only_query_string = ""

    conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
    c_db = conn_db.cursor()
    episodes_details = c_db.execute('SELECT path, missing_subtitles, sonarrEpisodeId, scene_name FROM table_episodes WHERE sonarrSeriesId = ? AND missing_subtitles != "[]"' + monitored_only_query_string, (no,)).fetchall()
    series_details = c_db.execute("SELECT hearing_impaired FROM table_shows WHERE sonarrSeriesId = ?", (no,)).fetchone()
    c_db.close()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for episode in episodes_details:
        for language in ast.literal_eval(episode[1]):
            if language is not None:
                message = download_subtitle(path_replace(episode[0]), str(alpha3_from_alpha2(language)), series_details[0], providers_list, providers_auth, episode[3], 'series')
                if message is not None:
                    store_subtitles(path_replace(episode[0]))
                    history_log(1, no, episode[2], message)
                    send_notifications(no, episode[2], message)
    list_missing_subtitles(no)
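
# Same as series_download_subtitles(), but for a single Radarr movie
# identified by its radarrId.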
def movies_download_subtitles(no):
    conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
    c_db = conn_db.cursor()
    movie = c_db.execute("SELECT path, missing_subtitles, radarrId, sceneName, hearing_impaired FROM table_movies WHERE radarrId = ?", (no,)).fetchone()
    c_db.close()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for language in ast.literal_eval(movie[1]):
        if language is not None:
            message = download_subtitle(path_replace_movie(movie[0]), str(alpha3_from_alpha2(language)), movie[4], providers_list, providers_auth, movie[3], 'movie')
            if message is not None:
                store_subtitles_movie(path_replace_movie(movie[0]))
                history_log_movie(1, no, message)
                send_notifications_movie(no, message)
    list_missing_subtitles_movies(no)
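
# Search for the missing subtitles of one episode file (given by its on-disk
# path). Each attempt is recorded per language in the failedAttempts column so
# that search_active() can throttle how often the same subtitle is retried.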
def wanted_download_subtitles(path):
    conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
    c_db = conn_db.cursor()
    episodes_details = c_db.execute("SELECT table_episodes.path, table_episodes.missing_subtitles, table_episodes.sonarrEpisodeId, table_episodes.sonarrSeriesId, table_shows.hearing_impaired, table_episodes.scene_name, table_episodes.failedAttempts FROM table_episodes INNER JOIN table_shows on table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE table_episodes.path = ? AND missing_subtitles != '[]'", (path_replace_reverse(path),)).fetchall()
    c_db.close()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for episode in episodes_details:
        attempt = episode[6]
        if type(attempt) == unicode:
            attempt = ast.literal_eval(attempt)
        for language in ast.literal_eval(episode[1]):
            if attempt is None:
                attempt = []
                attempt.append([language, time.time()])
            else:
                att = zip(*attempt)[0]
                if language not in att:
                    attempt.append([language, time.time()])

            conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
            c_db = conn_db.cursor()
            c_db.execute('UPDATE table_episodes SET failedAttempts = ? WHERE sonarrEpisodeId = ?', (unicode(attempt), episode[2]))
            conn_db.commit()
            c_db.close()

            for i in range(len(attempt)):
                if attempt[i][0] == language:
                    if search_active(attempt[i][1]) is True:
                        message = download_subtitle(path_replace(episode[0]), str(alpha3_from_alpha2(language)), episode[4], providers_list, providers_auth, episode[5], 'series')
                        if message is not None:
                            store_subtitles(path_replace(episode[0]))
                            list_missing_subtitles(episode[3])
                            history_log(1, episode[3], episode[2], message)
                            send_notifications(episode[3], episode[2], message)
                    else:
                        logging.debug('Search is not active for episode ' + episode[0] + ' Language: ' + attempt[i][0])
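
# Movie counterpart of wanted_download_subtitles(): search for the missing
# subtitles of one movie file and track attempts in table_movies.failedAttempts.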
def wanted_download_subtitles_movie(path):
    conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
    c_db = conn_db.cursor()
    movies_details = c_db.execute("SELECT path, missing_subtitles, radarrId, radarrId, hearing_impaired, sceneName, failedAttempts FROM table_movies WHERE path = ? AND missing_subtitles != '[]'", (path_replace_reverse_movie(path),)).fetchall()
    c_db.close()

    providers_list = get_providers()
    providers_auth = get_providers_auth()

    for movie in movies_details:
        attempt = movie[6]
        if type(attempt) == unicode:
            attempt = ast.literal_eval(attempt)
        for language in ast.literal_eval(movie[1]):
            if attempt is None:
                attempt = []
                attempt.append([language, time.time()])
            else:
                att = zip(*attempt)[0]
                if language not in att:
                    attempt.append([language, time.time()])

            conn_db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
            c_db = conn_db.cursor()
            c_db.execute('UPDATE table_movies SET failedAttempts = ? WHERE radarrId = ?', (unicode(attempt), movie[2]))
            conn_db.commit()
            c_db.close()

            for i in range(len(attempt)):
                if attempt[i][0] == language:
                    if search_active(attempt[i][1]) is True:
                        message = download_subtitle(path_replace_movie(movie[0]), str(alpha3_from_alpha2(language)), movie[4], providers_list, providers_auth, movie[5], 'movie')
                        if message is not None:
                            store_subtitles_movie(path_replace_movie(movie[0]))
                            list_missing_subtitles_movies(movie[3])
                            history_log_movie(1, movie[3], message)
                            send_notifications_movie(movie[3], message)
                    else:
                        logging.info('Search is not active for movie ' + movie[0] + ' Language: ' + attempt[i][0])
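
# Scheduled task: collect every episode and movie that still has missing
# subtitles and run the wanted_* searches for each of them. General settings
# indexes 12 and 13 (read into `integration` below) appear to be the Sonarr and
# Radarr integration toggles; index 24 is the "monitored only" restriction.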
def wanted_search_missing_subtitles():
    db = sqlite3.connect(os.path.join(config_dir, 'db/bazarr.db'), timeout=30)
    db.create_function("path_substitution", 1, path_replace)
    db.create_function("path_substitution_movie", 1, path_replace_movie)
    c = db.cursor()

    if get_general_settings()[24] is True:
        monitored_only_query_string = ' AND monitored = "True"'
    else:
        monitored_only_query_string = ""

    c.execute("SELECT path_substitution(path) FROM table_episodes WHERE missing_subtitles != '[]'" + monitored_only_query_string)
    episodes = c.fetchall()

    c.execute("SELECT path_substitution_movie(path) FROM table_movies WHERE missing_subtitles != '[]'" + monitored_only_query_string)
    movies = c.fetchall()

    c.close()

    integration = get_general_settings()

    if integration[12] is True:
        for episode in episodes:
            wanted_download_subtitles(episode[0])

    if integration[13] is True:
        for movie in movies:
            wanted_download_subtitles_movie(movie[0])

    logging.info('Finished searching for missing subtitles. Check histories for more information.')
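
# Decide whether a previously failed search should be retried. When general
# setting index 25 is enabled, a subtitle is searched freely for three weeks
# after the first attempt, then only on days falling on a whole number of weeks
# past that deadline; otherwise every call returns True.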
def search_active(timestamp):
    if get_general_settings()[25] is True:
        search_deadline = timedelta(weeks=3)
        search_delta = timedelta(weeks=1)
        aa = datetime.fromtimestamp(float(timestamp))
        attempt_datetime = datetime.strptime(str(aa).split(".")[0], '%Y-%m-%d %H:%M:%S')
        attempt_search_deadline = attempt_datetime + search_deadline
        today = datetime.today()
        attempt_age_in_days = (today.date() - attempt_search_deadline.date()).days
        if today.date() <= attempt_search_deadline.date():
            return True
        elif attempt_age_in_days % search_delta.days == 0:
            return True
        else:
            return False
    else:
        return True