2019-12-11 03:20:42 +00:00
|
|
|
import os
|
|
|
|
import ast
|
|
|
|
import libs
|
2019-12-16 04:44:30 +00:00
|
|
|
from datetime import timedelta
|
|
|
|
import datetime
|
|
|
|
import pretty
|
2020-01-23 04:10:33 +00:00
|
|
|
import time
|
2019-12-11 03:20:42 +00:00
|
|
|
|
|
|
|
from get_args import args
|
2019-12-31 19:02:49 +00:00
|
|
|
from config import settings, base_url
|
2019-12-11 03:20:42 +00:00
|
|
|
|
|
|
|
from init import *
|
|
|
|
import logging
|
2019-12-16 04:44:30 +00:00
|
|
|
from database import database
|
2019-12-11 03:20:42 +00:00
|
|
|
from helper import path_replace, path_replace_reverse, path_replace_movie, path_replace_reverse_movie
|
2020-01-07 03:26:28 +00:00
|
|
|
from get_languages import language_from_alpha3, language_from_alpha2, alpha2_from_alpha3, alpha2_from_language, \
|
|
|
|
alpha3_from_language, alpha3_from_alpha2
|
|
|
|
from get_subtitle import download_subtitle, series_download_subtitles, movies_download_subtitles, \
|
|
|
|
manual_search, manual_download_subtitle, manual_upload_subtitle
|
|
|
|
from notifier import send_notifications, send_notifications_movie
|
|
|
|
from list_subtitles import store_subtitles, store_subtitles_movie, series_scan_subtitles, movies_scan_subtitles, \
|
|
|
|
list_missing_subtitles, list_missing_subtitles_movies
|
|
|
|
from utils import history_log, history_log_movie
|
|
|
|
from get_providers import get_providers, get_providers_auth, list_throttled_providers
|
2020-01-24 11:33:50 +00:00
|
|
|
from websocket_handler import event_stream
|
2020-01-07 03:26:28 +00:00
|
|
|
|
|
|
|
from subliminal_patch.core import SUBTITLE_EXTENSIONS
|
2019-12-13 02:59:48 +00:00
|
|
|
|
2019-12-27 22:05:45 +00:00
|
|
|
from flask import Flask, jsonify, request, Response, Blueprint
|
2019-12-13 02:59:48 +00:00
|
|
|
|
2019-12-16 04:44:30 +00:00
|
|
|
from flask_restful import Resource, Api
|
|
|
|
|
2019-12-31 19:02:49 +00:00
|
|
|
# Blueprint holding every API endpoint, mounted under <base_url>/api.
api_bp = Blueprint('api', __name__, url_prefix=base_url.rstrip('/')+'/api')
# Flask-RESTful wrapper; Resource classes are registered on it at the bottom of this file.
api = Api(api_bp)
|
2019-12-15 04:58:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Badges(Resource):
    """Expose the counters shown as UI badges.

    Returns a JSON object with the number of episodes and movies that still
    have missing subtitles, plus the count of currently throttled providers.
    """

    def get(self):
        result = {
            "missing_episodes": database.execute("SELECT COUNT(*) as count FROM table_episodes WHERE missing_subtitles "
                                                 "is not null AND missing_subtitles != '[]'", only_one=True)['count'],
            "missing_movies": database.execute("SELECT COUNT(*) as count FROM table_movies WHERE missing_subtitles "
                                               "is not null AND missing_subtitles != '[]'", only_one=True)['count'],
            # Fix: the setting holds a Python-literal list as a string; parse it with
            # ast.literal_eval instead of eval() so arbitrary code can never execute.
            "throttled_providers": len(ast.literal_eval(str(settings.general.throtteled_providers)))
        }
        return jsonify(result)
|
|
|
|
|
|
|
|
|
2020-01-22 04:54:32 +00:00
|
|
|
class Languages(Resource):
    """List subtitle languages, optionally filtered to enabled ones.

    Query string:
        enabled: when 'true' or '1' (case-insensitive), only enabled languages
            are returned; otherwise all languages are returned.
    """

    def get(self):
        enabled = request.args.get('enabled')
        # Fix: request.args.get returns None when the parameter is absent, and the
        # original code called .lower() on it unconditionally (AttributeError).
        # A missing/other value now falls through to the unfiltered query.
        if enabled is not None and enabled.lower() in ['true', '1']:
            result = database.execute("SELECT * FROM table_settings_languages WHERE enabled=1")
        else:
            result = database.execute("SELECT * FROM table_settings_languages")
        return jsonify(result)
|
|
|
|
|
|
|
|
|
2019-12-11 03:20:42 +00:00
|
|
|
class Series(Resource):
    """Datatables-style listing and per-series settings update for Sonarr shows."""

    def get(self):
        """Return series rows (optionally a single one by id) enriched for the UI.

        Query string: start/length control SQL OFFSET/LIMIT paging, draw is
        echoed back for Datatables, id selects a single sonarrSeriesId.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        seriesId = request.args.get('id')
        # Total row count is reported unfiltered for both Datatables counters.
        row_count = database.execute("SELECT COUNT(*) as count FROM table_shows", only_one=True)['count']
        if seriesId:
            result = database.execute("SELECT * FROM table_shows WHERE sonarrSeriesId=? ORDER BY sortTitle ASC LIMIT ? "
                                      "OFFSET ?", (seriesId, length, start))
        else:
            result = database.execute("SELECT * FROM table_shows ORDER BY sortTitle ASC LIMIT ? OFFSET ?", (length, start))
        for item in result:
            # Parse audio language: expand the stored name into name/code2/code3.
            if item['audio_language']:
                item.update({"audio_language": {"name": item['audio_language'],
                                                "code2": alpha2_from_language(item['audio_language']),
                                                "code3": alpha3_from_language(item['audio_language'])}})

            # Parse desired languages: stored as the string repr of a list of code2 values.
            if item['languages'] and item['languages'] != 'None':
                item.update({"languages": ast.literal_eval(item['languages'])})
                for i, subs in enumerate(item['languages']):
                    item['languages'][i] = {"name": language_from_alpha2(subs),
                                            "code2": subs,
                                            "code3": alpha3_from_alpha2(subs)}

            # Parse alternate titles (stored as a Python-literal list string).
            if item['alternateTitles']:
                item.update({"alternateTitles": ast.literal_eval(item['alternateTitles'])})

            # Provide mapped path (Sonarr path translated to a local path).
            mapped_path = path_replace(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist (series path is a directory).
            item.update({"exist": os.path.isdir(mapped_path)})

            # Add missing subtitles episode count
            item.update({"episodeMissingCount": database.execute("SELECT COUNT(*) as count FROM table_episodes WHERE "
                                                                 "sonarrSeriesId=? AND missing_subtitles is not null "
                                                                 "AND missing_subtitles != '[]'",
                                                                 (item['sonarrSeriesId'],), only_one=True)['count']})

            # Add episode count
            item.update({"episodeFileCount": database.execute("SELECT COUNT(*) as count FROM table_episodes WHERE "
                                                              "sonarrSeriesId=?", (item['sonarrSeriesId'],),
                                                              only_one=True)['count']})
        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=result)

    def post(self):
        """Update languages/hearing-impaired/forced settings for one series, then
        recompute its missing subtitles and notify websocket clients."""
        seriesId = request.args.get('seriesid')

        lang = request.form.getlist('languages')
        if len(lang) > 0:
            pass
        else:
            # No language selected: store the literal string 'None'.
            lang = 'None'

        single_language = settings.general.getboolean('single_language')
        if single_language:
            if str(lang) == "['None']":
                lang = 'None'
            else:
                lang = str(lang)
        else:
            if str(lang) == "['']":
                lang = '[]'

        hi = request.form.get('hearing_impaired')
        forced = request.form.get('forced')

        # Checkbox value 'on' is normalized to the string 'True'/'False' for storage.
        if hi == "on":
            hi = "True"
        else:
            hi = "False"

        result = database.execute("UPDATE table_shows SET languages=?, hearing_impaired=?, forced=? WHERE "
                                  "sonarrSeriesId=?", (str(lang), hi, forced, seriesId))

        list_missing_subtitles(no=seriesId)

        event_stream.write(type='series', action='update', series=seriesId)

        return '', 204
|
|
|
|
|
|
|
|
|
2019-12-14 17:34:14 +00:00
|
|
|
class Episodes(Resource):
    """Datatables-style listing of episodes for a series (or one episode by id)."""

    def get(self):
        """Return episode rows enriched with parsed subtitle info and paths.

        Query string: seriesid is required unless episodeid is given;
        start/length/draw are the usual Datatables paging parameters.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        seriesId = request.args.get('seriesid')
        episodeId = request.args.get('episodeid')
        row_count = database.execute("SELECT COUNT(*) as count FROM table_episodes WHERE sonarrSeriesId=?",
                                     (seriesId,), only_one=True)['count']
        if episodeId:
            result = database.execute("SELECT * FROM table_episodes WHERE sonarrEpisodeId=?", (episodeId,))
            desired_languages = database.execute("SELECT languages FROM table_shows WHERE sonarrSeriesId=?",
                                                 (seriesId,), only_one=True)['languages']
            if desired_languages == "None":
                desired_languages = '[]'
        elif seriesId:
            result = database.execute("SELECT * FROM table_episodes WHERE sonarrSeriesId=? ORDER BY season DESC, "
                                      "episode DESC", (seriesId,))
            desired_languages = database.execute("SELECT languages FROM table_shows WHERE sonarrSeriesId=?",
                                                 (seriesId,), only_one=True)['languages']
            if desired_languages == "None":
                desired_languages = '[]'
        else:
            return "Series ID not provided", 400
        for item in result:
            # Add Datatables rowId
            item.update({"DT_RowId": 'row_' + str(item['sonarrEpisodeId'])})

            # Parse subtitles: stored as the repr of a list of [lang_code, path] pairs;
            # a ':' suffix on the code (e.g. 'en:forced') marks a forced subtitle.
            if item['subtitles']:
                item.update({"subtitles": ast.literal_eval(item['subtitles'])})
                for subs in item['subtitles']:
                    subtitle = subs[0].split(':')
                    subs[0] = {"name": language_from_alpha2(subtitle[0]),
                               "code2": subtitle[0],
                               "code3": alpha3_from_alpha2(subtitle[0]),
                               "forced": True if len(subtitle) > 1 else False}

            # Parse missing subtitles (list of language codes, same ':forced' convention).
            if item['missing_subtitles']:
                item.update({"missing_subtitles": ast.literal_eval(item['missing_subtitles'])})
                for i, subs in enumerate(item['missing_subtitles']):
                    subtitle = subs.split(':')
                    item['missing_subtitles'][i] = {"name": language_from_alpha2(subtitle[0]),
                                                    "code2": subtitle[0],
                                                    "code3": alpha3_from_alpha2(subtitle[0]),
                                                    "forced": True if len(subtitle) > 1 else False}

            # Provide mapped path
            mapped_path = path_replace(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist (episode path is a file)
            item.update({"exist": os.path.isfile(mapped_path)})

            # Add the series desired subtitles language code2
            item.update({"desired_languages": desired_languages})
        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=result)
|
2019-12-14 17:34:14 +00:00
|
|
|
|
2020-01-07 03:26:28 +00:00
|
|
|
class EpisodesSubtitlesDelete(Resource):
    """Delete one subtitles file from disk for an episode and log the action.

    On success returns the confirmation message with 202; on failure the error
    is logged and an empty 204 is returned. In both cases the episode's stored
    subtitles are re-scanned so the database reflects the disk state.
    """

    def delete(self):
        episodePath = request.form.get('episodePath')
        language = request.form.get('language')
        subtitlesPath = request.form.get('subtitlesPath')
        sonarrSeriesId = request.form.get('sonarrSeriesId')
        sonarrEpisodeId = request.form.get('sonarrEpisodeId')

        try:
            os.remove(path_replace(subtitlesPath))
            result = language_from_alpha3(language) + " subtitles deleted from disk."
            # Action 0 records a deletion in the history table.
            history_log(0, sonarrSeriesId, sonarrEpisodeId, result, language=alpha2_from_alpha3(language))
            store_subtitles(path_replace_reverse(episodePath), episodePath)
            return result, 202
        except OSError:
            # Fix: dropped the unused 'as e' binding; logging.exception already
            # captures the active traceback.
            logging.exception('BAZARR cannot delete subtitles file: ' + subtitlesPath)

        store_subtitles(path_replace_reverse(episodePath), episodePath)
        return '', 204
|
2020-01-07 03:26:28 +00:00
|
|
|
|
|
|
|
|
|
|
|
class EpisodesSubtitlesDownload(Resource):
    """Search providers and download a subtitle for one episode automatically."""

    def post(self):
        form = request.form
        episodePath = form.get('episodePath')
        sceneName = form.get('sceneName')
        if sceneName == "null":
            sceneName = "None"
        language = form.get('language')
        hi = form.get('hi').capitalize()
        forced = form.get('forced').capitalize()
        sonarrSeriesId = form.get('sonarrSeriesId')
        sonarrEpisodeId = form.get('sonarrEpisodeId')
        title = form.get('title')
        providers_list = get_providers()
        providers_auth = get_providers_auth()

        try:
            result = download_subtitle(episodePath, language, hi, forced, providers_list, providers_auth,
                                       sceneName, title, 'series')
            if result is not None:
                # result tuple: (message, path, language_code2, provider, score, forced)
                message = result[0]
                path = result[1]
                forced = result[5]
                language_code = result[2] + ":forced" if forced else result[2]
                history_log(1, sonarrSeriesId, sonarrEpisodeId, message, path, language_code,
                            result[3], result[4])
                send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
                return result, 201
        except OSError:
            pass

        return '', 204
|
|
|
|
|
|
|
|
|
|
|
|
class EpisodesSubtitlesManualSearch(Resource):
    """Run an interactive provider search for an episode and return candidates."""

    def post(self):
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        form = request.form
        episodePath = form.get('episodePath')
        sceneName = form.get('sceneName')
        if sceneName == "null":
            sceneName = "None"
        language = form.get('language')
        hi = form.get('hi').capitalize()
        forced = form.get('forced').capitalize()
        title = form.get('title')
        providers_list = get_providers()
        providers_auth = get_providers_auth()

        data = manual_search(episodePath, language, hi, forced, providers_list, providers_auth,
                             sceneName, title, 'series')
        row_count = len(data)
        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=data)
|
|
|
|
|
|
|
|
|
|
|
|
class EpisodesSubtitlesManualDownload(Resource):
    """Download one specific subtitle picked from a manual search result."""

    def post(self):
        form = request.form
        episodePath = form.get('episodePath')
        sceneName = form.get('sceneName')
        if sceneName == "null":
            sceneName = "None"
        language = form.get('language')
        hi = form.get('hi').capitalize()
        forced = form.get('forced').capitalize()
        selected_provider = form.get('provider')
        subtitle = form.get('subtitle')
        sonarrSeriesId = form.get('sonarrSeriesId')
        sonarrEpisodeId = form.get('sonarrEpisodeId')
        title = form.get('title')
        providers_auth = get_providers_auth()

        try:
            result = manual_download_subtitle(episodePath, language, hi, forced, subtitle,
                                              selected_provider, providers_auth, sceneName,
                                              title, 'series')
            if result is not None:
                # result tuple: (message, path, language_code2, provider, score, forced)
                message = result[0]
                path = result[1]
                forced = result[5]
                language_code = result[2] + ":forced" if forced else result[2]
                history_log(2, sonarrSeriesId, sonarrEpisodeId, message, path, language_code,
                            result[3], result[4])
                send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)
                return result, 201
        except OSError:
            pass

        return '', 204
|
|
|
|
|
|
|
|
|
|
|
|
class EpisodesSubtitlesUpload(Resource):
    """Accept a user-uploaded subtitle file for an episode and register it."""

    def post(self):
        form = request.form
        episodePath = form.get('episodePath')
        sceneName = form.get('sceneName')
        if sceneName == "null":
            sceneName = "None"
        language = form.get('language')
        forced = form.get('forced') == 'on'
        upload = request.files.get('upload')
        sonarrSeriesId = form.get('sonarrSeriesId')
        sonarrEpisodeId = form.get('sonarrEpisodeId')
        title = form.get('title')

        # Reject anything whose extension is not a known subtitle format.
        ext = os.path.splitext(upload.filename)[1]
        if ext not in SUBTITLE_EXTENSIONS:
            raise ValueError('A subtitle of an invalid format was uploaded.')

        try:
            result = manual_upload_subtitle(path=episodePath,
                                            language=language,
                                            forced=forced,
                                            title=title,
                                            scene_name=sceneName,
                                            media_type='series',
                                            subtitle=upload)

            if result is not None:
                message = result[0]
                path = result[1]
                language_code = language + ":forced" if forced else language
                # Manual uploads are logged with provider 'manual' and a perfect score.
                history_log(4, sonarrSeriesId, sonarrEpisodeId, message, path, language_code,
                            "manual", 360)
                send_notifications(sonarrSeriesId, sonarrEpisodeId, message)
                store_subtitles(path, episodePath)

            return result, 201
        except OSError:
            pass

        return '', 204
|
|
|
|
|
2019-12-14 17:34:14 +00:00
|
|
|
|
2020-01-22 04:54:32 +00:00
|
|
|
class EpisodesScanDisk(Resource):
    """Trigger a disk scan of existing subtitles for one series."""

    def get(self):
        # Kick off the scan for the requested series and acknowledge.
        series_scan_subtitles(request.args.get('seriesid'))
        return '', 200
|
|
|
|
|
|
|
|
|
|
|
|
class EpisodesSearchMissing(Resource):
    """Trigger a download search for all missing subtitles of one series."""

    def get(self):
        # Start the search for the requested series and acknowledge.
        series_download_subtitles(request.args.get('seriesid'))
        return '', 200
|
|
|
|
|
|
|
|
|
2020-01-23 04:10:33 +00:00
|
|
|
class EpisodesHistory(Resource):
    """Return the subtitle history of one episode, formatted for display."""

    def get(self):
        episodeid = request.args.get('episodeid')

        episode_history = database.execute("SELECT action, timestamp, language, provider, score FROM table_history "
                                           "WHERE sonarrEpisodeId=? ORDER BY timestamp DESC", (episodeid,))
        for item in episode_history:
            # Replace the raw epoch with an HTML tooltip snippet: exact time on
            # hover, human-friendly relative date as visible text.
            raw_ts = item['timestamp']
            exact = time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(raw_ts))
            relative = pretty.date(datetime.datetime.fromtimestamp(raw_ts))
            item['timestamp'] = ("<div title='" + exact +
                                 "' data-toggle='tooltip' data-placement='left'>" +
                                 relative + "</div>")
            if item['language']:
                item['language'] = language_from_alpha2(item['language'])
            else:
                item['language'] = "<i>undefined</i>"
            if item['score']:
                # Scores are stored out of 360; render as a percentage.
                item['score'] = str(round((int(item['score']) * 100 / 360), 2)) + "%"

        return jsonify(data=episode_history)
|
|
|
|
|
|
|
|
|
2019-12-15 04:58:51 +00:00
|
|
|
class Movies(Resource):
    """Datatables-style listing of Radarr movies (optionally one by id)."""

    def get(self):
        """Return movie rows enriched with parsed languages, subtitles and paths.

        Query string: start/length control SQL OFFSET/LIMIT paging, draw is
        echoed back for Datatables, id selects a single radarrId.
        """
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        moviesId = request.args.get('id')
        row_count = database.execute("SELECT COUNT(*) as count FROM table_movies", only_one=True)['count']
        if moviesId:
            # Bug fix: parameters were previously passed as two separate tuples
            # ((length, start), (moviesId,)); database.execute takes ONE tuple
            # matching the three '?' placeholders in order (id, limit, offset).
            result = database.execute("SELECT * FROM table_movies WHERE radarrId=? ORDER BY sortTitle ASC LIMIT ? "
                                      "OFFSET ?", (moviesId, length, start))
        else:
            result = database.execute("SELECT * FROM table_movies ORDER BY sortTitle ASC LIMIT ? OFFSET ?",
                                      (length, start))
        for item in result:
            # Parse audio language: expand the stored name into name/code2/code3.
            if item['audio_language']:
                item.update({"audio_language": {"name": item['audio_language'],
                                                "code2": alpha2_from_language(item['audio_language']),
                                                "code3": alpha3_from_language(item['audio_language'])}})

            # Parse desired languages (stored as a Python-literal list string).
            if item['languages'] and item['languages'] != 'None':
                item.update({"languages": ast.literal_eval(item['languages'])})
                for i, subs in enumerate(item['languages']):
                    item['languages'][i] = {"name": language_from_alpha2(subs),
                                            "code2": subs,
                                            "code3": alpha3_from_alpha2(subs)}

            # Parse alternate titles
            if item['alternativeTitles']:
                item.update({"alternativeTitles": ast.literal_eval(item['alternativeTitles'])})

            # Parse failed attempts
            if item['failedAttempts']:
                item.update({"failedAttempts": ast.literal_eval(item['failedAttempts'])})

            # Parse subtitles
            if item['subtitles']:
                item.update({"subtitles": ast.literal_eval(item['subtitles'])})
                for subs in item['subtitles']:
                    subs[0] = {"name": language_from_alpha2(subs[0]),
                               "code2": subs[0],
                               "code3": alpha3_from_alpha2(subs[0])}

            # Parse missing subtitles
            if item['missing_subtitles']:
                item.update({"missing_subtitles": ast.literal_eval(item['missing_subtitles'])})
                for i, subs in enumerate(item['missing_subtitles']):
                    item['missing_subtitles'][i] = {"name": language_from_alpha2(subs),
                                                    "code2": subs,
                                                    "code3": alpha3_from_alpha2(subs)}

            # Provide mapped path (Radarr path translated to a local path).
            mapped_path = path_replace_movie(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist
            item.update({"exist": os.path.isfile(mapped_path)})

        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=result)
|
2019-12-15 04:58:51 +00:00
|
|
|
|
|
|
|
|
2019-12-16 04:44:30 +00:00
|
|
|
class HistorySeries(Resource):
    """Paginated episode subtitle history, flagging entries eligible for upgrade."""

    def get(self):
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        upgradable_episodes_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            # Oldest epoch timestamp still within the upgrade window.
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            # Action codes: include manual downloads (2) only when configured.
            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3]
            else:
                query_actions = [1, 3]

            if settings.sonarr.getboolean('only_monitored'):
                series_monitored_only_query_string = " AND monitored='True'"
            else:
                series_monitored_only_query_string = ''

            # Most recent history entry per (video_path, language) inside the window.
            upgradable_episodes = database.execute(
                "SELECT video_path, MAX(timestamp) as timestamp, score FROM table_history "
                "INNER JOIN table_episodes on table_episodes.sonarrEpisodeId = "
                "table_history.sonarrEpisodeId WHERE action IN (" +
                ','.join(map(str, query_actions)) + ") AND timestamp > ? AND "
                "score is not null" + series_monitored_only_query_string + " GROUP BY "
                "table_history.video_path, table_history.language",
                (minimum_timestamp,))

            for upgradable_episode in upgradable_episodes:
                if upgradable_episode['timestamp'] > minimum_timestamp:
                    # Scores that are not numeric are skipped silently.
                    try:
                        int(upgradable_episode['score'])
                    except ValueError:
                        pass
                    else:
                        # 360 is the maximum (perfect) episode score; below that
                        # the subtitle is a candidate for upgrade.
                        if int(upgradable_episode['score']) < 360:
                            upgradable_episodes_not_perfect.append(upgradable_episode)

        row_count = database.execute("SELECT COUNT(*) as count FROM table_history", only_one=True)['count']
        data = database.execute("SELECT table_history.action, table_shows.title as seriesTitle, "
                                "table_episodes.season || 'x' || table_episodes.episode as episode_number, "
                                "table_episodes.title as episodeTitle, table_history.timestamp, "
                                "table_history.description, table_history.sonarrSeriesId, table_episodes.path, "
                                "table_history.language, table_history.score FROM table_history LEFT JOIN table_shows "
                                "on table_shows.sonarrSeriesId = table_history.sonarrSeriesId LEFT JOIN table_episodes "
                                "on table_episodes.sonarrEpisodeId = table_history.sonarrEpisodeId WHERE "
                                "table_episodes.title is not NULL ORDER BY timestamp DESC LIMIT ? OFFSET ?",
                                (length, start))

        for item in data:
            # Mark episode as upgradable or not. NOTE(review): the comparison key
            # uses item['path'] under the name "video_path" — presumably they refer
            # to the same file path; verify against the schema.
            if {"video_path": str(item['path']), "timestamp": float(item['timestamp']), "score": str(item['score'])} in upgradable_episodes_not_perfect:
                item.update({"upgradable": True})
            else:
                item.update({"upgradable": False})

            # Parse language ('code2' optionally suffixed with ':forced').
            if item['language'] and item['language'] != 'None':
                splitted_language = item['language'].split(':')
                item['language'] = {"name": language_from_alpha2(splitted_language[0]),
                                    "code2": splitted_language[0],
                                    "code3": alpha3_from_alpha2(splitted_language[0]),
                                    "forced": True if len(splitted_language) > 1 else False}

            # Make timestamp pretty
            if item['timestamp']:
                item['timestamp'] = pretty.date(int(item['timestamp']))

            # Provide mapped path
            mapped_path = path_replace(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist
            item.update({"exist": os.path.isfile(mapped_path)})

        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=data)
|
2019-12-16 04:44:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
class HistoryMovies(Resource):
    """Paginated movie subtitle history, flagging entries eligible for upgrade."""

    def get(self):
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        upgradable_movies = []
        upgradable_movies_not_perfect = []
        if settings.general.getboolean('upgrade_subs'):
            days_to_upgrade_subs = settings.general.days_to_upgrade_subs
            # Oldest epoch timestamp still within the upgrade window.
            minimum_timestamp = ((datetime.datetime.now() - timedelta(days=int(days_to_upgrade_subs))) -
                                 datetime.datetime(1970, 1, 1)).total_seconds()

            if settings.radarr.getboolean('only_monitored'):
                movies_monitored_only_query_string = ' AND table_movies.monitored = "True"'
            else:
                movies_monitored_only_query_string = ""

            # Action codes: include manual downloads (2) only when configured.
            if settings.general.getboolean('upgrade_manual'):
                query_actions = [1, 2, 3]
            else:
                query_actions = [1, 3]

            # Most recent history entry per (video_path, language) inside the window.
            upgradable_movies = database.execute(
                "SELECT video_path, MAX(timestamp) as timestamp, score FROM table_history_movie "
                "INNER JOIN table_movies on table_movies.radarrId=table_history_movie.radarrId WHERE action IN (" +
                ','.join(map(str, query_actions)) + ") AND timestamp > ? AND score is not NULL" +
                movies_monitored_only_query_string + " GROUP BY video_path, language", (minimum_timestamp,))

            for upgradable_movie in upgradable_movies:
                if upgradable_movie['timestamp'] > minimum_timestamp:
                    # Scores that are not numeric are skipped silently.
                    try:
                        int(upgradable_movie['score'])
                    except ValueError:
                        pass
                    else:
                        # 120 is the maximum (perfect) movie score; below that
                        # the subtitle is a candidate for upgrade.
                        if int(upgradable_movie['score']) < 120:
                            upgradable_movies_not_perfect.append(upgradable_movie)

        row_count = database.execute("SELECT COUNT(*) as count FROM table_history_movie", only_one=True)['count']
        data = database.execute("SELECT table_history_movie.action, table_movies.title, table_history_movie.timestamp, "
                                "table_history_movie.description, table_history_movie.radarrId, "
                                "table_history_movie.video_path, table_history_movie.language, "
                                "table_history_movie.score FROM table_history_movie LEFT JOIN table_movies on "
                                "table_movies.radarrId = table_history_movie.radarrId ORDER BY timestamp DESC LIMIT ? "
                                "OFFSET ?", (length, start))

        for item in data:
            # Mark movies as upgradable or not
            if {"video_path": str(item['video_path']), "timestamp": float(item['timestamp']), "score": str(item['score'])} in upgradable_movies_not_perfect:
                item.update({"upgradable": True})
            else:
                item.update({"upgradable": False})

            # Parse language ('code2' optionally suffixed with ':forced').
            if item['language'] and item['language'] != 'None':
                splitted_language = item['language'].split(':')
                item['language'] = {"name": language_from_alpha2(splitted_language[0]),
                                    "code2": splitted_language[0],
                                    "code3": alpha3_from_alpha2(splitted_language[0]),
                                    "forced": True if len(splitted_language) > 1 else False}

            # Make timestamp pretty
            if item['timestamp']:
                item['timestamp'] = pretty.date(int(item['timestamp']))

            if item['video_path']:
                # Provide mapped path
                mapped_path = path_replace_movie(item['video_path'])
                item.update({"mapped_path": mapped_path})

                # Confirm if path exist
                item.update({"exist": os.path.isfile(mapped_path)})
            else:
                item.update({"mapped_path": None})
                item.update({"exist": False})

        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=data)
|
2019-12-16 04:44:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
class WantedSeries(Resource):
    """Paginated list of episodes that still have missing subtitles."""

    def get(self):
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        # Optionally restrict to monitored episodes, per Sonarr settings.
        if settings.sonarr.getboolean('only_monitored'):
            monitored_only_query_string = " AND monitored='True'"
        else:
            monitored_only_query_string = ''

        row_count = database.execute("SELECT COUNT(*) as count FROM table_episodes", only_one=True)['count']
        data = database.execute("SELECT table_shows.title as seriesTitle, "
                                "table_episodes.season || 'x' || table_episodes.episode as episode_number, "
                                "table_episodes.title as episodeTitle, table_episodes.missing_subtitles, "
                                "table_episodes.sonarrSeriesId, table_episodes.path, table_shows.hearing_impaired, "
                                "table_episodes.sonarrEpisodeId, table_episodes.scene_name, "
                                "table_episodes.failedAttempts FROM table_episodes INNER JOIN table_shows on "
                                "table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE "
                                "table_episodes.missing_subtitles != '[]'" + monitored_only_query_string +
                                " ORDER BY table_episodes._rowid_ DESC LIMIT ? OFFSET ?", (length, start))

        for item in data:
            # Parse missing subtitles ('code2' optionally suffixed with ':forced').
            if item['missing_subtitles']:
                item.update({"missing_subtitles": ast.literal_eval(item['missing_subtitles'])})
                for i, subs in enumerate(item['missing_subtitles']):
                    splitted_subs = subs.split(':')
                    item['missing_subtitles'][i] = {"name": language_from_alpha2(splitted_subs[0]),
                                                    "code2": splitted_subs[0],
                                                    "code3": alpha3_from_alpha2(splitted_subs[0]),
                                                    "forced": True if len(splitted_subs) > 1 else False}

            # Provide mapped path
            mapped_path = path_replace(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist
            item.update({"exist": os.path.isfile(mapped_path)})

        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=data)
|
2019-12-16 04:44:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
class WantedMovies(Resource):
    """Paginated list of movies that still have missing subtitles."""

    def get(self):
        start = request.args.get('start') or 0
        length = request.args.get('length') or -1
        draw = request.args.get('draw')

        # Optionally restrict to monitored movies, per Radarr settings.
        if settings.radarr.getboolean('only_monitored'):
            monitored_only_query_string = " AND monitored='True'"
        else:
            monitored_only_query_string = ''

        row_count = database.execute("SELECT COUNT(*) as count FROM table_movies", only_one=True)['count']
        data = database.execute("SELECT title, missing_subtitles, radarrId, path, hearing_impaired, sceneName, "
                                "failedAttempts FROM table_movies WHERE missing_subtitles != '[]'" +
                                monitored_only_query_string + " ORDER BY _rowid_ DESC LIMIT ? OFFSET ?",
                                (length, start))

        for item in data:
            # Parse missing subtitles ('code2' optionally suffixed with ':forced').
            if item['missing_subtitles']:
                item.update({"missing_subtitles": ast.literal_eval(item['missing_subtitles'])})
                for i, subs in enumerate(item['missing_subtitles']):
                    splitted_subs = subs.split(':')
                    item['missing_subtitles'][i] = {"name": language_from_alpha2(splitted_subs[0]),
                                                    "code2": splitted_subs[0],
                                                    "code3": alpha3_from_alpha2(splitted_subs[0]),
                                                    "forced": True if len(splitted_subs) > 1 else False}

            # Provide mapped path
            mapped_path = path_replace_movie(item['path'])
            item.update({"mapped_path": mapped_path})

            # Confirm if path exist
            item.update({"exist": os.path.isfile(mapped_path)})

        return jsonify(draw=draw, recordsTotal=row_count, recordsFiltered=row_count, data=data)
|
2019-12-16 04:44:30 +00:00
|
|
|
|
|
|
|
|
2019-12-28 05:52:00 +00:00
|
|
|
# Route registration: map each Resource class to its endpoint under /api.
api.add_resource(Badges, '/badges')
api.add_resource(Languages, '/languages')
api.add_resource(Series, '/series')
api.add_resource(Episodes, '/episodes')
api.add_resource(EpisodesSubtitlesDelete, '/episodes_subtitles_delete')
api.add_resource(EpisodesSubtitlesDownload, '/episodes_subtitles_download')
api.add_resource(EpisodesSubtitlesManualSearch, '/episodes_subtitles_manual_search')
api.add_resource(EpisodesSubtitlesManualDownload, '/episodes_subtitles_manual_download')
api.add_resource(EpisodesSubtitlesUpload, '/episodes_subtitles_upload')
api.add_resource(EpisodesScanDisk, '/episodes_scan_disk')
api.add_resource(EpisodesSearchMissing, '/episodes_search_missing')
api.add_resource(EpisodesHistory, '/episodes_history')
api.add_resource(Movies, '/movies')
api.add_resource(HistorySeries, '/history_series')
api.add_resource(HistoryMovies, '/history_movies')
api.add_resource(WantedSeries, '/wanted_series')
api.add_resource(WantedMovies, '/wanted_movies')
|