From 8959a79103a2ac0f86d4fee62120605cb5dc5f06 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Louis=20V=C3=A9zina?= <5130500+morpheus65535@users.noreply.github.com>
Date: Fri, 15 Mar 2019 16:04:28 -0400
Subject: [PATCH 1/3] Added debug logging for video object properties.

---
 bazarr/get_subtitle.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bazarr/get_subtitle.py b/bazarr/get_subtitle.py
index 7531d565e..dace4f554 100644
--- a/bazarr/get_subtitle.py
+++ b/bazarr/get_subtitle.py
@@ -67,6 +67,7 @@ def get_video(path, title, sceneName, use_scenename, providers=None, media_type=
         video.original_name = original_name
         video.original_path = original_path
         refine_from_db(original_path,video)
+        logging.debug('BAZARR is using those video object properties: %s', vars(video))
 
         return video
     except:

From b247500b686b6c75c8a84e2cf7eb1ebf35c7bc03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Louis=20V=C3=A9zina?= <5130500+morpheus65535@users.noreply.github.com>
Date: Sat, 16 Mar 2019 15:36:02 -0400
Subject: [PATCH 2/3] Limit the number of returned rows to 10 to reduce the
 size of the serialized API JSON.

---
 bazarr/main.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/bazarr/main.py b/bazarr/main.py
index bfb98fe42..a04152635 100644
--- a/bazarr/main.py
+++ b/bazarr/main.py
@@ -1854,7 +1854,7 @@ def api_wanted():
     db = sqlite3.connect(os.path.join(args.config_dir, 'db', 'bazarr.db'), timeout=30)
     c = db.cursor()
     data = c.execute(
-        "SELECT table_shows.title, table_episodes.season || 'x' || table_episodes.episode, table_episodes.title, table_episodes.missing_subtitles FROM table_episodes INNER JOIN table_shows on table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE table_episodes.missing_subtitles != '[]' ORDER BY table_episodes._rowid_ DESC").fetchall()
+        "SELECT table_shows.title, table_episodes.season || 'x' || table_episodes.episode, table_episodes.title, table_episodes.missing_subtitles FROM table_episodes INNER JOIN table_shows on table_shows.sonarrSeriesId = table_episodes.sonarrSeriesId WHERE table_episodes.missing_subtitles != '[]' ORDER BY table_episodes._rowid_ DESC LIMIT 10").fetchall()
     c.close()
     return dict(subtitles=data)
 
@@ -1864,7 +1864,7 @@ def api_history():
     db = sqlite3.connect(os.path.join(args.config_dir, 'db', 'bazarr.db'), timeout=30)
     c = db.cursor()
     data = c.execute(
-        "SELECT table_shows.title, table_episodes.season || 'x' || table_episodes.episode, table_episodes.title, strftime('%Y-%m-%d', datetime(table_history.timestamp, 'unixepoch')), table_history.description FROM table_history INNER JOIN table_shows on table_shows.sonarrSeriesId = table_history.sonarrSeriesId INNER JOIN table_episodes on table_episodes.sonarrEpisodeId = table_history.sonarrEpisodeId WHERE table_history.action = '1' ORDER BY id DESC").fetchall()
+        "SELECT table_shows.title, table_episodes.season || 'x' || table_episodes.episode, table_episodes.title, strftime('%Y-%m-%d', datetime(table_history.timestamp, 'unixepoch')), table_history.description FROM table_history INNER JOIN table_shows on table_shows.sonarrSeriesId = table_history.sonarrSeriesId INNER JOIN table_episodes on table_episodes.sonarrEpisodeId = table_history.sonarrEpisodeId WHERE table_history.action = '1' ORDER BY id DESC LIMIT 10").fetchall()
     c.close()
     return dict(subtitles=data)
 
@@ -1874,7 +1874,7 @@ def api_wanted():
     db = sqlite3.connect(os.path.join(args.config_dir, 'db', 'bazarr.db'), timeout=30)
     c = db.cursor()
     data = c.execute(
-        "SELECT table_movies.title, table_movies.missing_subtitles FROM table_movies WHERE table_movies.missing_subtitles != '[]' ORDER BY table_movies._rowid_ DESC").fetchall()
+        "SELECT table_movies.title, table_movies.missing_subtitles FROM table_movies WHERE table_movies.missing_subtitles != '[]' ORDER BY table_movies._rowid_ DESC LIMIT 10").fetchall()
     c.close()
     return dict(subtitles=data)
 
@@ -1884,7 +1884,7 @@ def api_history():
     db = sqlite3.connect(os.path.join(args.config_dir, 'db', 'bazarr.db'), timeout=30)
     c = db.cursor()
     data = c.execute(
-        "SELECT table_movies.title, strftime('%Y-%m-%d', datetime(table_history_movie.timestamp, 'unixepoch')), table_history_movie.description FROM table_history_movie INNER JOIN table_movies on table_movies.radarrId = table_history_movie.radarrId WHERE table_history_movie.action = '1' ORDER BY id DESC").fetchall()
+        "SELECT table_movies.title, strftime('%Y-%m-%d', datetime(table_history_movie.timestamp, 'unixepoch')), table_history_movie.description FROM table_history_movie INNER JOIN table_movies on table_movies.radarrId = table_history_movie.radarrId WHERE table_history_movie.action = '1' ORDER BY id DESC LIMIT 10").fetchall()
     c.close()
     return dict(subtitles=data)
 
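A side note on the LIMIT 10 change above: it hard-codes the row cap inside four SQL
strings. A minimal sketch of an alternative, assuming the same bazarr.db schema and a
config_dir value such as args.config_dir, would bind the cap as a query parameter so it
lives in one place; the function name and constant below are illustrative only, not code
from the patch:

    import os
    import sqlite3

    API_ROW_LIMIT = 10  # hypothetical constant; the patch hard-codes LIMIT 10

    def api_wanted_movies(config_dir, limit=API_ROW_LIMIT):
        # Same query as the patched movie api_wanted() endpoint, with the row
        # cap bound as a parameter instead of embedded in the SQL text.
        db = sqlite3.connect(os.path.join(config_dir, 'db', 'bazarr.db'), timeout=30)
        c = db.cursor()
        data = c.execute(
            "SELECT table_movies.title, table_movies.missing_subtitles "
            "FROM table_movies "
            "WHERE table_movies.missing_subtitles != '[]' "
            "ORDER BY table_movies._rowid_ DESC LIMIT ?", (limit,)).fetchall()
        c.close()
        db.close()
        return dict(subtitles=data)
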
From 2392d45141b566e387b7d253ddf745ad7ab21293 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Louis=20V=C3=A9zina?= <5130500+morpheus65535@users.noreply.github.com>
Date: Sun, 17 Mar 2019 14:57:23 -0400
Subject: [PATCH 3/3] Fix for bad logging to history when manually downloading
 a subtitle.

---
 bazarr/get_subtitle.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/bazarr/get_subtitle.py b/bazarr/get_subtitle.py
index dace4f554..206edda63 100644
--- a/bazarr/get_subtitle.py
+++ b/bazarr/get_subtitle.py
@@ -405,8 +405,13 @@ def manual_download_subtitle(path, language, hi, subtitle, provider, providers_a
                     'BAZARR Post-processing result for file ' + path + ' : Nothing returned from command execution')
             else:
                 logging.info('BAZARR Post-processing result for file ' + path + ' : ' + out)
-
-        return message
+
+        if media_type == 'series':
+            reversed_path = path_replace_reverse(path)
+        else:
+            reversed_path = path_replace_reverse_movie(path)
+
+        return message, reversed_path, downloaded_language_code2, downloaded_provider, subtitle.score
     else:
         logging.error(
             "BAZARR Tried to manually download a subtitles for file: " + path + " but we weren't able to do (probably throttled by " + str(
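The change above also alters the contract of manual_download_subtitle(): on a successful
manual download it now returns a 5-tuple (message, reversed_path,
downloaded_language_code2, downloaded_provider, subtitle.score) instead of a bare message
string, so the caller that writes the history entry has the reversed path, language,
provider and score available. A minimal sketch of how a caller might unpack it, assuming
failures still surface as None; the helper name and logging below are illustrative, not
Bazarr's actual caller:

    import logging

    def handle_manual_download_result(result, path):
        # Sketch only: with PATCH 3/3 applied, a successful call returns a
        # 5-tuple rather than a bare message string.
        if result is None:
            logging.error('BAZARR manual download returned nothing for %s', path)
            return None
        message, reversed_path, language_code2, provider_name, score = result
        logging.debug('BAZARR manual download: %s (%s from %s, score %s) -> %s',
                      message, language_code2, provider_name, score, reversed_path)
        return message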