2017-10-20 12:59:21 +00:00
|
|
|
import os
|
2017-09-16 00:49:46 +00:00
|
|
|
import sqlite3
|
|
|
|
import requests
|
2017-12-06 04:07:37 +00:00
|
|
|
import logging
|
2017-09-16 00:49:46 +00:00
|
|
|
|
2017-10-16 23:27:19 +00:00
|
|
|
from get_general_settings import *
|
2017-09-28 01:55:21 +00:00
|
|
|
from list_subtitles import *
|
2017-12-20 03:25:10 +00:00
|
|
|
|
2018-05-27 03:01:57 +00:00
|
|
|
def update_all_episodes():
    """Full re-scan for series: index every subtitle file found on disk,
    then recompute the missing-subtitles list in the database."""
    steps = (
        (series_full_scan_subtitles, 'All existing subtitles indexed from disk.'),
        (list_missing_subtitles, 'All missing subtitles updated in database.'),
    )
    for step, done_message in steps:
        step()
        logging.info(done_message)
|
|
|
|
|
|
|
|
def update_all_movies():
    """Full re-scan for movies: index every subtitle file found on disk,
    then recompute the missing-subtitles list in the database."""
    steps = (
        (movies_full_scan_subtitles, 'All existing subtitles indexed from disk.'),
        (list_missing_subtitles, 'All missing subtitles updated in database.'),
    )
    for step, done_message in steps:
        step()
        logging.info(done_message)
|
|
|
|
|
|
|
|
def sync_episodes():
    """Synchronize the local episodes table with Sonarr.

    Pulls every episode for every known series from the Sonarr API,
    diffs the result against ``table_episodes``, deletes rows Sonarr no
    longer reports, inserts new ones (indexing their on-disk subtitles
    as they are added), then refreshes the missing-subtitles list.
    """
    from get_sonarr_settings import get_sonarr_settings

    # Fetch the settings tuple once instead of calling the getter per value.
    sonarr_settings = get_sonarr_settings()
    url_sonarr = sonarr_settings[0]
    apikey_sonarr = sonarr_settings[2]

    # Open database connection
    db = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'data/db/bazarr.db'), timeout=30)
    c = db.cursor()

    # Get current episodes id in DB
    current_episodes_db = c.execute('SELECT sonarrSeriesId, sonarrEpisodeId, title, path, season, episode, scene_name FROM table_episodes').fetchall()

    # Get episodes for each series currently known to the database.
    current_episodes_sonarr = []
    seriesIdList = c.execute("SELECT sonarrSeriesId FROM table_shows").fetchall()

    for seriesId in seriesIdList:
        # Get episodes data for a series from Sonarr
        url_sonarr_api_episode = url_sonarr + "/api/episode?seriesId=" + str(seriesId[0]) + "&apikey=" + apikey_sonarr
        r = requests.get(url_sonarr_api_episode)

        for episode in r.json():
            # Only track episodes that have a real file on disk; the 20480-byte
            # floor filters out stub/placeholder files.
            if episode['hasFile'] and episode['episodeFile']['size'] > 20480:
                # dict.get replaces the membership-test-then-index pattern;
                # None when Sonarr reports no scene name.
                sceneName = episode['episodeFile'].get('sceneName')
                current_episodes_sonarr.append((episode['seriesId'], episode['id'], episode['title'], episode['episodeFile']['path'], episode['seasonNumber'], episode['episodeNumber'], sceneName))

    # Diff DB state against Sonarr state (tuples compare field-by-field).
    added_episodes = list(set(current_episodes_sonarr) - set(current_episodes_db))
    removed_episodes = list(set(current_episodes_db) - set(current_episodes_sonarr))

    # Batch-delete stale rows and commit once, instead of one commit per row.
    c.executemany('DELETE FROM table_episodes WHERE sonarrEpisodeId = ?',
                  [(removed_episode[1],) for removed_episode in removed_episodes])
    db.commit()

    for added_episode in added_episodes:
        c.execute('''INSERT INTO table_episodes(sonarrSeriesId, sonarrEpisodeId, title, path, season, episode, scene_name) VALUES (?, ?, ?, ?, ?, ?, ?)''', added_episode)
        db.commit()
        # Index subtitles already on disk for the newly added episode.
        store_subtitles(path_replace(added_episode[3]))

    # Close cursor AND connection; the original leaked the connection handle.
    c.close()
    db.close()

    logging.debug('All episodes synced from Sonarr into database.')

    list_missing_subtitles()
    logging.debug('All missing subtitles updated in database.')
|