From 0fcef4d6743bcb404c99f0efcfdcc94d5951c58b Mon Sep 17 00:00:00 2001 From: Panagiotis Koutsias Date: Sun, 14 Jul 2019 03:29:05 +0300 Subject: [PATCH] Optimizes Greek providers (#489) * Optimizes providers * Adds episode id in subtitle page link * Adds episode id in subtitle page link --- .../providers/greeksubtitles.py | 3 +- libs/subliminal_patch/providers/subs4free.py | 93 ++++++++-------- .../subliminal_patch/providers/subs4series.py | 82 +++++++------- libs/subliminal_patch/providers/subz.py | 101 ++++++++---------- libs/subliminal_patch/providers/xsubs.py | 54 +++++----- 5 files changed, 154 insertions(+), 179 deletions(-) diff --git a/libs/subliminal_patch/providers/greeksubtitles.py b/libs/subliminal_patch/providers/greeksubtitles.py index 98dfc289e..979f4e191 100644 --- a/libs/subliminal_patch/providers/greeksubtitles.py +++ b/libs/subliminal_patch/providers/greeksubtitles.py @@ -8,7 +8,6 @@ import rarfile from subzero.language import Language from guessit import guessit from requests import Session -from six import text_type from subliminal import __short_version__ from subliminal.providers import ParserBeautifulSoup, Provider @@ -75,7 +74,7 @@ class GreekSubtitlesProvider(Provider): logger.debug('Searching subtitles %r', params) subtitles = [] - search_link = self.server_url + text_type(self.search_url).format(params) + search_link = self.server_url + self.search_url.format(params) while True: r = self.session.get(search_link, timeout=30) r.raise_for_status() diff --git a/libs/subliminal_patch/providers/subs4free.py b/libs/subliminal_patch/providers/subs4free.py index 181b99351..153b9f469 100644 --- a/libs/subliminal_patch/providers/subs4free.py +++ b/libs/subliminal_patch/providers/subs4free.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# encoding=utf8 import io import logging import os @@ -12,7 +11,6 @@ import zipfile from subzero.language import Language from guessit import guessit from requests import Session -from six import text_type from subliminal.providers import ParserBeautifulSoup, Provider from subliminal import __short_version__ @@ -75,6 +73,9 @@ class Subs4FreeProvider(Provider): server_url = 'https://www.sf4-industry.com' download_url = '/getSub.html' search_url = '/search_report.php?search={}&searchType=1' + anti_block_1 = 'https://images.subs4free.info/favicon.ico' + anti_block_2 = 'https://www.subs4series.com/includes/anti-block-layover.php?launch=1' + anti_block_3 = 'https://www.subs4series.com/includes/anti-block.php' subtitle_class = Subs4FreeSubtitle def __init__(self): @@ -87,62 +88,51 @@ class Subs4FreeProvider(Provider): def terminate(self): self.session.close() - def get_show_ids(self, title, year=None): - """Get the best matching show id for `series` and `year``. + def get_show_links(self, title, year=None): + """Get the matching show links for `title` and `year`. First search in the result of :meth:`_get_show_suggestions`. :param title: show title. :param year: year of the show, if any. :type year: int - :return: the show id, if found. - :rtype: str + :return: the show links, if found. 
+ :rtype: list of str """ - title_sanitized = sanitize(title).lower() - show_ids = self._get_suggestions(title) + title = sanitize(title) + suggestions = self._get_suggestions(title) - matched_show_ids = [] - for show in show_ids: - show_id = None - show_title = sanitize(show['title']) - # attempt with year - if not show_id and year: - logger.debug('Getting show id with year') - show_id = show['link'].split('?p=')[-1] if show_title == '{title} {year:d}'.format( - title=title_sanitized, year=year) else None + show_links = [] + for suggestion in suggestions: + show_title = sanitize(suggestion['title']) - # attempt clean - if not show_id: + if show_title == title or (year and show_title == '{title} {year:d}'.format(title=title, year=year)): logger.debug('Getting show id') - show_id = show['link'].split('?p=')[-1] if show_title == title_sanitized else None + show_links.append(suggestion['link'].split('?p=')[-1]) - if show_id: - matched_show_ids.append(show_id) + return show_links - return matched_show_ids - - @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type, - should_cache_fn=lambda value: value) + @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value) def _get_suggestions(self, title): """Search the show or movie id from the `title` and `year`. :param str title: title of the show. :return: the show suggestions found. - :rtype: dict + :rtype: list of dict """ # make the search logger.info('Searching show ids with %r', title) - r = self.session.get(self.server_url + text_type(self.search_url).format(title), + r = self.session.get(self.server_url + self.search_url.format(title), headers={'Referer': self.server_url}, timeout=10) r.raise_for_status() if not r.content: logger.debug('No data returned from provider') - return {} + return [] - soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser']) + soup = ParserBeautifulSoup(r.content, ['html.parser']) suggestions = [{'link': l.attrs['value'], 'title': l.text} for l in soup.select('select[name="Mov_sel"] > option[value]')] logger.debug('Found suggestions: %r', suggestions) @@ -155,7 +145,7 @@ class Subs4FreeProvider(Provider): if movie_id: page_link = self.server_url + '/' + movie_id else: - page_link = self.server_url + text_type(self.search_url).format(' '.join([title, str(year)])) + page_link = self.server_url + self.search_url.format(' '.join([title, str(year)])) r = self.session.get(page_link, timeout=10) r.raise_for_status() @@ -166,26 +156,26 @@ class Subs4FreeProvider(Provider): soup = ParserBeautifulSoup(r.content, ['html.parser']) - year_num = None + year = None year_element = soup.select_one('td#dates_header > table div') matches = False if year_element: matches = year_re.match(str(year_element.contents[2]).strip()) if matches: - year_num = int(matches.group(1)) + year = int(matches.group(1)) - title_element = soup.select_one('td#dates_header > table u') - show_title = str(title_element.contents[0]).strip() if title_element else None + title_tag = soup.select_one('td#dates_header > table u') + show_title = str(title_tag.contents[0]).strip() if title_tag else None subtitles = [] # loop over episode rows - for subtitle in soup.select('table.table_border div[align="center"] > div'): + for subs_tag in soup.select('table .seeDark,.seeMedium'): # read common info - version = subtitle.find('b').text - download_link = self.server_url + subtitle.find('a')['href'] - language = Language.fromalpha2(subtitle.find('img')['src'].split('/')[-1].split('.')[0]) + version 
= subs_tag.find('b').text + download_link = self.server_url + subs_tag.find('a')['href'] + language = Language.fromalpha2(subs_tag.find('img')['src'].split('/')[-1].split('.')[0]) - subtitle = self.subtitle_class(language, page_link, show_title, year_num, version, download_link) + subtitle = self.subtitle_class(language, page_link, show_title, year, version, download_link) logger.debug('Found subtitle {!r}'.format(subtitle)) subtitles.append(subtitle) @@ -196,19 +186,19 @@ class Subs4FreeProvider(Provider): # lookup show_id titles = [video.title] + video.alternative_titles if isinstance(video, Movie) else [] - show_ids = None + show_links = None for title in titles: - show_ids = self.get_show_ids(title, video.year) - if show_ids and len(show_ids) > 0: + show_links = self.get_show_links(title, video.year) + if show_links: break subtitles = [] # query for subtitles with the show_id - if show_ids and len(show_ids) > 0: - for show_id in show_ids: - subtitles += [s for s in self.query(show_id, video.title, video.year) if s.language in languages] + if show_links: + for show_link in show_links: + subtitles += [s for s in self.query(show_link, video.title, video.year) if s.language in languages] else: - subtitles += [s for s in self.query(None, video.title, video.year) if s.language in languages] + subtitles += [s for s in self.query(None, sanitize(video.title), video.year) if s.language in languages] return subtitles @@ -234,8 +224,10 @@ class Subs4FreeProvider(Provider): logger.debug('Unable to download subtitle. No download link found') return + self.apply_anti_block(subtitle) + download_url = self.server_url + self.download_url - r = self.session.post(download_url, data={'utf8': 1, 'id': subtitle_id, 'x': random.randint(0, width), + r = self.session.post(download_url, data={'id': subtitle_id, 'x': random.randint(0, width), 'y': random.randint(0, height)}, headers={'Referer': subtitle.download_link}, timeout=10) r.raise_for_status() @@ -253,6 +245,11 @@ class Subs4FreeProvider(Provider): else: logger.debug('Could not extract subtitle from %r', archive) + def apply_anti_block(self, subtitle): + self.session.get(self.anti_block_1, headers={'Referer': subtitle.download_link}, timeout=10) + self.session.get(self.anti_block_2, headers={'Referer': subtitle.download_link}, timeout=10) + self.session.get(self.anti_block_3, headers={'Referer': subtitle.download_link}, timeout=10) + def _get_archive(content): # open the archive diff --git a/libs/subliminal_patch/providers/subs4series.py b/libs/subliminal_patch/providers/subs4series.py index 5f381feeb..d58d8aa23 100644 --- a/libs/subliminal_patch/providers/subs4series.py +++ b/libs/subliminal_patch/providers/subs4series.py @@ -10,7 +10,6 @@ import zipfile from subzero.language import Language from guessit import guessit from requests import Session -from six import text_type from subliminal.providers import ParserBeautifulSoup, Provider from subliminal import __short_version__ @@ -73,6 +72,8 @@ class Subs4SeriesProvider(Provider): server_url = 'https://www.subs4series.com' search_url = '/search_report.php?search={}&searchType=1' episode_link = '/tv-series/{show_id}/season-{season:d}/episode-{episode:d}' + anti_block_1 = '/includes/anti-block-layover.php?launch=1' + anti_block_2 = '/includes/anti-block.php' subtitle_class = Subs4SeriesSubtitle def __init__(self): @@ -85,62 +86,51 @@ class Subs4SeriesProvider(Provider): def terminate(self): self.session.close() - def get_show_ids(self, title, year=None): - """Get the best matching show id for `series` and 
`year`. + def get_show_links(self, title, year=None): + """Get the matching show links for `title` and `year`. First search in the result of :meth:`_get_show_suggestions`. :param title: show title. :param year: year of the show, if any. :type year: int - :return: the show id, if found. - :rtype: str + :return: the show links, if found. + :rtype: list of str """ - title_sanitized = sanitize(title).lower() - show_ids = self._get_suggestions(title) + title = sanitize(title) + suggestions = self._get_suggestions(title) - matched_show_ids = [] - for show in show_ids: - show_id = None - show_title = sanitize(show['title']) - # attempt with year - if not show_id and year: - logger.debug('Getting show id with year') - show_id = '/'.join(show['link'].rsplit('/', 2)[1:]) if show_title == '{title} {year:d}'.format( - title=title_sanitized, year=year) else None + show_links = [] + for suggestion in suggestions: + show_title = sanitize(suggestion['title']) - # attempt clean - if not show_id: - logger.debug('Getting show id') - show_id = '/'.join(show['link'].rsplit('/', 2)[1:]) if show_title == title_sanitized else None + if show_title == title or (year and show_title == '{title} {year:d}'.format(title=title, year=year)): + logger.debug('Getting show link') + show_links.append('/'.join(suggestion['link'].rsplit('/', 2)[1:])) - if show_id: - matched_show_ids.append(show_id) + return show_links - return matched_show_ids - - @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type, - should_cache_fn=lambda value: value) + @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value) def _get_suggestions(self, title): """Search the show or movie id from the `title` and `year`. :param str title: title of the show. :return: the show suggestions found. 
- :rtype: dict + :rtype: list of dict """ # make the search logger.info('Searching show ids with %r', title) - r = self.session.get(self.server_url + text_type(self.search_url).format(title), + r = self.session.get(self.server_url + self.search_url.format(title), headers={'Referer': self.server_url}, timeout=10) r.raise_for_status() if not r.content: logger.debug('No data returned from provider') - return {} + return [] - soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser']) + soup = ParserBeautifulSoup(r.content, ['html.parser']) series = [{'link': l.attrs['value'], 'title': l.text} for l in soup.select('select[name="Mov_sel"] > option[value]')] logger.debug('Found suggestions: %r', series) @@ -164,21 +154,21 @@ class Subs4SeriesProvider(Provider): soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser']) - year_num = None + year = None matches = year_re.match(str(soup.select_one('#dates_header_br > table div').contents[2]).strip()) if matches: - year_num = int(matches.group(1)) - show_title = str(soup.select_one('#dates_header_br > table u').contents[0]).strip() + year = int(matches.group(1)) + show_title = str(soup.select_one('#dates_header_br > table div u').string).strip() subtitles = [] # loop over episode rows - for subtitle in soup.select('table.table_border div[align="center"] > div'): + for subs_tag in soup.select('table .seeDark,.seeMedium'): # read common info - version = subtitle.find('b').text - download_link = self.server_url + subtitle.find('a')['href'] - language = Language.fromalpha2(subtitle.find('img')['src'].split('/')[-1].split('.')[0]) + version = subs_tag.find('b').text + download_link = self.server_url + subs_tag.find('a')['href'] + language = Language.fromalpha2(subs_tag.find('img')['src'].split('/')[-1].split('.')[0]) - subtitle = self.subtitle_class(language, page_link, show_title, year_num, version, download_link) + subtitle = self.subtitle_class(language, page_link, show_title, year, version, download_link) logger.debug('Found subtitle %r', subtitle) subtitles.append(subtitle) @@ -189,16 +179,16 @@ class Subs4SeriesProvider(Provider): # lookup show_id titles = [video.series] + video.alternative_series if isinstance(video, Episode) else [] - show_ids = None + show_links = None for title in titles: - show_ids = self.get_show_ids(title, video.year) - if show_ids and len(show_ids) > 0: + show_links = self.get_show_links(title, video.year) + if show_links: break subtitles = [] # query for subtitles with the show_id - for show_id in show_ids: - subtitles += [s for s in self.query(show_id, video.series, video.season, video.episode, video.title) + for show_link in show_links: + subtitles += [s for s in self.query(show_link, video.series, video.season, video.episode, video.title) if s.language in languages] return subtitles @@ -226,6 +216,8 @@ class Subs4SeriesProvider(Provider): logger.debug('Unable to download subtitle. 
No download link found') return + self.apply_anti_block(subtitle) + download_url = self.server_url + target r = self.session.get(download_url, headers={'Referer': subtitle.download_link}, timeout=10) r.raise_for_status() @@ -242,6 +234,10 @@ class Subs4SeriesProvider(Provider): else: logger.debug('Could not extract subtitle from %r', archive) + def apply_anti_block(self, subtitle): + self.session.get(self.server_url + self.anti_block_1, headers={'Referer': subtitle.download_link}, timeout=10) + self.session.get(self.server_url + self.anti_block_2, headers={'Referer': subtitle.download_link}, timeout=10) + def _get_archive(content): # open the archive diff --git a/libs/subliminal_patch/providers/subz.py b/libs/subliminal_patch/providers/subz.py index dc95cb8d7..1519c7f9e 100644 --- a/libs/subliminal_patch/providers/subz.py +++ b/libs/subliminal_patch/providers/subz.py @@ -11,7 +11,6 @@ import zipfile from subzero.language import Language from guessit import guessit from requests import Session -from six import text_type from subliminal.providers import ParserBeautifulSoup, Provider from subliminal import __short_version__ @@ -113,8 +112,8 @@ class SubzProvider(Provider): def terminate(self): self.session.close() - def get_show_ids(self, title, year=None, is_episode=True, country_code=None): - """Get the best matching show id for `series`, `year` and `country_code`. + def get_show_links(self, title, year=None, is_episode=True): + """Get the matching show links for `title` and `year`. First search in the result of :meth:`_get_show_suggestions`. @@ -123,61 +122,41 @@ class SubzProvider(Provider): :type year: int :param is_episode: if the search is for episode. :type is_episode: bool - :param country_code: country code of the show, if any. - :type country_code: str - :return: the show id, if found. - :rtype: str + :return: the show links, if found. + :rtype: list of str """ - title_sanitized = sanitize(title).lower() - show_ids = self._get_suggestions(title, is_episode) + title = sanitize(title) + suggestions = self._get_suggestions(title, is_episode) - matched_show_ids = [] - for show in show_ids: - show_id = None - # attempt with country - if not show_id and country_code: - logger.debug('Getting show id with country') - if sanitize(show['title']) == text_type('{title} {country}').format(title=title_sanitized, - country=country_code.lower()): - show_id = show['link'].split('/')[-1] - - # attempt with year - if not show_id and year: - logger.debug('Getting show id with year') - if sanitize(show['title']) == text_type('{title} {year}').format(title=title_sanitized, year=year): - show_id = show['link'].split('/')[-1] - - # attempt clean - if not show_id: + show_links = [] + for suggestion in suggestions: + if sanitize(suggestion['title']) == title or \ + (year and sanitize(suggestion['title']) == '{title} {year}'.format(title=title, year=year)): logger.debug('Getting show id') - show_id = show['link'].split('/')[-1] if sanitize(show['title']) == title_sanitized else None + show_links.append(suggestion['link'].split('/')[-1]) - if show_id: - matched_show_ids.append(show_id) + return show_links - return matched_show_ids - - @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, to_str=text_type, - should_cache_fn=lambda value: value) + @region.cache_on_arguments(expiration_time=SHOW_EXPIRATION_TIME, should_cache_fn=lambda value: value) def _get_suggestions(self, title, is_episode=True): - """Search the show or movie id from the `title` and `year`. 
+ """Search the show or movie id from the `title`. :param str title: title of the show. :param is_episode: if the search is for episode. :type is_episode: bool :return: the show suggestions found. - :rtype: dict + :rtype: list of dict """ # make the search logger.info('Searching show ids with %r', title) - r = self.session.get(self.server_url + text_type(self.search_url).format(title), timeout=10) + r = self.session.get(self.server_url + self.search_url.format(title), timeout=10) r.raise_for_status() if not r.content: logger.debug('No data returned from provider') - return {} + return [] show_type = 'series' if is_episode else 'movie' parsed_suggestions = [s for s in json.loads(r.text) if 'type' in s and s['type'] == show_type] @@ -198,6 +177,9 @@ class SubzProvider(Provider): return [] r = self.session.get(page_link, timeout=10) + if r.status_code == 404: + return [] + r.raise_for_status() if not r.content: @@ -206,36 +188,37 @@ class SubzProvider(Provider): soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser']) - year_num = None + year = None if not is_episode: - year_num = int(soup.select_one('span.year').text) - show_title = str(soup.select_one('#summary-wrapper > div.summary h1').contents[0]).strip() + year = int(soup.select_one('span.year').text) subtitles = [] # loop over episode rows - for subtitle in soup.select('div[id="subtitles"] tr[data-id]'): + for subs_tag in soup.select('div[id="subtitles"] tr[data-id]'): # read common info - version = subtitle.find('td', {'class': 'name'}).text - download_link = subtitle.find('a', {'class': 'btn-success'})['href'].strip('\'') + version = subs_tag.find('td', {'class': 'name'}).text + download_link = subs_tag.find('a', {'class': 'btn-success'})['href'].strip('\'') # read the episode info if is_episode: episode_numbers = soup.select_one('#summary-wrapper > div.container.summary span.main-title-sxe').text - season_num = None - episode_num = None + season = None + episode = None matches = episode_re.match(episode_numbers.strip()) if matches: - season_num = int(matches.group(1)) - episode_num = int(matches.group(2)) + season = int(matches.group(1)) + episode = int(matches.group(2)) - episode_title = soup.select_one('#summary-wrapper > div.container.summary span.main-title').text + series = soup.select_one('#summary-wrapper > div.summary h2 > a').string.strip() + title = soup.select_one('#summary-wrapper > div.container.summary span.main-title').text - subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, show_title, season_num, - episode_num, episode_title, year_num, version, download_link) + subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series, season, episode, title, + year, version, download_link) # read the movie info else: - subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, None, None, None, show_title, - year_num, version, download_link) + title = str(soup.select_one('#summary-wrapper > div.summary h1').contents[0]).strip() + subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, None, None, None, title, year, + version, download_link) logger.debug('Found subtitle %r', subtitle) subtitles.append(subtitle) @@ -251,20 +234,20 @@ class SubzProvider(Provider): else: titles = [] - show_ids = None + show_links = None for title in titles: - show_ids = self.get_show_ids(title, video.year, isinstance(video, Episode)) - if show_ids is not None and len(show_ids) > 0: + show_links = self.get_show_links(title, video.year, isinstance(video, Episode)) + if show_links is not 
None and len(show_links) > 0: break subtitles = [] # query for subtitles with the show_id - for show_id in show_ids: + for show_links in show_links: if isinstance(video, Episode): - subtitles += [s for s in self.query(show_id, video.series, video.season, video.episode, video.title) + subtitles += [s for s in self.query(show_links, video.series, video.season, video.episode, video.title) if s.language in languages and s.season == video.season and s.episode == video.episode] elif isinstance(video, Movie): - subtitles += [s for s in self.query(show_id, None, None, None, video.title) + subtitles += [s for s in self.query(show_links, None, None, None, video.title) if s.language in languages and s.year == video.year] return subtitles diff --git a/libs/subliminal_patch/providers/xsubs.py b/libs/subliminal_patch/providers/xsubs.py index 102571dd9..c584aacf9 100644 --- a/libs/subliminal_patch/providers/xsubs.py +++ b/libs/subliminal_patch/providers/xsubs.py @@ -17,6 +17,7 @@ from subliminal.video import Episode logger = logging.getLogger(__name__) article_re = re.compile(r'^([A-Za-z]{1,3}) (.*)$') +episode_re = re.compile(r'^(\d+)(-(\d+))*$') class XSubsSubtitle(Subtitle): @@ -80,7 +81,7 @@ class XSubsProvider(Provider): all_series_url = '/series/all.xml' series_url = '/series/{:d}/main.xml' season_url = '/series/{show_id:d}/{season:d}.xml' - page_link = '/ice/xsw.xml?srsid={show_id:d}#{season_id:d};{season:d}' + page_link = '/ice/xsw.xml?srsid={show_id:d}#{season_id:d};{season:d};{episode:d}' download_link = '/xthru/getsub/{:d}' subtitle_class = XSubsSubtitle @@ -147,7 +148,7 @@ class XSubsProvider(Provider): return show_ids - def get_show_id(self, series_names, year=None, country_code=None): + def get_show_id(self, series_names, year=None): series_sanitized_names = [] for name in series_names: sanitized_name = sanitize(name) @@ -160,14 +161,8 @@ class XSubsProvider(Provider): show_id = None for series_sanitized in series_sanitized_names: - # attempt with country - if not show_id and country_code: - logger.debug('Getting show id with country') - show_id = show_ids.get('{series} {country}'.format(series=series_sanitized, - country=country_code.lower())) - # attempt with year - if not show_id and year: + if year: logger.debug('Getting show id with year') show_id = show_ids.get('{series} {year:d}'.format(series=series_sanitized, year=year)) @@ -198,7 +193,7 @@ class XSubsProvider(Provider): soup = ParserBeautifulSoup(r.content, ['lxml', 'html.parser']) - series_title = soup.find('name').text + series = soup.find('name').text # loop over season rows seasons = soup.findAll('series_group') @@ -230,35 +225,40 @@ class XSubsProvider(Provider): subtitles = [] # loop over episode rows - for episode in soup.findAll('subg'): + for subtitle_group in soup.findAll('subg'): # read the episode info - etitle = episode.find('etitle') - if etitle is None: + episode_info = subtitle_group.find('etitle') + if episode_info is None: continue - episode_num = int(etitle['number'].split('-')[0]) + episodes = [] + episode_match = episode_re.match(episode_info['number']) + if episode_match: + episodes = [int(e) for e in [episode_match.group(1), episode_match.group(3)] if e] - sgt = episode.find('sgt') - if sgt is None: + subtitle_info = subtitle_group.find('sgt') + if subtitle_info is None: continue - season_num = int(sgt['ssnnum']) + season = int(subtitle_info['ssnnum']) + episode_id = int(subtitle_info['epsid']) # filter out unreleased subtitles - for subtitle in episode.findAll('sr'): - if subtitle['published_on'] == '': + 
for subs_tag in subtitle_group.findAll('sr'): + if subs_tag['published_on'] == '': continue page_link = self.server_url + self.page_link.format(show_id=show_id, season_id=season_id, - season=season_num) - episode_title = etitle['title'] - version = subtitle.fmt.text + ' ' + subtitle.team.text - download_link = self.server_url + self.download_link.format(int(subtitle['rlsid'])) + season=season, episode=episode_id) + title = episode_info['title'] + version = subs_tag.fmt.text + ' ' + subs_tag.team.text + download_link = self.server_url + self.download_link.format(int(subs_tag['rlsid'])) - subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series_title, season_num, - episode_num, year, episode_title, version, download_link) - logger.debug('Found subtitle %r', subtitle) - subtitles.append(subtitle) + for episode in episodes: + subtitle = self.subtitle_class(Language.fromalpha2('el'), page_link, series, season, episode, year, + title, version, download_link) + logger.debug('Found subtitle %r', subtitle) + subtitles.append(subtitle) return subtitles
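For reference, the multi-episode handling that the xsubs.py hunk introduces can be exercised in isolation. The sketch below reuses the `episode_re` pattern and the episode-list expression from the patch; the `parse_episodes` wrapper and the sample values are illustrative only and are not part of the provider itself.

    # Minimal sketch of the multi-episode parsing added to xsubs.py.
    # parse_episodes() is a hypothetical helper; the provider inlines this
    # logic while iterating over the <subg>/<etitle> elements of the season feed.
    import re

    episode_re = re.compile(r'^(\d+)(-(\d+))*$')

    def parse_episodes(number):
        """Return the episode numbers encoded in an etitle 'number' attribute."""
        episodes = []
        episode_match = episode_re.match(number)
        if episode_match:
            # group(1) is the first episode; group(3) is the last one of an "xx-yy" range
            episodes = [int(e) for e in [episode_match.group(1), episode_match.group(3)] if e]
        return episodes

    print(parse_episodes('7'))      # [7]      -> one subtitle per matching <sr> release
    print(parse_episodes('11-12'))  # [11, 12] -> the same release is attached to both episodes
    print(parse_episodes('n/a'))    # []       -> no subtitles are built for that group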