# coding=utf-8
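"""Argenteam subtitle provider for subliminal_patch.

Searches the argenteam.net JSON API by IMDB ID and serves Latin American
Spanish subtitles for both movies and episodes.
"""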
from __future__ import absolute_import

from json import JSONDecodeError
import logging
import os
import urllib.parse

from requests import Session
from subliminal import Episode
from subliminal import Movie
from subliminal_patch.providers import Provider
from subliminal_patch.providers.mixins import ProviderSubtitleArchiveMixin
from subliminal_patch.providers.utils import get_archive_from_bytes
from subliminal_patch.providers.utils import get_subtitle_from_archive
from subliminal_patch.providers.utils import update_matches
from subliminal_patch.subtitle import Subtitle
from subzero.language import Language

BASE_URL = "https://argenteam.net"
API_URL = f"{BASE_URL}/api/v1"

logger = logging.getLogger(__name__)


class ArgenteamSubtitle(Subtitle):
    provider_name = "argenteam"
    hearing_impaired_verifiable = False

    def __init__(self, language, page_link, download_link, release_info, matches):
        super(ArgenteamSubtitle, self).__init__(language, page_link=page_link)

        self._found_matches = matches

        self.page_link = page_link
        self.download_link = download_link
        self.release_info = release_info

    @property
    def id(self):
        return self.download_link

    def get_matches(self, video):
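        # update_matches() (subliminal_patch.providers.utils) adds matches
        # guessed from the release info string on top of the ones already
        # computed in ArgenteamProvider.query().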
        update_matches(self._found_matches, video, self.release_info)

        return self._found_matches


class ArgenteamProvider(Provider, ProviderSubtitleArchiveMixin):
    provider_name = "argenteam"

    languages = {Language("spa", "MX")}
    video_types = (Episode, Movie)
    subtitle_class = ArgenteamSubtitle

    _default_lang = Language("spa", "MX")

    def __init__(self):
        self.session = Session()

    def initialize(self):
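        # Identify ourselves with the Sub-Zero user agent unless the
        # SZ_USER_AGENT environment variable overrides it.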
        self.session.headers.update(
            {"User-Agent": os.environ.get("SZ_USER_AGENT", "Sub-Zero/2")}
        )

    def terminate(self):
        self.session.close()

    def query(self, video):
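        """Search argenteam by the video's IMDB ID and return every matching
        subtitle, or an empty list if the video has no IMDB ID or nothing is
        found."""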
        is_episode = isinstance(video, Episode)
        imdb_id = video.series_imdb_id if is_episode else video.imdb_id

        if not imdb_id:
            logger.debug("%s doesn't have an IMDB ID. Can't search", video)
            return []

        if is_episode:
            argenteam_ids = self._search_ids(
                imdb_id, season=video.season, episode=video.episode
            )
        else:
            argenteam_ids = self._search_ids(imdb_id)

        if not argenteam_ids:
            logger.debug("No IDs found")
            return []

        return self._parse_subtitles(argenteam_ids, is_episode)

    def _parse_subtitles(self, ids, is_episode=True):
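        # For every argenteam ID, fetch f"{API_URL}/{movie_kind}" and build an
        # ArgenteamSubtitle for each subtitle listed under the response's
        # releases.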
        movie_kind = "episode" if is_episode else "movie"

        subtitles = []

        for aid in ids:
            response = self.session.get(
                f"{API_URL}/{movie_kind}", params={"id": aid}, timeout=10
            )
            response.raise_for_status()

            try:
                content = response.json()
            except JSONDecodeError:
                continue

            if not content or not content.get("releases"):
                continue

            for r in content["releases"]:
                for s in r["subtitles"]:
                    page_link = f"{BASE_URL}/{movie_kind}/{aid}"

                    release_info = self._combine_release_info(r, s)

                    logger.debug("Got release info: %s", release_info)

                    download_link = s["uri"].replace("http://", "https://")

                    # Already matched within query
                    if is_episode:
                        matches = {"series", "title", "season", "episode", "imdb_id", "year"}
                    else:
                        matches = {"title", "year", "imdb_id"}

                    subtitles.append(
                        ArgenteamSubtitle(
                            self._default_lang,
                            page_link,
                            download_link,
                            release_info,
                            matches,
                        )
                    )

        return subtitles

    def list_subtitles(self, video, languages):
        return self.query(video)

    def download_subtitle(self, subtitle):
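        # The download link points at a compressed archive; extract the
        # subtitle file and store its raw bytes on the subtitle object.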
        r = self.session.get(subtitle.download_link, timeout=10)
        r.raise_for_status()

        archive = get_archive_from_bytes(r.content)
        subtitle.content = get_subtitle_from_archive(archive)

    def _search_ids(self, identifier, **kwargs):
        """Return a list of argenteam IDs matching the given identifier.

        :param identifier: IMDB ID (with or without the "tt" prefix) or
            title (without year)
        :param season: optional season number, combined with episode into
            an SxxExx suffix for the search query
        :param episode: optional episode number
        """
        # Strip only a literal "tt" prefix; str.lstrip("tt") would also eat
        # leading "t" characters from plain titles.
        if identifier.startswith("tt"):
            identifier = identifier[2:]

        query = identifier
        if kwargs.get("season") and kwargs.get("episode"):
            query = f"{identifier} S{kwargs['season']:02}E{kwargs['episode']:02}"

        logger.debug("Searching ID for %s", query)

        r = self.session.get(f"{API_URL}/search", params={"q": query}, timeout=10)
        r.raise_for_status()

        try:
            results = r.json()
        except JSONDecodeError:
            return []

        if not results.get("results"):
            return []

        match_ids = [result["id"] for result in results["results"]]
        logger.debug("Found matching IDs: %s", match_ids)

        return match_ids

    def _combine_release_info(self, release_dict, subtitle_dict):
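        # Combine the decoded subtitle file name with a release string built
        # from the source/codec/tags fields (plus the team, if any), one per
        # line, so matching can use all of them.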
        releases = [
            urllib.parse.unquote(subtitle_dict.get("uri", "Unknown").split("/")[-1])
        ]

        combine = [
            release_dict.get(key)
            for key in ("source", "codec", "tags")
            if release_dict.get(key)
        ]

        if combine:
            r_info = ".".join(combine)
            if release_dict.get("team"):
                r_info += f"-{release_dict['team']}"

            releases.append(r_info)

        return "\n".join(releases)
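
# Minimal usage sketch (not part of the provider API; assumes a `video`
# object already built by subliminal/subliminal_patch scanning):
#
#     provider = ArgenteamProvider()
#     provider.initialize()
#     try:
#         subs = provider.list_subtitles(video, {Language("spa", "MX")})
#         if subs:
#             provider.download_subtitle(subs[0])
#     finally:
#         provider.terminate()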