mirror of https://github.com/morpheus65535/bazarr

Merge pull request #818 from mvanbaak/compileall

Fix Python 3 incompatibility in libs/

Commit fcfb678c22
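The Python 3 fixes in this pull request follow a few recurring patterns: print statements become print() calls, the Python-2-only itertools.izip is replaced by the built-in zip behind a guarded import, and dict iteration drops iteritems(). A minimal sketch of the compatible idioms (illustrative only, not the literal patch):

    # coding=utf-8
    from __future__ import print_function  # print() behaves the same on Python 2 and 3

    try:
        # Python 2: izip is the lazy variant of zip
        from itertools import izip as zip
    except ImportError:
        # Python 3: the built-in zip is already lazy
        pass

    headers = {"User-Agent": "cloudflare-scrape", "Accept": "*/*"}

    # dict.iteritems() is gone in Python 3; .items() works on both
    for name, value in headers.items():
        print(name, value)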
@@ -1,395 +0,0 @@
# coding=utf-8

import logging
import random
import re
import os
import json
import base64

from copy import deepcopy
from time import sleep
from collections import OrderedDict
from .jsfuck import jsunfuck

import js2py
from requests.sessions import Session
from subliminal_patch.pitcher import pitchers

try:
    from requests_toolbelt.utils import dump
except ImportError:
    pass

try:
    from urlparse import urlparse
    from urlparse import urlunparse
except ImportError:
    from urllib.parse import urlparse
    from urllib.parse import urlunparse

brotli_available = True

try:
    from brotli import decompress as brdec
except:
    brotli_available = False

logger = logging.getLogger(__name__)

__version__ = "2.0.3"

# Originally written by https://github.com/Anorov/cloudflare-scrape
# Rewritten by VeNoMouS - <venom@gen-x.co.nz> for https://github.com/VeNoMouS/Sick-Beard - 24/3/2018 NZDT

DEFAULT_USER_AGENTS = [
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/65.0.3325.181 Chrome/65.0.3325.181 Safari/537.36",
    "Mozilla/5.0 (Linux; Android 7.0; Moto G (5) Build/NPPS25.137-93-8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.137 Mobile Safari/537.36",
    "Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53",
    "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:59.0) Gecko/20100101 Firefox/59.0",
    "Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0",
]

BUG_REPORT = """\
Cloudflare may have changed their technique, or there may be a bug in the script.
"""


cur_path = os.path.abspath(os.path.dirname(__file__))

if brotli_available:
    brwsrs = os.path.join(cur_path, "browsers_br.json")
    with open(brwsrs, "r") as f:
        UA_COMBO = json.load(f, object_pairs_hook=OrderedDict)["chrome"]

else:
    brwsrs = os.path.join(cur_path, "browsers.json")
    UA_COMBO = []
    with open(brwsrs, "r") as f:
        _brwsrs = json.load(f, object_pairs_hook=OrderedDict)
        for entry in _brwsrs:
            # .items() rather than the Python-2-only .iteritems(), so the loader also runs on Python 3
            _entry = OrderedDict(("-".join(a.capitalize() for a in key.split("-")), value)
                                 for key, value in entry.items())
            _entry["User-Agent"] = None
            UA_COMBO.append({"User-Agent": [entry["user-agent"]], "headers": _entry})


class NeedsCaptchaException(Exception):
    pass


class CloudflareScraper(Session):
    def __init__(self, *args, **kwargs):
        self.delay = kwargs.pop('delay', 8)
        self.debug = False
        self._was_cf = False
        self._ua = None
        self._hdrs = None

        super(CloudflareScraper, self).__init__(*args, **kwargs)

        if not self._ua:
            # Set a random User-Agent if no custom User-Agent has been set
            ua_combo = random.choice(UA_COMBO)
            self._ua = random.choice(ua_combo["User-Agent"])
            self._hdrs = ua_combo["headers"].copy()
            self._hdrs["User-Agent"] = self._ua
            self.headers['User-Agent'] = self._ua

    def set_cloudflare_challenge_delay(self, delay):
        if isinstance(delay, (int, float)) and delay > 0:
            self.delay = delay

    def is_cloudflare_challenge(self, resp):
        if resp.headers.get('Server', '').startswith('cloudflare'):
            if b'why_captcha' in resp.content or b'/cdn-cgi/l/chk_captcha' in resp.content:
                raise NeedsCaptchaException

            return (
                resp.status_code in [429, 503]
                and b"jschl_vc" in resp.content
                and b"jschl_answer" in resp.content
            )
        return False

    def debugRequest(self, req):
        try:
            print(dump.dump_all(req).decode('utf-8'))
        except:
            pass

    def request(self, method, url, *args, **kwargs):
        # self.headers = (
        #     OrderedDict(
        #         [
        #             ('User-Agent', self.headers['User-Agent']),
        #             ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
        #             ('Accept-Language', 'en-US,en;q=0.5'),
        #             ('Accept-Encoding', 'gzip, deflate'),
        #             ('Connection', 'close'),
        #             ('Upgrade-Insecure-Requests', '1')
        #         ]
        #     )
        # )
        self.headers = self._hdrs.copy()

        resp = super(CloudflareScraper, self).request(method, url, *args, **kwargs)
        if resp.headers.get('content-encoding') == 'br' and brotli_available:
            resp._content = brdec(resp._content)

        # Debug request
        if self.debug:
            self.debugRequest(resp)

        # Check if Cloudflare anti-bot is on
        try:
            if self.is_cloudflare_challenge(resp):
                self._was_cf = True
                # Work around if the initial request is not a GET:
                # supersede it with a GET, then re-request the original method.
                if resp.request.method != 'GET':
                    self.request('GET', resp.url)
                    resp = self.request(method, url, *args, **kwargs)
                else:
                    resp = self.solve_cf_challenge(resp, **kwargs)
        except NeedsCaptchaException:
            # solve the captcha
            self._was_cf = True
            site_key = re.search(r'data-sitekey="(.+?)"', resp.content).group(1)
            challenge_s = re.search(r'type="hidden" name="s" value="(.+?)"', resp.content).group(1)
            challenge_ray = re.search(r'data-ray="(.+?)"', resp.content).group(1)
            if not all([site_key, challenge_s, challenge_ray]):
                raise Exception("cf: Captcha site-key not found!")

            pitcher = pitchers.get_pitcher()("cf", resp.request.url, site_key,
                                             user_agent=self.headers["User-Agent"],
                                             cookies=self.cookies.get_dict(),
                                             is_invisible=True)

            parsed_url = urlparse(resp.url)
            domain = parsed_url.netloc
            logger.info("cf: %s: Solving captcha", domain)
            result = pitcher.throw()
            if not result:
                raise Exception("cf: Couldn't solve captcha!")

            submit_url = '{}://{}/cdn-cgi/l/chk_captcha'.format(parsed_url.scheme, domain)
            method = resp.request.method

            cloudflare_kwargs = {
                'allow_redirects': False,
                'headers': {'Referer': resp.url},
                'params': OrderedDict(
                    [
                        ('s', challenge_s),
                        ('g-recaptcha-response', result)
                    ]
                )
            }

            return self.request(method, submit_url, **cloudflare_kwargs)

        return resp

    def solve_cf_challenge(self, resp, **original_kwargs):
        body = resp.text

        # Cloudflare requires a delay before solving the challenge
        if self.delay == 8:
            try:
                delay = float(re.search(r'submit\(\);\r?\n\s*},\s*([0-9]+)', body).group(1)) / float(1000)
                if isinstance(delay, (int, float)):
                    self.delay = delay
            except:
                pass

        sleep(self.delay)

        parsed_url = urlparse(resp.url)
        domain = parsed_url.netloc
        submit_url = '{}://{}/cdn-cgi/l/chk_jschl'.format(parsed_url.scheme, domain)

        cloudflare_kwargs = deepcopy(original_kwargs)
        headers = cloudflare_kwargs.setdefault('headers', {'Referer': resp.url})

        try:
            params = cloudflare_kwargs.setdefault(
                'params', OrderedDict(
                    [
                        ('s', re.search(r'name="s"\svalue="(?P<s_value>[^"]+)', body).group('s_value')),
                        ('jschl_vc', re.search(r'name="jschl_vc" value="(\w+)"', body).group(1)),
                        ('pass', re.search(r'name="pass" value="(.+?)"', body).group(1)),
                    ]
                )
            )

        except Exception as e:
            # Something is wrong with the page.
            # This may indicate Cloudflare has changed their anti-bot
            # technique. If you see this and are running the latest version,
            # please open a GitHub issue so I can update the code accordingly.
            # str(e) rather than the Python-2-only e.message
            raise ValueError("Unable to parse Cloudflare anti-bots page: {} {}".format(e, BUG_REPORT))

        # Solve the Javascript challenge
        params['jschl_answer'] = self.solve_challenge(body, domain)

        # Requests transforms any request into a GET after a redirect,
        # so the redirect has to be handled manually here to allow for
        # performing other types of requests even as the first request.
        method = resp.request.method

        cloudflare_kwargs['allow_redirects'] = False

        redirect = self.request(method, submit_url, **cloudflare_kwargs)
        redirect_location = urlparse(redirect.headers['Location'])
        if not redirect_location.netloc:
            redirect_url = urlunparse(
                (
                    parsed_url.scheme,
                    domain,
                    redirect_location.path,
                    redirect_location.params,
                    redirect_location.query,
                    redirect_location.fragment
                )
            )
            return self.request(method, redirect_url, **original_kwargs)

        return self.request(method, redirect.headers['Location'], **original_kwargs)

    def solve_challenge(self, body, domain):
        try:
            js = re.search(
                r"setTimeout\(function\(\){\s+(var s,t,o,p,b,r,e,a,k,i,n,g,f.+?\r?\n[\s\S]+?a\.value =.+?)\r?\n",
                body
            ).group(1)
        except Exception:
            raise ValueError("Unable to identify Cloudflare IUAM Javascript on website. {}".format(BUG_REPORT))

        js = re.sub(r"a\.value = ((.+).toFixed\(10\))?", r"\1", js)
        js = re.sub(r'(e\s=\sfunction\(s\)\s{.*?};)', '', js, flags=re.DOTALL | re.MULTILINE)
        js = re.sub(r"\s{3,}[a-z](?: = |\.).+", "", js).replace("t.length", str(len(domain)))

        js = js.replace('; 121', '')

        # Strip characters that could be used to exit the string context.
        # These characters are not currently used in Cloudflare's arithmetic snippet.
        js = re.sub(r"[\n\\']", "", js)

        if 'toFixed' not in js:
            raise ValueError("Error parsing Cloudflare IUAM Javascript challenge. {}".format(BUG_REPORT))

        try:
            # Minimal JS environment (base64 decoder, document stub) so js2py can evaluate the challenge
            jsEnv = """
            var t = "{domain}";
            var g = String.fromCharCode;

            o = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
            e = function(s) {{
                s += "==".slice(2 - (s.length & 3));
                var bm, r = "", r1, r2, i = 0;
                for (; i < s.length;) {{
                    bm = o.indexOf(s.charAt(i++)) << 18 | o.indexOf(s.charAt(i++)) << 12 | (r1 = o.indexOf(s.charAt(i++))) << 6 | (r2 = o.indexOf(s.charAt(i++)));
                    r += r1 === 64 ? g(bm >> 16 & 255) : r2 === 64 ? g(bm >> 16 & 255, bm >> 8 & 255) : g(bm >> 16 & 255, bm >> 8 & 255, bm & 255);
                }}
                return r;
            }};

            function italics (str) {{ return '<i>' + this + '</i>'; }};
            var document = {{
                getElementById: function () {{
                    return {{'innerHTML': '{innerHTML}'}};
                }}
            }};
            {js}
            """

            innerHTML = re.search(
                '<div(?: [^<>]*)? id="([^<>]*?)">([^<>]*?)<\/div>',
                body,
                re.MULTILINE | re.DOTALL
            )
            innerHTML = innerHTML.group(2).replace("'", r"\'") if innerHTML else ""

            js = jsunfuck(jsEnv.format(domain=domain, innerHTML=innerHTML, js=js))

            def atob(s):
                return base64.b64decode('{}'.format(s)).decode('utf-8')

            js2py.disable_pyimport()
            context = js2py.EvalJs({'atob': atob})
            result = context.eval(js)
        except Exception:
            logging.error("Error executing Cloudflare IUAM Javascript. {}".format(BUG_REPORT))
            raise

        try:
            float(result)
        except Exception:
            raise ValueError("Cloudflare IUAM challenge returned unexpected answer. {}".format(BUG_REPORT))

        return result

    @classmethod
    def create_scraper(cls, sess=None, **kwargs):
        """
        Convenience function for creating a ready-to-go CloudflareScraper object.
        """
        scraper = cls(**kwargs)

        if sess:
            attrs = ['auth', 'cert', 'cookies', 'headers', 'hooks', 'params', 'proxies', 'data']
            for attr in attrs:
                val = getattr(sess, attr, None)
                if val:
                    setattr(scraper, attr, val)

        return scraper

    # Functions for integrating cloudflare-scrape with other applications and scripts
    @classmethod
    def get_tokens(cls, url, user_agent=None, debug=False, **kwargs):
        scraper = cls.create_scraper()
        scraper.debug = debug

        if user_agent:
            scraper.headers['User-Agent'] = user_agent

        try:
            resp = scraper.get(url, **kwargs)
            resp.raise_for_status()
        except Exception as e:
            logging.error("'{}' returned an error. Could not collect tokens.".format(url))
            raise

        domain = urlparse(resp.url).netloc
        cookie_domain = None

        for d in scraper.cookies.list_domains():
            if d.startswith('.') and d in ('.{}'.format(domain)):
                cookie_domain = d
                break
        else:
            raise ValueError("Unable to find Cloudflare cookies. Does the site actually have Cloudflare IUAM (\"I'm Under Attack Mode\") enabled?")

        return (
            {
                '__cfduid': scraper.cookies.get('__cfduid', '', domain=cookie_domain),
                'cf_clearance': scraper.cookies.get('cf_clearance', '', domain=cookie_domain)
            },
            scraper.headers['User-Agent']
        )

    @classmethod
    def get_cookie_string(cls, url, user_agent=None, debug=False, **kwargs):
        """
        Convenience function for building a Cookie HTTP header value.
        """
        tokens, user_agent = cls.get_tokens(url, user_agent=user_agent, debug=debug, **kwargs)
        return "; ".join("=".join(pair) for pair in tokens.items()), user_agent


create_scraper = CloudflareScraper.create_scraper
get_tokens = CloudflareScraper.get_tokens
get_cookie_string = CloudflareScraper.get_cookie_string
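For context, a minimal usage sketch of the convenience helpers this module exposes; the import name cfscrape is an assumption about how the package under libs/ is imported, and the URL is a placeholder:

    import cfscrape  # hypothetical import path for the module above

    # Drop-in replacement for requests.Session that solves the IUAM challenge transparently
    scraper = cfscrape.create_scraper(delay=10)
    html = scraper.get("https://example.org/").text

    # Or only harvest the clearance cookies plus the matching User-Agent for another client
    tokens, user_agent = cfscrape.get_tokens("https://example.org/")
    cookie_header, user_agent = cfscrape.get_cookie_string("https://example.org/")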
@@ -1,80 +0,0 @@
[
    {
        "connection": "close",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "user-agent": "Mozilla/5.0 (Windows NT 5.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.102 Safari/537.36",
        "accept-encoding": "gzip,deflate",
        "accept-language": "en-US,en;q=0.8"
    },
    {
        "connection": "close",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "user-agent": "Mozilla/5.0 (Windows NT 5.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.101 Safari/537.36",
        "accept-encoding": "gzip,deflate",
        "accept-language": "en-US,en;q=0.8"
    },
    {
        "connection": "close",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36",
        "accept-language": "en-US,en;q=0.8",
        "accept-encoding": "gzip, deflate, "
    },
    {
        "connection": "close",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.75 Safari/537.36",
        "accept-language": "en-US,en;q=0.8",
        "accept-encoding": "gzip, deflate, "
    },
    {
        "connection": "close",
        "accept": "*/*",
        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:30.0) Gecko/20100101 Firefox/30.0"
    },
    {
        "connection": "close",
        "accept": "image/jpeg, image/gif, image/pjpeg, application/x-ms-application, application/xaml+xml, application/x-ms-xbap, */*",
        "accept-language": "en-US",
        "user-agent": "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)",
        "accept-encoding": "gzip, deflate"
    },
    {
        "connection": "close",
        "accept": "text/html, application/xhtml+xml, */*",
        "accept-language": "en-US",
        "user-agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)",
        "accept-encoding": "gzip, deflate"
    },
    {
        "connection": "close",
        "accept": "text/html, application/xhtml+xml, */*",
        "accept-language": "en-US",
        "user-agent": "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
        "accept-encoding": "gzip, deflate",
        "dnt": "1"
    },
    {
        "connection": "close",
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "accept-language": "en-US,en;q=0.5",
        "accept-encoding": "gzip, deflate"
    },
    {
        "connection": "close",
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "accept-language": "en-US,en;q=0.5",
        "accept-encoding": "gzip, deflate"
    },
    {
        "connection": "close",
        "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:43.0) Gecko/20100101 Firefox/43.0",
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "accept-language": "en-US,en;q=0.5",
        "accept-encoding": "gzip, deflate"
    }
]
@@ -1,336 +0,0 @@
{
|
||||
"chrome": [
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.59 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.110 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.113 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.8",
|
||||
"Accept-Encoding": "gzip, deflate, , br"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.8",
|
||||
"Accept-Encoding": "gzip, deflate, br"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.146 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.67 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.80 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.92 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.92 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.109 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.170 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.81 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Accept-Encoding": "gzip, deflate, br"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"User-Agent": null,
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Accept-Encoding": "gzip, deflate, br"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.40 Safari/537.36",
|
||||
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.40 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36",
|
||||
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.28 Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Accept-Encoding": "gzip, deflate, br"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Linux; Android 8.1.0; SM-N960F Build/M1AJQ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 8.0.0; SM-G965F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 8.0.0; Pixel 2 Build/OPD1.170816.010) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 8.0.0; Pixel Build/OPR6.170623.012) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.1.1; SM-A530F Build/NMF26X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.1; Pixel Build/NDE63H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.0; SM-G955F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.0; SM-G950F Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.0; SM-T825 Build/NRD90M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 6.0.1; SM-G930F Build/MMB29K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 6.0; Nexus 6 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 6.0; XT1092 Build/MPE24.49-18) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 6.0.1; SM-N910C Build/MMB29K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 5.0.2; SM-G920F Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 5.0; Nexus 6 Build/LRX21O) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 9; Pixel 3 XL Build/PD1A.180720.030) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 9; Pixel 3 Build/PD1A.180720.030) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 9; Pixel 2 Build/PPR1.180610.009) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 4.4; Nexus 5 Build/KRT16M) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 4.4.2; SM-T530 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 4.4.4; SM-N910C Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 5.1.1; Nexus 9 Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 7.1.1; SM-N950F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.90 Mobile Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
"Accept-Encoding": "gzip, deflate, br",
|
||||
"Accept-Language": "en-US,en;q=0.9"
|
||||
}
|
||||
},
|
||||
{
|
||||
"User-Agent": [
|
||||
"Mozilla/5.0 (Linux; Android 8.1.0; SM-T835 Build/M1AJQ) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36",
|
||||
"Mozilla/5.0 (Linux; Android 5.0; XT1092 Build/LXE22.46-19) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.85 Mobile Safari/537.36"
|
||||
],
|
||||
"headers": {
|
||||
"Connection": "keep-alive",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": null,
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
|
||||
"Accept-Encoding": "gzip, deflate, br",
|
||||
"Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@@ -1,97 +0,0 @@
MAPPING = {
|
||||
'a': '(false+"")[1]',
|
||||
'b': '([]["entries"]()+"")[2]',
|
||||
'c': '([]["fill"]+"")[3]',
|
||||
'd': '(undefined+"")[2]',
|
||||
'e': '(true+"")[3]',
|
||||
'f': '(false+"")[0]',
|
||||
'g': '(false+[0]+String)[20]',
|
||||
'h': '(+(101))["to"+String["name"]](21)[1]',
|
||||
'i': '([false]+undefined)[10]',
|
||||
'j': '([]["entries"]()+"")[3]',
|
||||
'k': '(+(20))["to"+String["name"]](21)',
|
||||
'l': '(false+"")[2]',
|
||||
'm': '(Number+"")[11]',
|
||||
'n': '(undefined+"")[1]',
|
||||
'o': '(true+[]["fill"])[10]',
|
||||
'p': '(+(211))["to"+String["name"]](31)[1]',
|
||||
'q': '(+(212))["to"+String["name"]](31)[1]',
|
||||
'r': '(true+"")[1]',
|
||||
's': '(false+"")[3]',
|
||||
't': '(true+"")[0]',
|
||||
'u': '(undefined+"")[0]',
|
||||
'v': '(+(31))["to"+String["name"]](32)',
|
||||
'w': '(+(32))["to"+String["name"]](33)',
|
||||
'x': '(+(101))["to"+String["name"]](34)[1]',
|
||||
'y': '(NaN+[Infinity])[10]',
|
||||
'z': '(+(35))["to"+String["name"]](36)',
|
||||
'A': '(+[]+Array)[10]',
|
||||
'B': '(+[]+Boolean)[10]',
|
||||
'C': 'Function("return escape")()(("")["italics"]())[2]',
|
||||
'D': 'Function("return escape")()([]["fill"])["slice"]("-1")',
|
||||
'E': '(RegExp+"")[12]',
|
||||
'F': '(+[]+Function)[10]',
|
||||
'G': '(false+Function("return Date")()())[30]',
|
||||
'I': '(Infinity+"")[0]',
|
||||
'M': '(true+Function("return Date")()())[30]',
|
||||
'N': '(NaN+"")[0]',
|
||||
'O': '(NaN+Function("return{}")())[11]',
|
||||
'R': '(+[]+RegExp)[10]',
|
||||
'S': '(+[]+String)[10]',
|
||||
'T': '(NaN+Function("return Date")()())[30]',
|
||||
'U': '(NaN+Function("return{}")()["to"+String["name"]]["call"]())[11]',
|
||||
' ': '(NaN+[]["fill"])[11]',
|
||||
'"': '("")["fontcolor"]()[12]',
|
||||
'%': 'Function("return escape")()([]["fill"])[21]',
|
||||
'&': '("")["link"](0+")[10]',
|
||||
'(': '(undefined+[]["fill"])[22]',
|
||||
')': '([0]+false+[]["fill"])[20]',
|
||||
'+': '(+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]])+[])[2]',
|
||||
',': '([]["slice"]["call"](false+"")+"")[1]',
|
||||
'-': '(+(.+[0000000001])+"")[2]',
|
||||
'.': '(+(+!+[]+[+!+[]]+(!![]+[])[!+[]+!+[]+!+[]]+[!+[]+!+[]]+[+[]])+[])[+!+[]]',
|
||||
'/': '(false+[0])["italics"]()[10]',
|
||||
':': '(RegExp()+"")[3]',
|
||||
';': '("")["link"](")[14]',
|
||||
'<': '("")["italics"]()[0]',
|
||||
'=': '("")["fontcolor"]()[11]',
|
||||
'>': '("")["italics"]()[2]',
|
||||
'?': '(RegExp()+"")[2]',
|
||||
'[': '([]["entries"]()+"")[0]',
|
||||
']': '([]["entries"]()+"")[22]',
|
||||
'{': '(true+[]["fill"])[20]',
|
||||
'}': '([]["fill"]+"")["slice"]("-1")'
|
||||
}
|
||||
|
||||
SIMPLE = {
|
||||
'false': '![]',
|
||||
'true': '!![]',
|
||||
'undefined': '[][[]]',
|
||||
'NaN': '+[![]]',
|
||||
'Infinity': '+(+!+[]+(!+[]+[])[!+[]+!+[]+!+[]]+[+!+[]]+[+[]]+[+[]]+[+[]])' # +"1e1000"
|
||||
}
|
||||
|
||||
CONSTRUCTORS = {
|
||||
'Array': '[]',
|
||||
'Number': '(+[])',
|
||||
'String': '([]+[])',
|
||||
'Boolean': '(![])',
|
||||
'Function': '[]["fill"]',
|
||||
'RegExp': 'Function("return/"+false+"/")()'
|
||||
}
|
||||
|
||||
def jsunfuck(jsfuckString):
|
||||
|
||||
for key in sorted(MAPPING, key=lambda k: len(MAPPING[k]), reverse=True):
|
||||
if MAPPING.get(key) in jsfuckString:
|
||||
jsfuckString = jsfuckString.replace(MAPPING.get(key), '"{}"'.format(key))
|
||||
|
||||
for key in sorted(SIMPLE, key=lambda k: len(SIMPLE[k]), reverse=True):
|
||||
if SIMPLE.get(key) in jsfuckString:
|
||||
jsfuckString = jsfuckString.replace(SIMPLE.get(key), '{}'.format(key))
|
||||
|
||||
#for key in sorted(CONSTRUCTORS, key=lambda k: len(CONSTRUCTORS[k]), reverse=True):
|
||||
# if CONSTRUCTORS.get(key) in jsfuckString:
|
||||
# jsfuckString = jsfuckString.replace(CONSTRUCTORS.get(key), '{}'.format(key))
|
||||
|
||||
return jsfuckString
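# A minimal usage sketch, assuming the MAPPING/SIMPLE tables defined above
# (the input fragment is illustrative, not a real Cloudflare challenge):
if __name__ == '__main__':
    decoded = jsunfuck('([]["fill"]+"")[3] + (undefined+"")[2]')
    print(decoded)  # "c" + "d"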
|
|
@ -1,23 +0,0 @@
|
|||
import json
|
||||
import base64
|
||||
import hmac
|
||||
from Crypto.Cipher import AES
|
||||
|
||||
def _strcmp(a, b):
|
||||
"""Compares two strings while preventing timing attacks. Execution time
|
||||
is not affected by the length of the common prefix for strings of the same length"""
|
||||
return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
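# For reference, a sketch using the standard library's constant-time primitive
# (hmac.compare_digest, available on Python 2.7.7+/3.3+) for the same check:
def _strcmp_stdlib(a, b):
    """Constant-time comparison using the stdlib primitive."""
    return hmac.compare_digest(a, b)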
|
||||
|
||||
class SecureSession(object):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
json()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
base64.b64encode(hmac.new(tob(key), msg).digest())):
|
||||
return pickle.loads(base64.b64decode(msg))
|
|
@ -1,7 +1,14 @@
|
|||
from __future__ import print_function
|
||||
|
||||
from timeit import timeit
|
||||
from collections import namedtuple
|
||||
from array import array
|
||||
from itertools import izip
|
||||
try:
|
||||
#python 2 code
|
||||
from itertools import izip as zip
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from collections import deque
|
||||
|
||||
|
||||
|
@ -47,7 +54,7 @@ t = []
|
|||
|
||||
Type = None
|
||||
try:
|
||||
print timeit(
|
||||
print(timeit(
|
||||
"""
|
||||
|
||||
t.append(4)
|
||||
|
@ -56,7 +63,7 @@ t.pop()
|
|||
|
||||
|
||||
""",
|
||||
"from __main__ import X,Y,namedtuple,array,t,add,Type, izip",
|
||||
number=1000000)
|
||||
"from __main__ import X,Y,namedtuple,array,t,add,Type, zip",
|
||||
number=1000000))
|
||||
except:
|
||||
raise
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from __future__ import print_function
|
||||
from string import ascii_lowercase, digits
|
||||
##################################
|
||||
StringName = u'PyJsConstantString%d_'
|
||||
|
@ -305,4 +306,4 @@ if __name__ == '__main__':
|
|||
''')
|
||||
|
||||
t, d = remove_constants(test)
|
||||
print t, d
|
||||
print(t, d)
|
||||
|
|
|
@ -16,6 +16,8 @@ If case of parsing errors it must return a pos of error.
|
|||
NOTES:
|
||||
Strings and other literals are not present so each = means assignment
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from utils import *
|
||||
from jsparser import *
|
||||
|
||||
|
@ -80,4 +82,4 @@ def bass_translator(s):
|
|||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print bass_translator('3.ddsd = 40')
|
||||
print(bass_translator('3.ddsd = 40'))
|
||||
|
|
|
@ -9,6 +9,8 @@ FOR 123
|
|||
FOR iter
|
||||
CONTINUE, BREAK, RETURN, LABEL, THROW, TRY, SWITCH
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from utils import *
|
||||
from jsparser import *
|
||||
from nodevisitor import exp_translator
|
||||
|
@ -477,4 +479,4 @@ def translate_flow(source):
|
|||
if __name__ == '__main__':
|
||||
#print do_dowhile('do {} while(k+f)', 0)[0]
|
||||
#print 'e: "%s"'%do_expression('++(c?g:h); mj', 0)[0]
|
||||
print translate_flow('a; yimport test')[0]
|
||||
print(translate_flow('a; yimport test')[0])
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
"""This module removes JS functions from source code"""
|
||||
from __future__ import print_function
|
||||
|
||||
from jsparser import *
|
||||
from utils import *
|
||||
|
||||
|
@ -94,5 +96,5 @@ def remove_functions(source, all_inline=False):
|
|||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print remove_functions(
|
||||
'5+5 function n (functiona ,functionaj) {dsd s, dsdd}')
|
||||
print(remove_functions(
|
||||
'5+5 function n (functiona ,functionaj) {dsd s, dsdd}'))
|
||||
|
|
|
@ -45,6 +45,7 @@ TODO
|
|||
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from utils import *
|
||||
|
||||
|
@ -64,7 +65,7 @@ OP_METHODS = {
|
|||
|
||||
def dbg(source):
|
||||
try:
|
||||
with open('C:\Users\Piotrek\Desktop\dbg.py', 'w') as f:
|
||||
with open(r'C:\Users\Piotrek\Desktop\dbg.py', 'w') as f:
|
||||
f.write(source)
|
||||
except:
|
||||
pass
|
||||
|
@ -77,13 +78,13 @@ def indent(lines, ind=4):
|
|||
def inject_before_lval(source, lval, code):
|
||||
if source.count(lval) > 1:
|
||||
dbg(source)
|
||||
print
|
||||
print lval
|
||||
print()
|
||||
print(lval)
|
||||
raise RuntimeError('Too many lvals (%s)' % lval)
|
||||
elif not source.count(lval):
|
||||
dbg(source)
|
||||
print
|
||||
print lval
|
||||
print()
|
||||
print(lval)
|
||||
assert lval not in source
|
||||
raise RuntimeError('No lval found "%s"' % lval)
|
||||
end = source.index(lval)
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
from __future__ import print_function
|
||||
|
||||
from jsparser import *
|
||||
from utils import *
|
||||
import re
|
||||
|
@ -557,6 +559,6 @@ if __name__ == '__main__':
|
|||
#print 'Here', trans('(eee ) . ii [ PyJsMarker ] [ jkj ] ( j , j ) .
|
||||
# jiji (h , ji , i)(non )( )()()()')
|
||||
for e in xrange(3):
|
||||
print exp_translator('jk = kk.ik++')
|
||||
print(exp_translator('jk = kk.ik++'))
|
||||
#First line translated with PyJs: PyJsStrictEq(PyJsAdd((Js(100)*Js(50)),Js(30)), Js("5030")), yay!
|
||||
print exp_translator('delete a.f')
|
||||
print(exp_translator('delete a.f'))
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
""" This module removes all objects/arrays from JS source code and replace them with LVALS.
|
||||
Also it has a function translating removed objects/arrays to Python code.
|
||||
Use this module just after removing constants. Later move on to removing functions"""
|
||||
from __future__ import print_function
|
||||
|
||||
OBJECT_LVAL = 'PyJsLvalObject%d_'
|
||||
ARRAY_LVAL = 'PyJsLvalArray%d_'
|
||||
from utils import *
|
||||
|
@ -180,7 +182,7 @@ def translate_object(obj, lval, obj_count=1, arr_count=1):
|
|||
try:
|
||||
key, value = spl
|
||||
except: #len(spl)> 2
|
||||
print 'Unusual case ' + repr(e)
|
||||
print('Unusual case ' + repr(e))
|
||||
key = spl[0]
|
||||
value = ':'.join(spl[1:])
|
||||
key = key.strip()
|
||||
|
@ -293,8 +295,8 @@ if __name__ == '__main__':
|
|||
|
||||
#print remove_objects(test)
|
||||
#print list(bracket_split(' {}'))
|
||||
print
|
||||
print remove_arrays(
|
||||
print()
|
||||
print(remove_arrays(
|
||||
'typeof a&&!db.test(a)&&!ib[(bb.exec(a)||["",""], [][[5][5]])[1].toLowerCase()])'
|
||||
)
|
||||
print is_object('', ')')
|
||||
))
|
||||
print(is_object('', ')'))
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
from __future__ import print_function
|
||||
|
||||
from flow import translate_flow
|
||||
from constants import remove_constants, recover_constants
|
||||
from objects import remove_objects, remove_arrays, translate_object, translate_array, set_func_translator
|
||||
|
@ -148,4 +150,4 @@ if __name__ == '__main__':
|
|||
#res = translate_js(jq)
|
||||
res = translate_js(t)
|
||||
dbg(SANDBOX % indent(res))
|
||||
print 'Done'
|
||||
print('Done')
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
from internals import byte_trans
|
||||
from internals import seval
|
||||
import pyjsparser
|
||||
|
||||
x = r'''
|
||||
function g() {var h123 = 11; return [function g1() {return h123}, new Function('return h123')]}
|
||||
g()[1]()
|
||||
'''
|
||||
print seval.eval_js_vm(x)
|
|
@ -1,11 +0,0 @@
|
|||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__version__ = '0.7.0'
|
||||
|
||||
|
||||
try:
|
||||
from plex.client import Plex
|
||||
except Exception as ex:
|
||||
log.warn('Unable to import submodules - %s', ex, exc_info=True)
|
|
@ -1,116 +0,0 @@
|
|||
from plex.core.configuration import ConfigurationManager
|
||||
from plex.core.http import HttpClient
|
||||
from plex.helpers import has_attribute
|
||||
from plex.interfaces import construct_map
|
||||
from plex.interfaces.core.base import InterfaceProxy
|
||||
from plex.lib.six import add_metaclass
|
||||
from plex.objects.core.manager import ObjectManager
|
||||
|
||||
import logging
|
||||
import socket
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class PlexClient(object):
|
||||
__interfaces = None
|
||||
|
||||
def __init__(self):
|
||||
# Construct interfaces
|
||||
self.http = HttpClient(self)
|
||||
self.configuration = ConfigurationManager()
|
||||
|
||||
self.__interfaces = construct_map(self)
|
||||
|
||||
# Discover modules
|
||||
ObjectManager.construct()
|
||||
|
||||
@property
|
||||
def base_url(self):
|
||||
host = self.configuration.get('server.host', '127.0.0.1')
|
||||
port = self.configuration.get('server.port', 32400)
|
||||
|
||||
return 'http://%s:%s' % (host, port)
|
||||
|
||||
def __getitem__(self, path):
|
||||
parts = path.strip('/').split('/')
|
||||
|
||||
cur = self.__interfaces
|
||||
parameters = []
|
||||
|
||||
while parts and type(cur) is dict:
|
||||
key = parts.pop(0)
|
||||
|
||||
if key == '*':
|
||||
key = None
|
||||
elif key not in cur:
|
||||
if None in cur:
|
||||
parameters.append(key)
|
||||
|
||||
cur = cur[None]
|
||||
continue
|
||||
|
||||
return None
|
||||
|
||||
cur = cur[key]
|
||||
|
||||
while type(cur) is dict:
|
||||
cur = cur.get(None)
|
||||
|
||||
if parts:
|
||||
parameters.extend(parts)
|
||||
|
||||
if parameters:
|
||||
return InterfaceProxy(cur, parameters)
|
||||
|
||||
return cur
|
||||
|
||||
def __getattr__(self, name):
|
||||
interface = self.__interfaces.get(None)
|
||||
|
||||
if not interface:
|
||||
raise Exception("Root interface not found")
|
||||
|
||||
return getattr(interface, name)
|
||||
|
||||
|
||||
class PlexMeta(type):
|
||||
@property
|
||||
def client(cls):
|
||||
if cls._client is None:
|
||||
cls.construct()
|
||||
|
||||
return cls._client
|
||||
|
||||
def __getattr__(self, name):
|
||||
if has_attribute(self, name):
|
||||
return super(PlexMeta, self).__getattribute__(name)
|
||||
|
||||
if self.client is None:
|
||||
self.construct()
|
||||
|
||||
return getattr(self.client, name)
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if has_attribute(self, name):
|
||||
return super(PlexMeta, self).__setattr__(name, value)
|
||||
|
||||
if self.client is None:
|
||||
self.construct()
|
||||
|
||||
setattr(self.client, name, value)
|
||||
|
||||
def __getitem__(self, key):
|
||||
if self.client is None:
|
||||
self.construct()
|
||||
|
||||
return self.client[key]
|
||||
|
||||
|
||||
@add_metaclass(PlexMeta)
|
||||
class Plex(object):
|
||||
_client = None
|
||||
|
||||
@classmethod
|
||||
def construct(cls):
|
||||
cls._client = PlexClient()
|
|
@ -1,115 +0,0 @@
|
|||
class ConfigurationManager(object):
|
||||
def __init__(self):
|
||||
self.stack = [
|
||||
Configuration(self)
|
||||
]
|
||||
|
||||
@property
|
||||
def current(self):
|
||||
return self.stack[-1]
|
||||
|
||||
@property
|
||||
def defaults(self):
|
||||
return self.stack[0]
|
||||
|
||||
def authentication(self, token):
|
||||
return Configuration(self).authentication(token)
|
||||
|
||||
def cache(self, **definitions):
|
||||
return Configuration(self).cache(**definitions)
|
||||
|
||||
def client(self, identifier, product, version):
|
||||
return Configuration(self).client(identifier, product, version)
|
||||
|
||||
def device(self, name, system):
|
||||
return Configuration(self).device(name, system)
|
||||
|
||||
def headers(self, headers):
|
||||
return Configuration(self).headers(headers)
|
||||
|
||||
def platform(self, name, version):
|
||||
return Configuration(self).platform(name, version)
|
||||
|
||||
def server(self, host='127.0.0.1', port=32400):
|
||||
return Configuration(self).server(host, port)
|
||||
|
||||
def get(self, key, default=None):
|
||||
for x in range(len(self.stack) - 1, -1, -1):
|
||||
value = self.stack[x].get(key)
|
||||
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
return default
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.get(key)
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.current[key] = value
|
||||
|
||||
|
||||
class Configuration(object):
|
||||
def __init__(self, manager):
|
||||
self.manager = manager
|
||||
|
||||
self.data = {}
|
||||
|
||||
def authentication(self, token):
|
||||
self.data['authentication.token'] = token
|
||||
|
||||
return self
|
||||
|
||||
def cache(self, **definitions):
|
||||
for key, value in definitions.items():
|
||||
self.data['cache.%s' % key] = value
|
||||
|
||||
return self
|
||||
|
||||
def client(self, identifier, product, version):
|
||||
self.data['client.identifier'] = identifier
|
||||
|
||||
self.data['client.product'] = product
|
||||
self.data['client.version'] = version
|
||||
|
||||
return self
|
||||
|
||||
def device(self, name, system):
|
||||
self.data['device.name'] = name
|
||||
self.data['device.system'] = system
|
||||
|
||||
return self
|
||||
|
||||
def headers(self, headers):
|
||||
self.data['headers'] = headers
|
||||
|
||||
return self
|
||||
|
||||
def platform(self, name, version):
|
||||
self.data['platform.name'] = name
|
||||
self.data['platform.version'] = version
|
||||
|
||||
return self
|
||||
|
||||
def server(self, host='127.0.0.1', port=32400):
|
||||
self.data['server.host'] = host
|
||||
self.data['server.port'] = port
|
||||
|
||||
return self
|
||||
|
||||
def get(self, key, default=None):
|
||||
return self.data.get(key, default)
|
||||
|
||||
def __enter__(self):
|
||||
self.manager.stack.append(self)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
item = self.manager.stack.pop()
|
||||
|
||||
assert item == self
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.data[key]
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.data[key] = value
|
|
@ -1,26 +0,0 @@
|
|||
from threading import Lock
|
||||
|
||||
|
||||
class Context(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.kwargs = kwargs
|
||||
|
||||
def __getattr__(self, key):
|
||||
return self.kwargs.get(key)
|
||||
|
||||
|
||||
class ContextStack(object):
|
||||
def __init__(self):
|
||||
self._list = []
|
||||
self._lock = Lock()
|
||||
|
||||
def pop(self):
|
||||
context = self._list.pop()
|
||||
|
||||
self._lock.release()
|
||||
return context
|
||||
|
||||
def push(self, **kwargs):
|
||||
self._lock.acquire()
|
||||
|
||||
return self._list.append(Context(**kwargs))
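# A small usage sketch: push() takes the lock and stores keyword arguments,
# pop() releases it and returns a Context whose unknown attributes are None.
if __name__ == '__main__':
    stack = ContextStack()
    stack.push(base_path='library')

    ctx = stack.pop()
    print(ctx.base_path)   # 'library'
    print(ctx.missing)     # None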
|
|
@ -1,105 +0,0 @@
|
|||
# ExtensionImporter (```flask.exthook```)
|
||||
# ----------------------------------
|
||||
# :copyright: (c) 2014 by Armin Ronacher.
|
||||
# :license: BSD, see LICENSE for more details.
|
||||
|
||||
from plex.lib.six import reraise
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class ExtensionImporter(object):
|
||||
"""This importer redirects imports from this submodule to other locations.
|
||||
This makes it possible to transition from the old flaskext.name to the
|
||||
newer flask_name without people having a hard time.
|
||||
"""
|
||||
|
||||
def __init__(self, module_choices, wrapper_module):
|
||||
self.module_choices = module_choices
|
||||
self.wrapper_module = wrapper_module
|
||||
self.prefix = wrapper_module + '.'
|
||||
self.prefix_cutoff = wrapper_module.count('.') + 1
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__.__module__ == other.__class__.__module__ and \
|
||||
self.__class__.__name__ == other.__class__.__name__ and \
|
||||
self.wrapper_module == other.wrapper_module and \
|
||||
self.module_choices == other.module_choices
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def install(self):
|
||||
sys.meta_path[:] = [x for x in sys.meta_path if self != x] + [self]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname.startswith(self.prefix):
|
||||
return self
|
||||
|
||||
def load_module(self, fullname):
|
||||
if fullname in sys.modules:
|
||||
return sys.modules[fullname]
|
||||
modname = fullname.split('.', self.prefix_cutoff)[self.prefix_cutoff]
|
||||
for path in self.module_choices:
|
||||
realname = path % modname
|
||||
try:
|
||||
__import__(realname)
|
||||
except ImportError:
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
# since we only establish the entry in sys.modules at the
|
||||
# very end this seems to be redundant, but if recursive imports
|
||||
# happen we will call into the move import a second time.
|
||||
# On the second invocation we still don't have an entry for
|
||||
# fullname in sys.modules, but we will end up with the same
|
||||
# fake module name and that import will succeed since this
|
||||
# one already has a temporary entry in the modules dict.
|
||||
# Since this one "succeeded" temporarily that second
|
||||
# invocation now will have created a fullname entry in
|
||||
# sys.modules which we have to kill.
|
||||
sys.modules.pop(fullname, None)
|
||||
|
||||
# If it's an important traceback we reraise it, otherwise
|
||||
# we swallow it and try the next choice. The skipped frame
|
||||
# is the one from __import__ above which we don't care about
|
||||
if self.is_important_traceback(realname, tb):
|
||||
reraise(exc_type, exc_value, tb.tb_next)
|
||||
continue
|
||||
module = sys.modules[fullname] = sys.modules[realname]
|
||||
if '.' not in modname:
|
||||
setattr(sys.modules[self.wrapper_module], modname, module)
|
||||
return module
|
||||
raise ImportError('No module named %s' % fullname)
|
||||
|
||||
def is_important_traceback(self, important_module, tb):
|
||||
"""Walks a traceback's frames and checks if any of the frames
|
||||
originated in the given important module. If that is the case then we
|
||||
were able to import the module itself but apparently something went
|
||||
wrong when the module was imported. (Eg: import of an import failed).
|
||||
"""
|
||||
while tb is not None:
|
||||
if self.is_important_frame(important_module, tb):
|
||||
return True
|
||||
tb = tb.tb_next
|
||||
return False
|
||||
|
||||
def is_important_frame(self, important_module, tb):
|
||||
"""Checks a single frame if it's important."""
|
||||
g = tb.tb_frame.f_globals
|
||||
if '__name__' not in g:
|
||||
return False
|
||||
|
||||
module_name = g['__name__']
|
||||
|
||||
# Python 2.7 Behavior. Modules are cleaned up late so the
|
||||
# name shows up properly here. Success!
|
||||
if module_name == important_module:
|
||||
return True
|
||||
|
||||
# Some python versions will clean up modules so early that the
|
||||
# module name at that point is no longer set. Try guessing from
|
||||
# the filename then.
|
||||
filename = os.path.abspath(tb.tb_frame.f_code.co_filename)
|
||||
test_string = os.path.sep + important_module.replace('.', os.path.sep)
|
||||
return test_string + '.py' in filename or \
|
||||
test_string + os.path.sep + '__init__.py' in filename
|
|
@ -1,59 +0,0 @@
|
|||
from plex.lib import six
|
||||
|
||||
import re
|
||||
import unicodedata
|
||||
|
||||
def flatten(text):
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
# Normalize `text` to ascii
|
||||
text = normalize(text)
|
||||
|
||||
# Remove special characters
|
||||
text = re.sub(r'[^A-Za-z0-9\s]+', '', text)
|
||||
|
||||
# Merge duplicate spaces
|
||||
text = ' '.join(text.split())
|
||||
|
||||
# Convert to lower-case
|
||||
return text.lower()
|
||||
|
||||
def normalize(text):
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
# Normalize unicode characters
|
||||
if type(text) is six.text_type:
|
||||
text = unicodedata.normalize('NFKD', text)
|
||||
|
||||
# Ensure text is ASCII, ignore unknown characters
|
||||
text = text.encode('ascii', 'ignore')
|
||||
|
||||
# Return decoded `text`
|
||||
return text.decode('ascii')
|
||||
|
||||
def to_iterable(value):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if isinstance(value, (list, tuple)):
|
||||
return value
|
||||
|
||||
return [value]
|
||||
|
||||
|
||||
def synchronized(func):
|
||||
def wrapper(self, *__args, **__kw):
|
||||
self._lock.acquire()
|
||||
|
||||
try:
|
||||
return func(self, *__args, **__kw)
|
||||
finally:
|
||||
self._lock.release()
|
||||
|
||||
wrapper.__name__ = func.__name__
|
||||
wrapper.__dict__ = func.__dict__
|
||||
wrapper.__doc__ = func.__doc__
|
||||
|
||||
return wrapper
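# A brief sketch of the helpers above: flatten() normalises to plain ASCII,
# strips punctuation and lower-cases; synchronized() assumes the decorated
# method's instance provides a `_lock` attribute (as HttpClient does).
if __name__ == '__main__':
    print(flatten(u'Am\xe9lie & Co.'))   # 'amelie co'
    print(to_iterable('Movie'))          # ['Movie']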
|
|
@ -1,151 +0,0 @@
|
|||
from plex.core.context import ContextStack
|
||||
from plex.core.helpers import synchronized
|
||||
from plex.request import PlexRequest
|
||||
|
||||
from threading import Condition
|
||||
import hashlib
|
||||
import logging
|
||||
import requests
|
||||
import socket
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HttpClient(object):
|
||||
def __init__(self, client):
|
||||
self.client = client
|
||||
|
||||
self.configuration = ContextStack()
|
||||
|
||||
self.session = None
|
||||
|
||||
# Private
|
||||
self._lock = Condition()
|
||||
|
||||
# Build requests session
|
||||
self._build()
|
||||
|
||||
@property
|
||||
def cache(self):
|
||||
return self.client.configuration.get('cache.http')
|
||||
|
||||
def configure(self, path=None):
|
||||
self.configuration.push(base_path=path)
|
||||
return self
|
||||
|
||||
def request(self, method, path=None, params=None, query=None, data=None, credentials=None, **kwargs):
|
||||
# retrieve configuration
|
||||
ctx = self.configuration.pop()
|
||||
|
||||
if path is not None and type(path) is not str:
|
||||
# Convert `path` to string (excluding NoneType)
|
||||
path = str(path)
|
||||
|
||||
if ctx.base_path and path:
|
||||
# Prepend `base_path` to relative `path`s
|
||||
if not path.startswith('/'):
|
||||
path = ctx.base_path + '/' + path
|
||||
|
||||
elif ctx.base_path:
|
||||
path = ctx.base_path
|
||||
elif not path:
|
||||
path = ''
|
||||
|
||||
request = PlexRequest(
|
||||
self.client,
|
||||
method=method,
|
||||
path=path,
|
||||
|
||||
params=params,
|
||||
query=query,
|
||||
data=data,
|
||||
|
||||
credentials=credentials,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
prepared = request.prepare()
|
||||
|
||||
# Try retrieve cached response
|
||||
response = self._cache_lookup(prepared)
|
||||
|
||||
if response:
|
||||
return response
|
||||
|
||||
# TODO retrying requests on 502, 503 errors?
|
||||
# try:
|
||||
# response = self.session.send(prepared)
|
||||
# except socket.gaierror as e:
|
||||
# code, _ = e
|
||||
#
|
||||
# if code != 8:
|
||||
# raise e
|
||||
#
|
||||
# log.warn('Encountered socket.gaierror (code: 8)')
|
||||
#
|
||||
# response = self._build().send(prepared)
|
||||
response = request.request.send()
|
||||
|
||||
# Store response in cache
|
||||
self._cache_store(prepared, response)
|
||||
|
||||
return response
|
||||
|
||||
def get(self, path=None, params=None, query=None, data=None, **kwargs):
|
||||
return self.request('GET', path, params, query, data, **kwargs)
|
||||
|
||||
def put(self, path=None, params=None, query=None, data=None, **kwargs):
|
||||
return self.request('PUT', path, params, query, data, **kwargs)
|
||||
|
||||
def post(self, path=None, params=None, query=None, data=None, **kwargs):
|
||||
return self.request('POST', path, params, query, data, **kwargs)
|
||||
|
||||
def delete(self, path=None, params=None, query=None, data=None, **kwargs):
|
||||
return self.request('DELETE', path, params, query, data, **kwargs)
|
||||
|
||||
def _build(self):
|
||||
if self.session:
|
||||
log.info('Rebuilding session and connection pools...')
|
||||
|
||||
# Rebuild the connection pool (old pool has stale connections)
|
||||
self.session = requests.Session()
|
||||
|
||||
return self.session
|
||||
|
||||
@synchronized
|
||||
def _cache_lookup(self, request):
|
||||
if self.cache is None:
|
||||
return None
|
||||
|
||||
if request.method not in ['GET']:
|
||||
return None
|
||||
|
||||
# Retrieve from cache
|
||||
return self.cache.get(self._cache_key(request))
|
||||
|
||||
@synchronized
|
||||
def _cache_store(self, request, response):
|
||||
if self.cache is None:
|
||||
return None
|
||||
|
||||
if request.method not in ['GET']:
|
||||
return None
|
||||
|
||||
# Store in cache
|
||||
self.cache[self._cache_key(request)] = response
|
||||
|
||||
@staticmethod
|
||||
def _cache_key(request):
|
||||
raw = ','.join([request.method, request.url])
|
||||
|
||||
# Generate MD5 hash of key
|
||||
m = hashlib.md5()
|
||||
m.update(raw.encode('utf-8'))
|
||||
|
||||
return m.hexdigest()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
pass
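# The cache key above is simply an MD5 digest of "METHOD,URL"; a standalone
# sketch with an illustrative URL:
if __name__ == '__main__':
    import hashlib

    raw = ','.join(['GET', 'http://127.0.0.1:32400/library/sections'])
    print(hashlib.md5(raw.encode('utf-8')).hexdigest())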
|
|
@ -1,54 +0,0 @@
|
|||
from plex.lib.six import string_types
|
||||
|
||||
class idict(dict):
|
||||
def __init__(self, initial=None):
|
||||
if initial:
|
||||
self.update(initial)
|
||||
|
||||
def get(self, k, d=None):
|
||||
if isinstance(k, string_types):
|
||||
k = k.lower()
|
||||
|
||||
if super(idict, self).__contains__(k):
|
||||
return self[k]
|
||||
|
||||
return d
|
||||
|
||||
def update(self, E=None, **F):
|
||||
if E:
|
||||
if hasattr(E, 'keys'):
|
||||
# Update with `E` dictionary
|
||||
for k in E:
|
||||
self[k] = E[k]
|
||||
else:
|
||||
# Update with `E` items
|
||||
for (k, v) in E:
|
||||
self[k] = v
|
||||
|
||||
# Update with `F` dictionary
|
||||
for k in F:
|
||||
self[k] = F[k]
|
||||
|
||||
def __contains__(self, k):
|
||||
if isinstance(k, string_types):
|
||||
k = k.lower()
|
||||
|
||||
return super(idict, self).__contains__(k)
|
||||
|
||||
def __delitem__(self, k):
|
||||
if isinstance(k, string_types):
|
||||
k = k.lower()
|
||||
|
||||
super(idict, self).__delitem__(k)
|
||||
|
||||
def __getitem__(self, k):
|
||||
if isinstance(k, string_types):
|
||||
k = k.lower()
|
||||
|
||||
return super(idict, self).__getitem__(k)
|
||||
|
||||
def __setitem__(self, k, value):
|
||||
if isinstance(k, string_types):
|
||||
k = k.lower()
|
||||
|
||||
super(idict, self).__setitem__(k, value)
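# A short sketch of the case-insensitive dict above: string keys are folded to
# lower case on every read and write.
if __name__ == '__main__':
    headers = idict({'Content-Type': 'application/xml'})

    print(headers.get('content-type'))   # 'application/xml'
    print('CONTENT-TYPE' in headers)     # True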
|
|
@ -1,4 +0,0 @@
|
|||
from plex.core.extension import ExtensionImporter
|
||||
|
||||
importer = ExtensionImporter(['plex_%s'], __name__)
|
||||
importer.install()
|
|
@ -1,6 +0,0 @@
|
|||
def has_attribute(obj, name):
|
||||
try:
|
||||
object.__getattribute__(obj, name)
|
||||
return True
|
||||
except AttributeError:
|
||||
return False
|
|
@ -1,81 +0,0 @@
|
|||
from plex.interfaces.channel import ChannelInterface
|
||||
from plex.interfaces.library import LibraryInterface
|
||||
from plex.interfaces.library.metadata import LibraryMetadataInterface
|
||||
from plex.interfaces.plugin import PluginInterface
|
||||
from plex.interfaces.plugin.preferences import PluginPreferencesInterface
|
||||
from plex.interfaces.preferences import PreferencesInterface
|
||||
from plex.interfaces.root import RootInterface
|
||||
from plex.interfaces.section import SectionInterface
|
||||
from plex.interfaces.status import StatusInterface
|
||||
from plex.interfaces.timeline import TimelineInterface
|
||||
|
||||
|
||||
# TODO automatic interface discovery
|
||||
|
||||
INTERFACES = [
|
||||
RootInterface,
|
||||
|
||||
# /
|
||||
ChannelInterface,
|
||||
StatusInterface,
|
||||
|
||||
# /library
|
||||
LibraryInterface,
|
||||
LibraryMetadataInterface,
|
||||
SectionInterface,
|
||||
|
||||
# /:
|
||||
PreferencesInterface,
|
||||
TimelineInterface,
|
||||
|
||||
# /:/plugins
|
||||
PluginInterface,
|
||||
PluginPreferencesInterface
|
||||
]
|
||||
|
||||
|
||||
def get_interfaces():
|
||||
for interface in INTERFACES:
|
||||
if interface.path:
|
||||
path = interface.path.strip('/')
|
||||
else:
|
||||
path = ''
|
||||
|
||||
if path:
|
||||
path = path.split('/')
|
||||
else:
|
||||
path = []
|
||||
|
||||
yield path, interface
|
||||
|
||||
|
||||
def construct_map(client, d=None, interfaces=None):
|
||||
if d is None:
|
||||
d = {}
|
||||
|
||||
if interfaces is None:
|
||||
interfaces = get_interfaces()
|
||||
|
||||
for path, interface in interfaces:
|
||||
if len(path) > 0:
|
||||
key = path.pop(0)
|
||||
else:
|
||||
key = None
|
||||
|
||||
if key == '*':
|
||||
key = None
|
||||
|
||||
if len(path) == 0:
|
||||
d[key] = interface(client)
|
||||
continue
|
||||
|
||||
value = d.get(key, {})
|
||||
|
||||
if type(value) is not dict:
|
||||
value = {None: value}
|
||||
|
||||
construct_map(client, value, [(path, interface)])
|
||||
|
||||
d[key] = value
|
||||
|
||||
return d
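# A hedged sketch of how the nested map is consumed by PlexClient.__getitem__
# (see client.py earlier in this commit), assuming the plex package from this
# commit is importable: path segments select interfaces, '*' segments collect
# positional arguments via InterfaceProxy.
if __name__ == '__main__':
    from plex import Plex

    print(Plex['library'])             # LibraryInterface (the None entry under 'library')
    print(Plex['library/sections'])    # SectionInterface resolved from the nested map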
|
|
@ -1,8 +0,0 @@
|
|||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class ChannelInterface(Interface):
|
||||
path = 'channels'
|
||||
|
||||
def all(self):
|
||||
raise NotImplementedError()
|
|
@ -1,216 +0,0 @@
|
|||
from plex.lib.six import string_types, StringIO
|
||||
from plex.lib.six.moves.urllib_parse import urlparse
|
||||
|
||||
from functools import wraps
|
||||
import logging
|
||||
|
||||
# Import available parser
|
||||
PARSER = None
|
||||
|
||||
try:
|
||||
from lxml import etree
|
||||
PARSER = 'etree.HTMLParser'
|
||||
except ImportError:
|
||||
from xml.etree import ElementTree as etree
|
||||
PARSER = 'etree.XMLParser'
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Helpers(object):
|
||||
@staticmethod
|
||||
def get(node, attr):
|
||||
if PARSER == 'etree.HTMLParser':
|
||||
return node.get(attr.lower())
|
||||
|
||||
return node.get(attr)
|
||||
|
||||
@staticmethod
|
||||
def find(node, tag):
|
||||
if PARSER == 'etree.HTMLParser':
|
||||
return node.find(tag.lower())
|
||||
|
||||
return node.find(tag)
|
||||
|
||||
@staticmethod
|
||||
def findall(node, tag):
|
||||
if PARSER == 'etree.HTMLParser':
|
||||
return node.findall(tag.lower())
|
||||
|
||||
return node.findall(tag)
|
||||
|
||||
|
||||
class Interface(object):
|
||||
helpers = Helpers
|
||||
|
||||
path = None
|
||||
object_map = {}
|
||||
|
||||
def __init__(self, client):
|
||||
self.client = client
|
||||
|
||||
def __getitem__(self, name):
|
||||
if hasattr(self, name):
|
||||
return getattr(self, name)
|
||||
|
||||
raise ValueError('Unknown action "%s" on %s' % (name, self))
|
||||
|
||||
@property
|
||||
def http(self):
|
||||
if not self.client:
|
||||
return None
|
||||
|
||||
return self.client.http.configure(self.path)
|
||||
|
||||
def parse(self, response, schema):
|
||||
if response.status_code < 200 or response.status_code >= 300:
|
||||
return None
|
||||
|
||||
try:
|
||||
root = self.__parse_xml(response.content)
|
||||
except SyntaxError as ex:
|
||||
log.error('Unable to parse XML response: %s', ex, exc_info=True, extra={
|
||||
'data': {
|
||||
'snippet': self.__error_snippet(response, ex)
|
||||
}
|
||||
})
|
||||
|
||||
return None
|
||||
except Exception as ex:
|
||||
log.error('Unable to parse XML response: %s', ex, exc_info=True)
|
||||
|
||||
return None
|
||||
|
||||
url = urlparse(response.url)
|
||||
path = url.path
|
||||
|
||||
return self.__construct(self.client, path, root, schema)
|
||||
|
||||
@staticmethod
|
||||
def __parse_xml(content):
|
||||
if PARSER == 'etree.HTMLParser':
|
||||
html = etree.fromstring(content, parser=etree.HTMLParser())
|
||||
assert html.tag == 'html'
|
||||
|
||||
bodies = [e for e in html if e.tag == 'body']
|
||||
assert len(bodies) == 1
|
||||
|
||||
body = bodies[0]
|
||||
assert len(body) == 1
|
||||
|
||||
return body[0]
|
||||
|
||||
return etree.fromstring(content)
|
||||
|
||||
@staticmethod
|
||||
def __error_snippet(response, ex):
|
||||
# Retrieve the error line
|
||||
position = getattr(ex, 'position', None)
|
||||
|
||||
if not position or len(position) != 2:
|
||||
return None
|
||||
|
||||
n_line, n_column = position
|
||||
snippet = None
|
||||
|
||||
# Create StringIO stream
|
||||
stream = StringIO(response.text)
|
||||
|
||||
# Iterate over `content` to find `n_line`
|
||||
for x, l in enumerate(stream):
|
||||
if x < n_line - 1:
|
||||
continue
|
||||
|
||||
# Line found
|
||||
snippet = l
|
||||
break
|
||||
|
||||
# Close the stream
|
||||
stream.close()
|
||||
|
||||
if not snippet:
|
||||
# Couldn't find the line
|
||||
return None
|
||||
|
||||
# Find an attribute value containing `n_column`
|
||||
start = snippet.find('"', n_column)
|
||||
end = snippet.find('"', start + 1)
|
||||
|
||||
# Trim `snippet` (if attribute value was found)
|
||||
if start >= 0 and end >= 0:
|
||||
return snippet[start:end + 1]
|
||||
|
||||
return snippet
|
||||
|
||||
@classmethod
|
||||
def __construct(cls, client, path, node, schema):
|
||||
if not schema:
|
||||
return None
|
||||
|
||||
# Try retrieve schema for `tag`
|
||||
item = schema.get(node.tag)
|
||||
|
||||
if item is None:
|
||||
raise ValueError('Unknown node with tag "%s"' % node.tag)
|
||||
|
||||
if type(item) is dict:
|
||||
value = cls.helpers.get(node, item.get('_', 'type'))
|
||||
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
item = item.get(value)
|
||||
|
||||
if item is None:
|
||||
raise ValueError('Unknown node type "%s"' % value)
|
||||
|
||||
descriptor = None
|
||||
child_schema = None
|
||||
|
||||
if type(item) is tuple and len(item) == 2:
|
||||
descriptor, child_schema = item
|
||||
else:
|
||||
descriptor = item
|
||||
|
||||
if isinstance(descriptor, string_types):
|
||||
if descriptor not in cls.object_map:
|
||||
raise Exception('Unable to find descriptor by name "%s"' % descriptor)
|
||||
|
||||
descriptor = cls.object_map.get(descriptor)
|
||||
|
||||
if descriptor is None:
|
||||
raise Exception('Unable to find descriptor')
|
||||
|
||||
keys_used, obj = descriptor.construct(client, node, path=path)
|
||||
|
||||
# Lazy-construct children
|
||||
def iter_children():
|
||||
for child_node in node:
|
||||
item = cls.__construct(client, path, child_node, child_schema)
|
||||
|
||||
if item:
|
||||
yield item
|
||||
|
||||
obj._children = iter_children()
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
class InterfaceProxy(object):
|
||||
def __init__(self, interface, args):
|
||||
self.interface = interface
|
||||
self.args = list(args)
|
||||
|
||||
def __getattr__(self, name):
|
||||
value = getattr(self.interface, name)
|
||||
|
||||
if not hasattr(value, '__call__'):
|
||||
return value
|
||||
|
||||
@wraps(value)
|
||||
def wrap(*args, **kwargs):
|
||||
args = self.args + list(args)
|
||||
|
||||
return value(*args, **kwargs)
|
||||
|
||||
return wrap
|
|
@ -1,104 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class LibraryInterface(Interface):
|
||||
path = 'library'
|
||||
|
||||
def metadata(self, rating_key):
|
||||
response = self.http.get('metadata', rating_key)
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': {
|
||||
'album': 'Album',
|
||||
'artist': 'Artist',
|
||||
|
||||
'season': 'Season',
|
||||
'show': 'Show'
|
||||
},
|
||||
'Video': {
|
||||
'episode': 'Episode',
|
||||
'clip': 'Clip',
|
||||
'movie': 'Movie'
|
||||
},
|
||||
|
||||
'Track': 'Track'
|
||||
}))
|
||||
}))
|
||||
|
||||
def on_deck(self):
|
||||
response = self.http.get('onDeck')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Video': {
|
||||
'movie': 'Movie',
|
||||
'episode': 'Episode'
|
||||
}
|
||||
}))
|
||||
}))
|
||||
|
||||
def recently_added(self):
|
||||
response = self.http.get('recentlyAdded')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': {
|
||||
'album': 'Album',
|
||||
'season': 'Season'
|
||||
},
|
||||
'Video': {
|
||||
'movie': 'Movie'
|
||||
}
|
||||
}))
|
||||
}))
|
||||
|
||||
def sections(self):
|
||||
response = self.http.get('sections')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('SectionContainer', idict({
|
||||
'Directory': ('Section', idict({
|
||||
'Location': 'Location'
|
||||
}))
|
||||
}))
|
||||
}))
|
||||
|
||||
#
|
||||
# Item actions
|
||||
#
|
||||
|
||||
def rate(self, key, rating):
|
||||
response = self.http.get(
|
||||
'/:/rate',
|
||||
query={
|
||||
'identifier': 'com.plexapp.plugins.library',
|
||||
'key': key,
|
||||
'rating': int(round(rating, 0))
|
||||
}
|
||||
)
|
||||
|
||||
return response.status_code == 200
|
||||
|
||||
def scrobble(self, key):
|
||||
response = self.http.get(
|
||||
'/:/scrobble',
|
||||
query={
|
||||
'identifier': 'com.plexapp.plugins.library',
|
||||
'key': key
|
||||
}
|
||||
)
|
||||
|
||||
return response.status_code == 200
|
||||
|
||||
def unscrobble(self, key):
|
||||
response = self.http.get(
|
||||
'/:/unscrobble',
|
||||
query={
|
||||
'identifier': 'com.plexapp.plugins.library',
|
||||
'key': key
|
||||
}
|
||||
)
|
||||
|
||||
return response.status_code == 200
|
|
@ -1,65 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class LibraryMetadataInterface(Interface):
|
||||
path = 'library/metadata'
|
||||
|
||||
def refresh(self, key):
|
||||
response = self.http.put(str(key) + "/refresh")
|
||||
|
||||
def all_leaves(self, key):
|
||||
response = self.http.get(key, 'allLeaves')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': {
|
||||
'_': 'viewGroup',
|
||||
|
||||
'episode': ('ShowLeavesContainer', idict({
|
||||
'Video': {
|
||||
'episode': 'Episode'
|
||||
}
|
||||
})),
|
||||
|
||||
'track': ('ArtistLeavesContainer', idict({
|
||||
'Track': 'Track'
|
||||
}))
|
||||
}
|
||||
}))
|
||||
|
||||
def children(self, key):
|
||||
response = self.http.get(key, 'children')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': {
|
||||
'_': 'viewGroup',
|
||||
|
||||
# ---------------------------------------
|
||||
# Music
|
||||
# ---------------------------------------
|
||||
'album': ('ArtistChildrenContainer', idict({
|
||||
'Directory': {
|
||||
'album': 'Album'
|
||||
}
|
||||
})),
|
||||
|
||||
'track': ('AlbumChildrenContainer', idict({
|
||||
'Track': 'Track'
|
||||
})),
|
||||
|
||||
# ---------------------------------------
|
||||
# TV
|
||||
# ---------------------------------------
|
||||
'season': ('ShowChildrenContainer', idict({
|
||||
'Directory': {
|
||||
'season': 'Season'
|
||||
}
|
||||
})),
|
||||
|
||||
'episode': ('SeasonChildrenContainer', idict({
|
||||
'Video': {
|
||||
'episode': 'Episode'
|
||||
}
|
||||
}))
|
||||
}
|
||||
}))
|
|
@ -1,13 +0,0 @@
|
|||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class PluginInterface(Interface):
|
||||
path = ':/plugins'
|
||||
|
||||
def reload_services(self, plugin_id):
|
||||
response = self.http.get(plugin_id, 'services/reload')
|
||||
return response.status_code == 200
|
||||
|
||||
def restart(self, plugin_id):
|
||||
response = self.http.get(plugin_id, 'restart')
|
||||
return response.status_code == 200
|
|
@ -1,40 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class PluginPreferencesInterface(Interface):
|
||||
path = ':/plugins/*/prefs'
|
||||
|
||||
def get(self, plugin_id, id=None):
|
||||
response = self.http.get('/:/plugins/%s/prefs' % plugin_id)
|
||||
|
||||
container = self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Setting': 'Setting'
|
||||
}))
|
||||
}))
|
||||
|
||||
if container is None or id is None:
|
||||
return container
|
||||
|
||||
for setting in container:
|
||||
if setting.id == id:
|
||||
return setting
|
||||
|
||||
return None
|
||||
|
||||
def set(self, plugin_id, id, value):
|
||||
response = self.http.get('/:/plugins/%s/prefs/set' % plugin_id, query={
|
||||
id: self.to_setting_value(value, type(value))
|
||||
})
|
||||
|
||||
return response.status_code == 200
|
||||
|
||||
def to_setting_value(self, value, value_type=None):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if value_type is bool:
|
||||
return str(value).lower()
|
||||
|
||||
return str(value)
|
|
@ -1,40 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class PreferencesInterface(Interface):
|
||||
path = ':/prefs'
|
||||
|
||||
def get(self, id=None):
|
||||
response = self.http.get()
|
||||
|
||||
container = self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Setting': 'Setting'
|
||||
}))
|
||||
}))
|
||||
|
||||
if container is None or id is None:
|
||||
return container
|
||||
|
||||
for setting in container:
|
||||
if setting.id == id:
|
||||
return setting
|
||||
|
||||
return None
|
||||
|
||||
def set(self, id, value):
|
||||
response = self.http.put(query={
|
||||
id: self.to_setting_value(value, type(value))
|
||||
})
|
||||
|
||||
return response.status_code == 200
|
||||
|
||||
def to_setting_value(self, value, value_type=None):
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
if value_type is bool:
|
||||
return str(value).lower()
|
||||
|
||||
return str(value)
|
|
@ -1,59 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class RootInterface(Interface):
|
||||
def detail(self):
|
||||
response = self.http.get()
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('Detail', idict({
|
||||
'Directory': 'Directory'
|
||||
}))
|
||||
}))
|
||||
|
||||
def version(self):
|
||||
detail = self.detail()
|
||||
|
||||
if not detail:
|
||||
return None
|
||||
|
||||
return detail.version
|
||||
|
||||
def clients(self):
|
||||
response = self.http.get('clients')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('ClientContainer', idict({
|
||||
'Server': 'Client'
|
||||
}))
|
||||
}))
|
||||
|
||||
def players(self):
|
||||
pass
|
||||
|
||||
def servers(self):
|
||||
response = self.http.get('servers')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('Container', idict({
|
||||
'Server': 'Server'
|
||||
}))
|
||||
}))
|
||||
|
||||
def agents(self):
|
||||
response = self.http.get('system/agents')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('Container', idict({
|
||||
'Agent': 'Agent'
|
||||
}))
|
||||
}))
|
||||
|
||||
def primary_agent(self, guid, media_type):
|
||||
response = self.http.get('/system/agents/%s/config/%s' % (guid, media_type))
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('Container', idict({
|
||||
'Agent': 'Agent'
|
||||
}))
|
||||
}))
|
|
@ -1,69 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class SectionInterface(Interface):
|
||||
path = 'library/sections'
|
||||
|
||||
def all(self, key):
|
||||
response = self.http.get(key, 'all')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': {
|
||||
'artist': 'Artist',
|
||||
'show': 'Show'
|
||||
},
|
||||
'Video': {
|
||||
'movie': 'Movie'
|
||||
}
|
||||
}))
|
||||
}))
|
||||
|
||||
def recently_added(self, key):
|
||||
response = self.http.get(key, 'recentlyAdded')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': {
|
||||
'artist': 'Artist',
|
||||
'show': 'Show'
|
||||
},
|
||||
'Video': {
|
||||
'movie': 'Movie',
|
||||
'episode': 'Episode',
|
||||
'clip': 'Clip',
|
||||
}
|
||||
}))
|
||||
}))
|
||||
|
||||
def first_character(self, key, character=None):
|
||||
if character:
|
||||
response = self.http.get(key, ['firstCharacter', character])
|
||||
|
||||
# somehow plex wrongly returns items of other libraries when character is #
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': {
|
||||
'album': 'Album',
|
||||
'artist': 'Artist',
|
||||
|
||||
'season': 'Season',
|
||||
'show': 'Show'
|
||||
},
|
||||
'Video': {
|
||||
'episode': 'Episode',
|
||||
'clip': 'Clip',
|
||||
'movie': 'Movie'
|
||||
},
|
||||
'Track': 'Track'
|
||||
}))
|
||||
}))
|
||||
|
||||
response = self.http.get(key, 'firstCharacter')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('MediaContainer', idict({
|
||||
'Directory': 'Directory'
|
||||
}))
|
||||
}))
|
|
@ -1,21 +0,0 @@
|
|||
from plex.core.idict import idict
|
||||
from plex.interfaces.core.base import Interface
|
||||
|
||||
|
||||
class StatusInterface(Interface):
|
||||
path = 'status'
|
||||
|
||||
def sessions(self):
|
||||
response = self.http.get('sessions')
|
||||
|
||||
return self.parse(response, idict({
|
||||
'MediaContainer': ('SessionContainer', idict({
|
||||
'Track': 'Track',
|
||||
|
||||
'Video': {
|
||||
'episode': 'Episode',
|
||||
'clip': 'Clip',
|
||||
'movie': 'Movie'
|
||||
}
|
||||
}))
|
||||
}))
|
|
@ -1,36 +0,0 @@
|
|||
from plex.interfaces.core.base import Interface
|
||||
|
||||
TIMELINE_STATES = [
|
||||
'buffering',
|
||||
'paused',
|
||||
'playing',
|
||||
'stopped'
|
||||
]
|
||||
|
||||
|
||||
class TimelineInterface(Interface):
|
||||
path = ':/timeline'
|
||||
|
||||
def update(self, rating_key, state, time, duration, key=None, play_queue_item_id=None):
|
||||
if not rating_key:
|
||||
raise ValueError('Invalid "rating_key" parameter')
|
||||
|
||||
if time is None or duration is None:
|
||||
raise ValueError('"time" and "duration" parameters are required')
|
||||
|
||||
if state not in TIMELINE_STATES:
|
||||
raise ValueError('Unknown "state"')
|
||||
|
||||
response = self.http.get(query=[
|
||||
('ratingKey', rating_key),
|
||||
('state', state),
|
||||
|
||||
('time', time),
|
||||
('duration', duration),
|
||||
|
||||
# Optional parameters
|
||||
('key', key),
|
||||
('playQueueItemID', play_queue_item_id)
|
||||
])
|
||||
|
||||
return response and response.status_code == 200
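# A hedged usage sketch: report playback progress for an item. The rating key
# and timings below are illustrative, and the call needs a reachable server.
if __name__ == '__main__':
    from plex import Plex

    Plex[':/timeline'].update(rating_key=12345, state='playing',
                              time=60000, duration=1320000)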
|
|
@ -1,762 +0,0 @@
|
|||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2014 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.8.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(obj.__class__, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
|
||||
|
||||
def __init__(self, name):
|
||||
super(_LazyModule, self).__init__(name)
|
||||
self.__doc__ = self.__class__.__doc__
|
||||
|
||||
def __dir__(self):
|
||||
attrs = ["__doc__", "__name__"]
|
||||
attrs += [attr.name for attr in self._moved_attributes]
|
||||
return attrs
|
||||
|
||||
# Subclasses should override this
|
||||
_moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("_thread", "thread", "_thread"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [
|
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
_urllib_error_moved_attributes = [
|
||||
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||
]
|
||||
for attr in _urllib_error_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
_urllib_request_moved_attributes = [
|
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
_urllib_response_moved_attributes = [
|
||||
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||
]
|
||||
for attr in _urllib_response_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [
|
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||
]
|
||||
for attr in _urllib_robotparser_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_closure = "__closure__"
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_closure = "func_closure"
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_closure = operator.attrgetter(_func_closure)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.iterkeys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.itervalues(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.iteritems(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.iterlists(**kw))
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
def iterbytes(buf):
|
||||
return (ord(byte) for byte in buf)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function for Python 2.4 and 2.5."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
data = data.encode(fp.encoding, errors)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(meta):
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
|
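Illustrative note (not part of the diff): the add_metaclass helper above is the piece the Plex descriptor code later in this commit relies on, via `from plex.lib.six import add_metaclass`, to attach a metaclass in a way that works on both Python 2 and 3. A minimal sketch of that usage pattern, with hypothetical class names, assuming the bundled module is importable as shown in the files below:

from plex.lib.six import add_metaclass

class RegistryMeta(type):
    # Hypothetical metaclass that records every class built with it.
    registry = {}

    def __init__(cls, name, bases, attrs):
        super(RegistryMeta, cls).__init__(name, bases, attrs)
        RegistryMeta.registry[name] = cls

@add_metaclass(RegistryMeta)
class Base(object):
    # The decorator rebuilds Base with RegistryMeta as its metaclass,
    # so Base and every subclass are registered in RegistryMeta.registry.
    pass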
@@ -1,29 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class MediaType(Descriptor):
    name = Property
    media_type = Property("mediaType", type=int)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for t in cls.helpers.findall(node, 'MediaType'):
            _, obj = MediaType.construct(client, t, child=True)

            items.append(obj)

        return [], items


class Agent(Descriptor):
    name = Property
    enabled = Property(type=int)
    identifier = Property
    primary = Property(type=int)
    has_prefs = Property("hasPrefs", type=int)
    has_attribution = Property("hasAttribution", type=int)

    media_types = Property(resolver=lambda: MediaType.from_node)
@@ -1,32 +0,0 @@
from plex.core.helpers import to_iterable
from plex.objects.container import Container
from plex.objects.core.base import Property
from plex.objects.server import Server


class Client(Server):
    product = Property
    device_class = Property('deviceClass')

    protocol = Property
    protocol_version = Property('protocolVersion', type=int)
    protocol_capabilities = Property('protocolCapabilities')


class ClientContainer(Container):
    filter_passes = lambda _, allowed, value: allowed is None or value in allowed

    def filter(self, identifiers=None):
        identifiers = to_iterable(identifiers)

        for client in self:
            if not self.filter_passes(identifiers, client.machine_identifier):
                continue

            yield client

    def get(self, identifier):
        for item in self.filter(identifier):
            return item

        return None
@@ -1,7 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Container(Descriptor):
    size = Property(type=int)

    updated_at = Property('updatedAt', int)
@@ -1,168 +0,0 @@
from plex.lib.six import add_metaclass
from plex.interfaces.core.base import Interface

import logging
import traceback
import types

log = logging.getLogger(__name__)


class Property(object):
    helpers = Interface.helpers

    def __init__(self, name=None, type=None, resolver=None):
        self.name = name
        self.type = type
        self.resolver = resolver

    def value(self, client, key, node, keys_used):
        if self.resolver is not None:
            return self.value_func(client, node, keys_used)

        return self.value_node(key, node, keys_used)

    def value_node(self, key, node, keys_used):
        value = self.helpers.get(node, key)
        keys_used.append(key.lower())

        if value is None:
            return None

        return self.value_convert(value)

    def value_convert(self, value):
        if not self.type:
            return value

        types = self.type if type(self.type) is list else [self.type]
        result = value

        for target_type in types:
            try:
                result = target_type(result)
            except:
                return None

        return result

    def value_func(self, client, node, keys_used):
        func = self.resolver()

        try:
            keys, value = func(client, node)

            keys_used.extend([k.lower() for k in keys])
            return value
        except Exception as ex:
            log.warn('Exception in value function (%s): %s - %s', func, ex, traceback.format_exc())
            return None


class DescriptorMeta(type):
    def __init__(self, name, bases, attrs):
        super(DescriptorMeta, self).__init__(name, bases, attrs)

        Interface.object_map[self.__name__] = self


@add_metaclass(DescriptorMeta)
class Descriptor(Interface):
    attribute_map = None

    def __init__(self, client, path):
        super(Descriptor, self).__init__(client)
        self.path = path

        self._children = None

    @classmethod
    def properties(cls):
        keys = [k for k in dir(cls) if not k.startswith('_')]

        #log.debug('%s - keys: %s', self, keys)

        for key in keys:
            if key.startswith('_'):
                continue

            value = getattr(cls, key)

            if value is Property:
                yield key, Property(key)
            elif isinstance(value, Property):
                yield key, value

    @classmethod
    def construct(cls, client, node, attribute_map=None, path=None, child=False):
        if node is None:
            return [], None

        keys_available = [k.lower() for k in node.keys()]
        keys_used = []

        if attribute_map is None:
            attribute_map = cls.attribute_map or {}

        require_map = attribute_map.get('*') != '*'

        # Determine path from object "key"
        key = cls.helpers.get(node, 'key')

        if key is not None:
            path = key[:key.rfind('/')]

        # Construct object
        obj = cls(client, path)

        #log.debug('%s - Properties: %s', cls.__name__, list(obj.properties()))

        for key, prop in cls.properties():
            node_key = prop.name or key

            if attribute_map:
                if node_key in attribute_map:
                    node_key = attribute_map.get(node_key)
                elif require_map:
                    setattr(obj, key, None)
                    continue

            #log.debug('%s - Found property "%s"', cls.__name__, key)
            setattr(obj, key, prop.value(client, node_key, node, keys_used))

        # Post-fill transformation
        obj.__transform__()

        # Look for omitted keys
        omitted = list(set(keys_available) - set(keys_used))
        omitted.sort()

        if omitted and not child:
            log.warn('%s - Omitted attributes: %s', cls.__name__, ', '.join(omitted))

        return keys_used, obj

    def __transform__(self):
        pass

    def __iter__(self):
        return self._children or []

    def __getstate__(self):
        data = self.__dict__

        def build():
            for key, value in data.items():
                if isinstance(value, types.GeneratorType):
                    value = list(value)

                if key in ['client']:
                    continue

                yield key, value

        return dict(build())


class DescriptorMixin(Descriptor):
    pass
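Illustrative note (not part of the diff): the descriptor files that follow all use the Property/Descriptor machinery above. A bare Property reads the XML attribute of the same name, Property('xmlName', type) renames and casts it, and a resolver must return a callable that yields a (keys_consumed, value) pair, which is exactly what construct() and the from_node helpers below return. A hypothetical sketch of a descriptor following that contract (Photo and PhotoAlbum are invented names, not part of this commit):

from plex.objects.core.base import Descriptor, Property


class Photo(Descriptor):  # hypothetical descriptor
    id = Property(type=int)              # plain attribute, cast to int
    added_at = Property('addedAt', int)  # renamed XML attribute, cast to int

    @classmethod
    def from_node(cls, client, node):
        # Resolver contract: return (keys consumed, constructed value),
        # which is what construct() itself returns.
        return cls.construct(client, cls.helpers.find(node, 'Photo'), child=True)


class PhotoAlbum(Descriptor):  # hypothetical parent using a resolver
    photo = Property(resolver=lambda: Photo.from_node)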
@@ -1,89 +0,0 @@
import inspect
import logging
import os

log = logging.getLogger(__name__)

UNC_PREFIX = '\\\\?\\'


class ObjectManager(object):
    base_dir = None
    objects_dir = None
    objects_map = {}

    ignore_files = [
        '__init__.py'
    ]
    ignore_paths = [
        'plex\\objects\\core\\base.py',
        'plex\\objects\\core\\manager.py'
    ]

    @classmethod
    def discover(cls):
        cls.objects_dir = os.path.join(cls.base_dir, 'plex', 'objects')

        # Walk plex/objects directory
        for current, directories, files in os.walk(cls.objects_dir):

            # Iterate files, yield valid paths
            for filename in files:
                if not filename.endswith('.py'):
                    continue

                # Ensure filename is not in ignore list
                if filename in cls.ignore_files:
                    continue

                path = os.path.join(current, filename)

                # Ensure path is not in ignore list
                if not all([not path.endswith(p) for p in cls.ignore_paths]):
                    continue

                # Remove UNC prefix (if it exists)
                if path.startswith(UNC_PREFIX):
                    path = path[len(UNC_PREFIX):]

                path = os.path.relpath(path, cls.base_dir)
                name = os.path.splitext(path)[0].replace(os.path.sep, '.')

                yield path, name

    @classmethod
    def load(cls):
        for path, name in cls.discover():
            try:
                mod = __import__(name, fromlist=['*'])
            except Exception as ex:
                log.warn('Unable to import "%s" - %s', name, ex)
                continue

            # Get classes in module
            classes = [
                (key, getattr(mod, key)) for key in dir(mod)
                if not key.startswith('_')
            ]

            # Filter to module-specific classes
            classes = [
                (key, value) for (key, value) in classes
                if inspect.isclass(value) and value.__module__ == name
            ]

            yield classes

    @classmethod
    def construct(cls):
        log.debug('Loading descriptors...')

        cls.base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../', '..'))

        # Load modules, find descriptor classes
        for classes in cls.load():
            # Update object map
            for key, value in classes:
                cls.objects_map[key] = value

        log.debug('Loaded %s descriptors (%s)', len(cls.objects_map), ', '.join(sorted(cls.objects_map.keys())))
@@ -1,62 +0,0 @@
from plex.objects.core.base import Descriptor, Property
from plex.objects.container import Container


class Detail(Container):
    myplex = Property(resolver=lambda: Detail.construct_myplex)
    transcoder = Property(resolver=lambda: Detail.construct_transcoder)

    friendly_name = Property('friendlyName')

    machine_identifier = Property('machineIdentifier')
    version = Property

    platform = Property
    platform_version = Property('platformVersion')

    allow_camera_upload = Property('allowCameraUpload', [int, bool])
    allow_channel_access = Property('allowChannelAccess', [int, bool])
    allow_sync = Property('allowSync', [int, bool])

    certificate = Property(type=[int, bool])
    multiuser = Property(type=[int, bool])
    sync = Property(type=[int, bool])

    start_state = Property('startState')

    silverlight = Property('silverlightInstalled', [int, bool])
    soundflower = Property('soundflowerInstalled', [int, bool])
    flash = Property('flashInstalled', [int, bool])
    webkit = Property(type=[int, bool])

    cookie_parameters = Property('requestParametersInCookie', [int, bool])

    @staticmethod
    def construct_myplex(client, node):
        return MyPlexDetail.construct(client, node, child=True)

    @staticmethod
    def construct_transcoder(client, node):
        return TranscoderDetail.construct(client, node, child=True)


class MyPlexDetail(Descriptor):
    enabled = Property('myPlex', type=bool)

    username = Property('myPlexUsername')

    mapping_state = Property('myPlexMappingState')
    signin_state = Property('myPlexSigninState')

    subscription = Property('myPlexSubscription', [int, bool])


class TranscoderDetail(Descriptor):
    audio = Property('transcoderAudio', [int, bool])
    video = Property('transcoderVideo', [int, bool])

    video_bitrates = Property('transcoderVideoBitrates')
    video_qualities = Property('transcoderVideoQualities')
    video_resolutions = Property('transcoderVideoResolutions')

    active_video_sessions = Property('transcoderActiveVideoSessions', int)
@@ -1,16 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Directory(Descriptor):
    key = Property
    type = Property

    title = Property

    size = Property

    art = Property
    thumb = Property

    allow_sync = Property('allowSync', bool)
    updated_at = Property('updatedAt', int)
@@ -1,80 +0,0 @@
from plex.core.helpers import flatten, to_iterable
from plex.objects.core.base import Property
from plex.objects.container import Container
from plex.objects.library.section import Section


class MediaContainer(Container):
    section = Property(resolver=lambda: MediaContainer.construct_section)

    title1 = Property
    title2 = Property

    identifier = Property

    art = Property
    thumb = Property

    view_group = Property('viewGroup')
    view_mode = Property('viewMode', int)

    media_tag_prefix = Property('mediaTagPrefix')
    media_tag_version = Property('mediaTagVersion')

    size = Property('size', int)
    total_size = Property('totalSize', int)

    allow_sync = Property('allowSync', bool)
    mixed_parents = Property('mixedParents', bool)
    no_cache = Property('nocache', bool)
    sort_asc = Property('sortAsc', bool)

    @staticmethod
    def construct_section(client, node):
        attribute_map = {
            'key': 'librarySectionID',
            'uuid': 'librarySectionUUID',
            'title': 'librarySectionTitle'
        }

        return Section.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(MediaContainer, self).__iter__():
            item.section = self.section

            yield item


class ChildrenContainer(MediaContainer):
    pass


class LeavesContainer(MediaContainer):
    pass


class SectionContainer(MediaContainer):
    filter_passes = lambda _, allowed, value: allowed is None or value in allowed

    def filter(self, types=None, keys=None, titles=None):
        types = to_iterable(types)
        keys = to_iterable(keys)

        titles = to_iterable(titles)

        if titles:
            # Flatten titles
            titles = [flatten(x) for x in titles]

        for section in self:
            if not self.filter_passes(types, section.type):
                continue

            if not self.filter_passes(keys, section.key):
                continue

            if not self.filter_passes(titles, flatten(section.title)):
                continue

            yield section
@@ -1,10 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Country(Descriptor):
    id = Property(type=int)
    tag = Property

    @classmethod
    def from_node(cls, client, node):
        return cls.construct(client, cls.helpers.find(node, 'Country'), child=True)
@@ -1,10 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Director(Descriptor):
    id = Property(type=int)
    tag = Property

    @classmethod
    def from_node(cls, client, node):
        return cls.construct(client, cls.helpers.find(node, 'Director'), child=True)
@@ -1,17 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Genre(Descriptor):
    id = Property(type=int)
    tag = Property

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Genre'):
            _, obj = Genre.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,20 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Role(Descriptor):
    id = Property(type=int)
    tag = Property

    role = Property
    thumb = Property

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Role'):
            _, obj = Role.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,17 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Writer(Descriptor):
    id = Property(type=int)
    tag = Property

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Writer'):
            _, obj = Writer.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,6 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Location(Descriptor):
    id = Property
    path = Property
@@ -1,39 +0,0 @@
from plex.objects.core.base import Descriptor, Property
from plex.objects.library.part import Part


class Media(Descriptor):
    parts = Property(resolver=lambda: Part.from_node)

    id = Property(type=int)

    video_codec = Property('videoCodec')
    video_frame_rate = Property('videoFrameRate')
    video_resolution = Property('videoResolution')

    audio_channels = Property('audioChannels', type=int)
    audio_codec = Property('audioCodec')

    container = Property

    width = Property(type=int)
    height = Property(type=int)

    aspect_ratio = Property('aspectRatio', type=float)
    bitrate = Property(type=int)
    duration = Property(type=int)

    #@classmethod
    #def from_node(cls, client, node):
    #    return cls.construct(client, cls.helpers.find(node, 'Media'), child=True)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Media'):
            _, obj = Media.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,68 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.directory import Directory
from plex.objects.library.container import ChildrenContainer
from plex.objects.library.extra.genre import Genre
from plex.objects.library.metadata.base import Metadata
from plex.objects.library.metadata.artist import Artist
from plex.objects.mixins.rate import RateMixin


class Album(Directory, Metadata, RateMixin):
    artist = Property(resolver=lambda: Album.construct_artist)
    genres = Property(resolver=lambda: Genre.from_node)

    index = Property(type=int)

    year = Property(type=int)
    originally_available_at = Property('originallyAvailableAt')

    track_count = Property('leafCount', int)
    viewed_track_count = Property('viewedLeafCount', int)

    def children(self):
        return self.client['library/metadata'].children(self.rating_key)

    @staticmethod
    def construct_artist(client, node):
        attribute_map = {
            'key': 'parentKey',
            'ratingKey': 'parentRatingKey',

            'title': 'parentTitle',
            'thumb': 'parentThumb'
        }

        return Artist.construct(client, node, attribute_map, child=True)


class AlbumChildrenContainer(ChildrenContainer):
    artist = Property(resolver=lambda: AlbumChildrenContainer.construct_artist)
    album = Property(resolver=lambda: AlbumChildrenContainer.construct_album)

    key = Property

    @staticmethod
    def construct_artist(client, node):
        attribute_map = {
            'title': 'grandparentTitle'
        }

        return Artist.construct(client, node, attribute_map, child=True)

    @staticmethod
    def construct_album(client, node):
        attribute_map = {
            'index': 'parentIndex',

            'title': 'parentTitle',
            'year' : 'parentYear'
        }

        return Album.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(ChildrenContainer, self).__iter__():
            item.artist = self.artist
            item.album = self.album

            yield item
@@ -1,58 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.directory import Directory
from plex.objects.library.container import LeavesContainer, ChildrenContainer
from plex.objects.library.metadata.base import Metadata
from plex.objects.mixins.rate import RateMixin


class Artist(Directory, Metadata, RateMixin):
    index = Property(type=int)

    def all_leaves(self):
        return self.client['library/metadata'].all_leaves(self.rating_key)

    def children(self):
        return self.client['library/metadata'].children(self.rating_key)


class ArtistChildrenContainer(ChildrenContainer):
    artist = Property(resolver=lambda: ArtistChildrenContainer.construct_artist)

    key = Property
    summary = Property

    @staticmethod
    def construct_artist(client, node):
        attribute_map = {
            'index': 'parentIndex',
            'title': 'parentTitle'
        }

        return Artist.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(ChildrenContainer, self).__iter__():
            item.artist = self.artist

            yield item


class ArtistLeavesContainer(LeavesContainer):
    artist = Property(resolver=lambda: ArtistLeavesContainer.construct_artist)

    key = Property

    @staticmethod
    def construct_artist(client, node):
        attribute_map = {
            'index': 'parentIndex',
            'title': 'parentTitle'
        }

        return Artist.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(LeavesContainer, self).__iter__():
            item.artist = self.artist

            yield item
@@ -1,38 +0,0 @@
from plex.objects.core.base import Descriptor, Property
from plex.objects.library.section import Section


class Metadata(Descriptor):
    section = Property(resolver=lambda: Metadata.construct_section)

    # somehow section doesn't resolve on onDeck, add key manually
    section_key = Property('librarySectionID')

    key = Property
    guid = Property
    rating_key = Property('ratingKey')
    extra_key = Property('primaryExtraKey')

    title = Property
    title_sort = Property('titleSort')
    title_original = Property('originalTitle')

    summary = Property

    thumb = Property

    source_title = Property('sourceTitle')

    added_at = Property('addedAt', int)
    last_viewed_at = Property('lastViewedAt', int)

    @staticmethod
    def construct_section(client, node):
        attribute_map = {
            'key': 'librarySectionID',
            'uuid': 'librarySectionUUID',
            'title': 'librarySectionTitle'
        }

        return Section.construct(client, node, attribute_map, child=True)
@@ -1,9 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.library.metadata.base import Metadata
from plex.objects.library.video import Video


class Clip(Video, Metadata):
    extra_type = Property('extraType', type=int)

    index = Property(type=int)
@@ -1,48 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.library.metadata.season import Season
from plex.objects.library.metadata.show import Show
from plex.objects.library.metadata.base import Metadata
from plex.objects.library.video import Video
from plex.objects.mixins.rate import RateMixin
from plex.objects.mixins.scrobble import ScrobbleMixin


class Episode(Video, Metadata, RateMixin, ScrobbleMixin):
    show = Property(resolver=lambda: Episode.construct_show)
    season = Property(resolver=lambda: Episode.construct_season)

    index = Property(type=int)

    studio = Property
    audience_rating = Property('audienceRating', float)
    content_rating = Property('contentRating')

    year = Property(type=int)
    originally_available_at = Property('originallyAvailableAt')

    @staticmethod
    def construct_show(client, node):
        attribute_map = {
            'key': 'grandparentKey',
            'ratingKey': 'grandparentRatingKey',

            'title': 'grandparentTitle',

            'art': 'grandparentArt',
            'theme': 'grandparentTheme',
            'thumb': 'grandparentThumb'
        }

        return Show.construct(client, node, attribute_map, child=True)

    @staticmethod
    def construct_season(client, node):
        attribute_map = {
            'index': 'parentIndex',
            'key': 'parentKey',
            'ratingKey': 'parentRatingKey',

            'thumb': 'parentThumb'
        }

        return Season.construct(client, node, attribute_map, child=True)
@@ -1,22 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.library.extra.country import Country
from plex.objects.library.extra.genre import Genre
from plex.objects.library.extra.role import Role
from plex.objects.library.metadata.base import Metadata
from plex.objects.library.video import Video
from plex.objects.mixins.rate import RateMixin
from plex.objects.mixins.scrobble import ScrobbleMixin


class Movie(Video, Metadata, RateMixin, ScrobbleMixin):
    country = Property(resolver=lambda: Country.from_node)
    genres = Property(resolver=lambda: Genre.from_node)
    roles = Property(resolver=lambda: Role.from_node)

    studio = Property
    content_rating = Property('contentRating')

    year = Property(type=int)
    originally_available_at = Property('originallyAvailableAt')

    tagline = Property
@@ -1,78 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.library.container import ChildrenContainer
from plex.objects.library.metadata.show import Show
from plex.objects.library.metadata.base import Metadata
from plex.objects.library.video import Directory


class Season(Directory, Metadata):
    show = Property(resolver=lambda: Season.construct_show)

    index = Property(type=int)

    banner = Property
    theme = Property

    year = Property(type=int)

    episode_count = Property('leafCount', int)
    viewed_episode_count = Property('viewedLeafCount', int)

    view_count = Property('viewCount', type=int)

    def children(self):
        return self.client['library/metadata'].children(self.rating_key)

    @staticmethod
    def construct_show(client, node):
        attribute_map = {
            'index' : 'parentIndex',
            'key' : 'parentKey',
            'ratingKey': 'parentRatingKey',

            'title' : 'parentTitle',
            'summary' : 'parentSummary',
            'thumb' : 'parentThumb',

            'theme' : 'parentTheme'
        }

        return Show.construct(client, node, attribute_map, child=True)


class SeasonChildrenContainer(ChildrenContainer):
    show = Property(resolver=lambda: SeasonChildrenContainer.construct_show)
    season = Property(resolver=lambda: SeasonChildrenContainer.construct_season)

    key = Property

    banner = Property
    theme = Property

    @staticmethod
    def construct_show(client, node):
        attribute_map = {
            'title' : 'grandparentTitle',

            'contentRating': 'grandparentContentRating',
            'studio' : 'grandparentStudio',
            'theme' : 'grandparentTheme'
        }

        return Show.construct(client, node, attribute_map, child=True)

    @staticmethod
    def construct_season(client, node):
        attribute_map = {
            'index': 'parentIndex',
            'title': 'parentTitle'
        }

        return Season.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(ChildrenContainer, self).__iter__():
            item.show = self.show
            item.season = self.season

            yield item
@@ -1,85 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.directory import Directory
from plex.objects.library.container import LeavesContainer, ChildrenContainer
from plex.objects.library.metadata.base import Metadata
from plex.objects.mixins.rate import RateMixin


class Show(Directory, Metadata, RateMixin):
    index = Property(type=int)
    duration = Property(type=int)

    studio = Property
    content_rating = Property('contentRating')

    banner = Property
    theme = Property

    year = Property(type=int)
    originally_available_at = Property('originallyAvailableAt')

    season_count = Property('childCount', int)

    episode_count = Property('leafCount', int)
    viewed_episode_count = Property('viewedLeafCount', int)

    view_count = Property('viewCount', int)

    def all_leaves(self):
        return self.client['library/metadata'].all_leaves(self.rating_key)

    def children(self):
        return self.client['library/metadata'].children(self.rating_key)


class ShowChildrenContainer(ChildrenContainer):
    show = Property(resolver=lambda: ShowLeavesContainer.construct_show)

    key = Property
    summary = Property

    banner = Property
    theme = Property

    @staticmethod
    def construct_show(client, node):
        attribute_map = {
            'index': 'parentIndex',

            'title': 'parentTitle',
            'year' : 'parentYear'
        }

        return Show.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(ChildrenContainer, self).__iter__():
            item.show = self.show

            yield item


class ShowLeavesContainer(LeavesContainer):
    show = Property(resolver=lambda: ShowLeavesContainer.construct_show)

    key = Property

    banner = Property
    theme = Property

    @staticmethod
    def construct_show(client, node):
        attribute_map = {
            'index': 'parentIndex',

            'title': 'parentTitle',
            'year' : 'parentYear'
        }

        return Show.construct(client, node, attribute_map, child=True)

    def __iter__(self):
        for item in super(LeavesContainer, self).__iter__():
            item.show = self.show

            yield item
@@ -1,47 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.directory import Directory
from plex.objects.library.metadata.album import Album
from plex.objects.library.metadata.artist import Artist
from plex.objects.library.metadata.base import Metadata
from plex.objects.mixins.scrobble import ScrobbleMixin
from plex.objects.mixins.session import SessionMixin


class Track(Directory, Metadata, SessionMixin, ScrobbleMixin):
    artist = Property(resolver=lambda: Track.construct_artist)
    album = Property(resolver=lambda: Track.construct_album)

    index = Property(type=int)

    view_count = Property('viewCount', type=int)
    view_offset = Property('viewOffset', type=int)

    duration = Property(type=int)

    @staticmethod
    def construct_artist(client, node):
        attribute_map = {
            'key': 'grandparentKey',
            'ratingKey': 'grandparentRatingKey',

            'title': 'grandparentTitle',

            'thumb': 'grandparentThumb'
        }

        return Artist.construct(client, node, attribute_map, child=True)

    @staticmethod
    def construct_album(client, node):
        attribute_map = {
            'index': 'parentIndex',
            'key': 'parentKey',
            'ratingKey': 'parentRatingKey',

            'title': 'parentTitle',
            'year': 'parentYear',

            'thumb': 'parentThumb'
        }

        return Album.construct(client, node, attribute_map, child=True)
@@ -1,26 +0,0 @@
from plex.objects.core.base import Descriptor, Property
from plex.objects.library.stream import Stream


class Part(Descriptor):
    streams = Property(resolver=lambda: Stream.from_node)

    id = Property(type=int)
    key = Property

    file = Property
    container = Property

    duration = Property(type=int)
    size = Property(type=int)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Part'):
            _, obj = Part.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,37 +0,0 @@
from plex.core.idict import idict
from plex.objects.core.base import Property
from plex.objects.directory import Directory


class Section(Directory):
    uuid = Property

    filters = Property(type=bool)
    refreshing = Property(type=bool)

    agent = Property
    scanner = Property
    language = Property

    composite = Property
    type = Property

    created_at = Property('createdAt', int)

    def __transform__(self):
        self.path = '/library/sections/%s' % self.key

    def all(self):
        response = self.http.get('all')

        return self.parse(response, idict({
            'MediaContainer': ('MediaContainer', idict({
                'Directory': {
                    'artist': 'Artist',
                    'show': 'Show'
                },
                'Video': {
                    'movie': 'Movie'
                }
            }))
        }))
@@ -1,56 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Stream(Descriptor):
    id = Property(type=int)
    index = Property(type=int)

    stream_key = Property('key')

    stream_type = Property('streamType', type=int)
    selected = Property(type=bool)

    forced = Property(type=bool)
    default = Property(type=bool)

    title = Property
    duration = Property(type=int)

    codec = Property
    codec_id = Property('codecID')

    bit_depth = Property('bitDepth', type=int)
    chroma_subsampling = Property('chromaSubsampling')
    color_space = Property('colorSpace')

    width = Property(type=int)
    height = Property(type=int)

    bitrate = Property(type=int)
    bitrate_mode = Property('bitrateMode')

    channels = Property(type=int)
    sampling_rate = Property('samplingRate', type=int)

    frame_rate = Property('frameRate')
    profile = Property
    scan_type = Property('scanType')

    language = Property('language')
    language_code = Property('languageCode')

    bvop = Property(type=int)
    gmc = Property(type=int)
    level = Property(type=int)
    qpel = Property(type=int)

    @classmethod
    def from_node(cls, client, node):
        items = []

        for genre in cls.helpers.findall(node, 'Stream'):
            _, obj = Stream.construct(client, genre, child=True)

            items.append(obj)

        return [], items
@@ -1,22 +0,0 @@
from plex.objects.core.base import Property
from plex.objects.directory import Directory
from plex.objects.library.extra.director import Director
from plex.objects.library.extra.writer import Writer
from plex.objects.library.media import Media
from plex.objects.mixins.session import SessionMixin


class Video(Directory, SessionMixin):
    director = Property(resolver=lambda: Director.from_node)
    media = Property(resolver=lambda: Media.from_node)
    writers = Property(resolver=lambda: Writer.from_node)

    view_count = Property('viewCount', type=int)
    view_offset = Property('viewOffset', type=int)

    chapter_source = Property('chapterSource')
    duration = Property(type=int)

    @property
    def seen(self):
        return self.view_count and self.view_count >= 1
@@ -1,10 +0,0 @@
from plex import Plex
from plex.objects.core.base import Property, DescriptorMixin


class RateMixin(DescriptorMixin):
    rating = Property(type=float)
    user_rating = Property('userRating', type=float)

    def rate(self, value):
        return Plex['library'].rate(self.rating_key, value)
@@ -1,7 +0,0 @@
from plex import Plex
from plex.objects.core.base import DescriptorMixin


class ScrobbleMixin(DescriptorMixin):
    def scrobble(self):
        return Plex['library'].scrobble(self.rating_key)
@@ -1,32 +0,0 @@
from plex.objects.core.base import Descriptor, Property, DescriptorMixin
from plex.objects.player import Player
from plex.objects.transcode_session import TranscodeSession
from plex.objects.user import User


class SessionMixin(DescriptorMixin):
    session = Property(resolver=lambda: SessionMixin.construct_session)

    @staticmethod
    def construct_session(client, node):
        return Session.construct(client, node, child=True)


class Session(Descriptor):
    key = Property('sessionKey', int)

    user = Property(resolver=lambda: Session.construct_user)
    player = Property(resolver=lambda: Session.construct_player)
    transcode_session = Property(resolver=lambda: Session.construct_transcode_session)

    @classmethod
    def construct_user(cls, client, node):
        return User.construct(client, cls.helpers.find(node, 'User'), child=True)

    @classmethod
    def construct_player(cls, client, node):
        return Player.construct(client, cls.helpers.find(node, 'Player'), child=True)

    @classmethod
    def construct_transcode_session(cls, client, node):
        return TranscodeSession.construct(client, cls.helpers.find(node, 'TranscodeSession'), child=True)
@@ -1,11 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Player(Descriptor):
    title = Property
    machine_identifier = Property('machineIdentifier')

    state = Property

    platform = Property
    product = Property
@@ -1,12 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Server(Descriptor):
    name = Property
    host = Property

    address = Property
    port = Property(type=int)

    machine_identifier = Property('machineIdentifier')
    version = Property

@@ -1,21 +0,0 @@
from plex.core.helpers import to_iterable
from plex.objects.library.container import MediaContainer


class SessionContainer(MediaContainer):
    filter_passes = lambda _, allowed, value: allowed is None or value in allowed

    def filter(self, keys=None):
        keys = to_iterable(keys)

        for item in self:
            if not self.filter_passes(keys, item.session.key):
                continue

            yield item

    def get(self, key):
        for item in self.filter(key):
            return item

        return None
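For illustration only, a minimal standalone sketch of the filtering rule above, using plain Python objects instead of the Plex descriptors; `FakeItem` is a stand-in, and `to_iterable` is assumed (based on how it is used above) to leave None as None and wrap scalars in a list:

def to_iterable(value):
    # Assumed behaviour: None stays None, scalars become a single-item list
    if value is None:
        return None
    return value if isinstance(value, (list, tuple)) else [value]


class FakeItem(object):
    def __init__(self, key):
        self.session = type('S', (), {'key': key})()


items = [FakeItem(1), FakeItem(2), FakeItem(3)]
allowed = to_iterable(2)

# Same rule as SessionContainer.filter_passes: allowed is None, or the key is in allowed
filtered = [i for i in items if allowed is None or i.session.key in allowed]
print([i.session.key for i in filtered])  # [2]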

@@ -1,53 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class Setting(Descriptor):
    id = Property

    label = Property
    summary = Property

    type = Property
    group = Property

    value = Property(resolver=lambda: Setting.parse_value)
    default = Property(resolver=lambda: Setting.parse_default)
    options = Property('enumValues', resolver=lambda: Setting.parse_options)

    hidden = Property(type=[int, bool])
    advanced = Property(type=[int, bool])

    @classmethod
    def parse_value(cls, client, node):
        type = cls.helpers.get(node, 'type')
        value = cls.helpers.get(node, 'value')

        return ['value'], Setting.convert(type, value)

    @classmethod
    def parse_default(cls, client, node):
        type = cls.helpers.get(node, 'type')
        default = cls.helpers.get(node, 'default')

        return ['default'], Setting.convert(type, default)

    @classmethod
    def parse_options(cls, client, node):
        value = cls.helpers.get(node, 'enumValues')

        if not value:
            return [], None

        return ['enumValues'], [
            tuple(option.split(':', 2)) for option in value.split('|')
        ]

    @staticmethod
    def convert(type, value):
        if type == 'bool':
            value = value.lower()
            value = value == 'true'
        elif type == 'int':
            value = int(value)

        return value

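The `convert` helper above maps the string values the server returns into native Python types; a few illustrative calls, assuming the `Setting` class above is importable (no server required):

assert Setting.convert('bool', 'True') is True       # 'true'/'false' strings, any case
assert Setting.convert('bool', 'false') is False
assert Setting.convert('int', '32400') == 32400      # numeric strings become ints
assert Setting.convert('text', 'plex.tv') == 'plex.tv'  # other types pass through unchanged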

@@ -1,24 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class TranscodeSession(Descriptor):
    key = Property

    progress = Property(type=float)
    speed = Property(type=float)
    duration = Property(type=int)

    protocol = Property
    throttled = Property(type=int)  # TODO this needs to cast: str -> int -> bool

    container = Property('container')

    video_codec = Property('videoCodec')
    video_decision = Property('videoDecision')

    audio_codec = Property('audioCodec')
    audio_channels = Property('audioChannels', int)
    audio_decision = Property('audioDecision')

    width = Property(type=int)
    height = Property(type=int)

@@ -1,8 +0,0 @@
from plex.objects.core.base import Descriptor, Property


class User(Descriptor):
    id = Property(type=int)

    title = Property
    thumb = Property

@@ -1,121 +0,0 @@
from plex.lib.six.moves.urllib_parse import urlencode

from requests import Request
import json


class PlexRequest(object):
    def __init__(self, client, **kwargs):
        self.client = client
        self.kwargs = kwargs

        self.request = None

        # Parsed Attributes
        self.path = None
        self.params = None

        self.data = None
        self.headers = None
        self.method = None

    def prepare(self):
        self.request = Request()

        self.transform_parameters()
        self.request.url = self.construct_url()

        self.request.data = self.transform_data()
        self.request.headers = self.transform_headers()
        self.request.method = self.transform_method()

        return self.request.prepare()

    def construct_url(self):
        """Construct a full plex request URI, with `params`."""
        path = [self.path]
        path.extend([str(x) for x in self.params])

        url = self.client.base_url + '/'.join(x for x in path if x)
        query = self.kwargs.get('query')

        if query:
            # Dict -> List
            if type(query) is dict:
                query = query.items()

            # Remove items with `None` value
            query = [
                (k, v) for (k, v) in query
                if v is not None
            ]

            # Encode query, append to URL
            url += '?' + urlencode(query)

        return url

    def transform_parameters(self):
        # Transform `path`
        self.path = self.kwargs.get('path')

        if not self.path.startswith('/'):
            self.path = '/' + self.path

        if self.path.endswith('/'):
            self.path = self.path[:-1]

        # Transform `params` into list
        self.params = self.kwargs.get('params') or []

        if type(self.params) is not list:
            self.params = [self.params]

    def transform_data(self):
        self.data = self.kwargs.get('data')

        if self.data is None:
            return None

        return json.dumps(self.data)

    def transform_headers(self):
        self.headers = self.kwargs.get('headers') or {}

        # Authentication
        self.headers['X-Plex-Token'] = self.client.configuration['authentication.token']

        # Client
        self.headers['X-Plex-Client-Identifier'] = self.client.configuration['client.identifier']

        self.headers['X-Plex-Product'] = self.client.configuration['client.product']
        self.headers['X-Plex-Version'] = self.client.configuration['client.version']

        # Device
        self.headers['X-Device'] = self.client.configuration['device.system']
        self.headers['X-Device-Name'] = self.client.configuration['device.name']

        # Platform
        self.headers['X-Platform'] = self.client.configuration['platform.name']
        self.headers['X-Platform-Version'] = self.client.configuration['platform.version']

        # Update with extra headers from configuration
        c_headers = self.client.configuration['headers']

        if c_headers:
            self.headers.update(c_headers)

        # Only return headers with valid values
        return dict([
            (k, v) for (k, v) in self.headers.items()
            if v is not None
        ])

    def transform_method(self):
        self.method = self.kwargs.get('method')

        # Pick `method` (if not provided)
        if not self.method:
            self.method = 'POST' if self.data else 'GET'

        return self.method

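A standalone sketch of the URL/query handling in `construct_url` above (dict to item list, drop `None` values, then `urlencode`), using only the Python 3 standard library; the base URL, path segments and query values here are illustrative stand-ins:

from urllib.parse import urlencode

base_url = 'http://127.0.0.1:32400'                 # stand-in for client.base_url
path = ['/library/sections', '2', 'all']            # `path` plus stringified `params`, as built above

url = base_url + '/'.join(x for x in path if x)

query = {'type': 1, 'sort': None}
query = [(k, v) for (k, v) in query.items() if v is not None]  # None values are dropped

if query:
    url += '?' + urlencode(query)

print(url)  # http://127.0.0.1:32400/library/sections/2/all?type=1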

@@ -1,17 +0,0 @@
import jsonpickle


class Serializer(object):
    @classmethod
    def encode(cls, value):
        return jsonpickle.encode(value)

    @classmethod
    def decode(cls, value, client=None):
        try:
            result = jsonpickle.decode(value)
            result.client = client

            return result
        except:
            return None

@@ -1,15 +0,0 @@
import logging
import traceback

log = logging.getLogger(__name__)

__version__ = '0.7.1'


try:
    from plex_activity import activity

    # Global objects (using defaults)
    Activity = activity.Activity()
except Exception as ex:
    log.warn('Unable to import submodules: %s - %s', ex, traceback.format_exc())

@@ -1,96 +0,0 @@
from plex.lib import six as six
from plex.lib.six.moves import xrange
from plex_activity.sources import Logging, WebSocket

from pyemitter import Emitter
import logging

log = logging.getLogger(__name__)


class ActivityMeta(type):
    def __getitem__(self, key):
        for (weight, source) in self.registered:
            if source.name == key:
                return source

        return None


@six.add_metaclass(ActivityMeta)
class Activity(Emitter):
    registered = []

    def __init__(self, sources=None):
        self.available = self.get_available(sources)
        self.enabled = []

    def start(self, sources=None):
        # TODO async start

        if sources is not None:
            self.available = self.get_available(sources)

        # Test methods until an available method is found
        for weight, source in self.available:
            if weight is None:
                # None = always start
                self.start_source(source)
            elif source.test():
                # Test passed
                self.start_source(source)
            else:
                log.info('activity source "%s" is not available', source.name)

        log.info(
            'Finished starting %s method(s): %s',
            len(self.enabled),
            ', '.join([('"%s"' % source.name) for source in self.enabled])
        )

    def start_source(self, source):
        instance = source(self)
        instance.start()

        self.enabled.append(instance)

    def __getitem__(self, key):
        for (weight, source) in self.registered:
            if source.name == key:
                return source

        return None

    @classmethod
    def get_available(cls, sources):
        if sources:
            return [
                (weight, source) for (weight, source) in cls.registered
                if source.name in sources
            ]

        return cls.registered

    @classmethod
    def register(cls, source, weight=None):
        item = (weight, source)

        # weight = None, highest priority
        if weight is None:
            cls.registered.insert(0, item)
            return

        # insert in DESC order
        for x in xrange(len(cls.registered)):
            w, _ = cls.registered[x]

            if w is not None and w < weight:
                cls.registered.insert(x, item)
                return

        # otherwise append
        cls.registered.append(item)

# Register activity sources
Activity.register(WebSocket)
Activity.register(Logging, weight=1)
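The `register` ordering above keeps weight-`None` sources at the front and the rest in descending weight order, which is why the WebSocket source is tried before the Logging source. A small standalone sketch of the same insertion rule, with dummy source names rather than the real classes:

registered = []

def register(source, weight=None):
    item = (weight, source)

    if weight is None:                # None = highest priority, goes first
        registered.insert(0, item)
        return

    for x in range(len(registered)):  # insert before the first smaller weight
        w, _ = registered[x]
        if w is not None and w < weight:
            registered.insert(x, item)
            return

    registered.append(item)

register('websocket')                 # weight=None
register('logging', weight=1)
register('custom', weight=5)

print(registered)  # [(None, 'websocket'), (5, 'custom'), (1, 'logging')]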

@@ -1,44 +0,0 @@
def str_format(s, *args, **kwargs):
    """Return a formatted version of S, using substitutions from args and kwargs.

    (Roughly matches the functionality of str.format but ensures compatibility with Python 2.5)
    """

    args = list(args)

    x = 0
    while x < len(s):
        # Skip non-start token characters
        if s[x] != '{':
            x += 1
            continue

        end_pos = s.find('}', x)

        # If end character can't be found, move to next character
        if end_pos == -1:
            x += 1
            continue

        name = s[x + 1:end_pos]

        # Ensure token name is alpha numeric
        if not name.isalnum():
            x += 1
            continue

        # Try find value for token
        value = args.pop(0) if args else kwargs.get(name)

        if value:
            value = str(value)

            # Replace token with value
            s = s[:x] + value + s[end_pos + 1:]

            # Update current position
            x = x + len(value) - 1

        x += 1

    return s
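For reference, the token substitution above behaves like a restricted `str.format`; a couple of illustrative calls, assuming `str_format` above is importable:

# Named tokens are filled from kwargs ...
print(str_format('{method} {path}', method='GET', path='/status'))
# -> GET /status

# ... and positional arguments are consumed left to right
print(str_format('Request: {a} {b}', 'PUT', '/library'))
# -> Request: PUT /library

Because tokens whose contents are not alphanumeric are skipped, the log parsers further down can embed this into regular expressions without mangling the regex syntax.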

@@ -1,4 +0,0 @@
from plex_activity.sources.s_logging import Logging
from plex_activity.sources.s_websocket import WebSocket

__all__ = ['Logging', 'WebSocket']

@@ -1,24 +0,0 @@
from pyemitter import Emitter
from threading import Thread
import logging

log = logging.getLogger(__name__)


class Source(Emitter):
    name = None

    def __init__(self):
        self.thread = Thread(target=self._run_wrapper)

    def start(self):
        self.thread.start()

    def run(self):
        pass

    def _run_wrapper(self):
        try:
            self.run()
        except Exception as ex:
            log.error('Exception raised in "%s" activity source: %s', self.name, ex, exc_info=True)

@@ -1,3 +0,0 @@
from plex_activity.sources.s_logging.main import Logging

__all__ = ['Logging']

@@ -1,249 +0,0 @@
from plex import Plex
from plex_activity.sources.base import Source
from plex_activity.sources.s_logging.parsers import NowPlayingParser, ScrobbleParser

from asio import ASIO
from asio.file import SEEK_ORIGIN_CURRENT
from io import BufferedReader
import inspect
import logging
import os
import platform
import time

log = logging.getLogger(__name__)

PATH_HINTS = {
    'Darwin': [
        lambda: os.path.join(os.getenv('HOME'), 'Library/Logs/Plex Media Server.log')
    ],
    'FreeBSD': [
        # FreeBSD
        '/usr/local/plexdata/Plex Media Server/Logs/Plex Media Server.log',
        '/usr/local/plexdata-plexpass/Plex Media Server/Logs/Plex Media Server.log',

        # FreeNAS
        '/usr/pbi/plexmediaserver-amd64/plexdata/Plex Media Server/Logs/Plex Media Server.log',
        '/var/db/plexdata/Plex Media Server/Logs/Plex Media Server.log',
        '/var/db/plexdata-plexpass/Plex Media Server/Logs/Plex Media Server.log'
    ],
    'Linux': [
        # QNAP
        '/share/HDA_DATA/.qpkg/PlexMediaServer/Library/Plex Media Server/Logs/Plex Media Server.log',

        # Debian
        '/var/lib/plexmediaserver/Library/Application Support/Plex Media Server/Logs/Plex Media Server.log'
    ],
    'Windows': [
        lambda: os.path.join(os.getenv('LOCALAPPDATA'), 'Plex Media Server\\Logs\\Plex Media Server.log')
    ]
}


class Logging(Source):
    name = 'logging'
    events = [
        'logging.playing',
        'logging.action.played',
        'logging.action.unplayed'
    ]

    parsers = []

    path = None
    path_hints = PATH_HINTS

    def __init__(self, activity):
        super(Logging, self).__init__()

        self.parsers = [p(self) for p in Logging.parsers]

        self.file = None
        self.reader = None

        self.path = None

        # Pipe events to the main activity instance
        self.pipe(self.events, activity)

    def run(self):
        line = self.read_line_retry(ping=True, stale_sleep=0.5)
        if not line:
            log.info('Unable to read log file')
            return

        log.debug('Ready')

        while True:
            # Grab the next line of the log
            line = self.read_line_retry(ping=True)

            if line:
                self.process(line)
            else:
                log.info('Unable to read log file')

    def process(self, line):
        for parser in self.parsers:
            if parser.process(line):
                return True

        return False

    def read_line(self):
        if not self.file:
            path = self.get_path()
            if not path:
                raise Exception('Unable to find the location of "Plex Media Server.log"')

            # Open file
            self.file = ASIO.open(path, opener=False)
            self.file.seek(self.file.get_size(), SEEK_ORIGIN_CURRENT)

            # Create buffered reader
            self.reader = BufferedReader(self.file)

            self.path = self.file.get_path()
            log.info('Opened file path: "%s"' % self.path)

        return self.reader.readline()

    def read_line_retry(self, timeout=60, ping=False, stale_sleep=1.0):
        line = None
        stale_since = None

        while not line:
            line = self.read_line()

            if line:
                stale_since = None
                time.sleep(0.05)
                break

            if stale_since is None:
                stale_since = time.time()
                time.sleep(stale_sleep)
                continue
            elif (time.time() - stale_since) > timeout:
                return None
            elif (time.time() - stale_since) > timeout / 2:
                # Nothing returned for 5 seconds
                if self.file.get_path() != self.path:
                    log.debug("Log file moved (probably rotated), closing")
                    self.close()
                elif ping:
                    # Ping server to see if server is still active
                    Plex.detail()
                    ping = False

            time.sleep(stale_sleep)

        return line

    def close(self):
        if not self.file:
            return

        try:
            # Close the buffered reader
            self.reader.close()
        except Exception as ex:
            log.error('reader.close() - raised exception: %s', ex, exc_info=True)
        finally:
            self.reader = None

        try:
            # Close the file handle
            self.file.close()
        except OSError as ex:
            if ex.errno == 9:
                # Bad file descriptor, already closed?
                log.info('file.close() - ignoring raised exception: %s (already closed)', ex)
            else:
                log.error('file.close() - raised exception: %s', ex, exc_info=True)
        except Exception as ex:
            log.error('file.close() - raised exception: %s', ex, exc_info=True)
        finally:
            self.file = None

    @classmethod
    def get_path(cls):
        if cls.path:
            return cls.path

        hints = cls.get_hints()

        log.debug('hints: %r', hints)

        if not hints:
            log.error('Unable to find any hints for "%s", operating system not supported', platform.system())
            return None

        for hint in hints:
            log.debug('Testing if "%s" exists', hint)

            if os.path.exists(hint):
                cls.path = hint
                break

        if cls.path:
            log.debug('Using the path: %r', cls.path)
        else:
            log.error('Unable to find a valid path for "Plex Media Server.log"', extra={
                'data': {
                    'hints': hints
                }
            })

        return cls.path

    @classmethod
    def add_hint(cls, path, system=None):
        if system not in cls.path_hints:
            cls.path_hints[system] = []

        cls.path_hints[system].append(path)

    @classmethod
    def get_hints(cls):
        # Retrieve system hints
        hints_system = PATH_HINTS.get(platform.system(), [])

        # Retrieve global hints
        hints_global = PATH_HINTS.get(None, [])

        # Retrieve hint from server preferences (if available)
        data_path = Plex[':/prefs'].get('LocalAppDataPath')

        if data_path:
            hints_global.append(os.path.join(data_path.value, "Plex Media Server", "Logs", "Plex Media Server.log"))
        else:
            log.info('Unable to retrieve "LocalAppDataPath" from server')

        hints = []

        for hint in (hints_global + hints_system):
            # Resolve hint function
            if inspect.isfunction(hint):
                hint = hint()

            # Check for duplicate
            if hint in hints:
                continue

            hints.append(hint)

        return hints

    @classmethod
    def test(cls):
        # TODO "Logging" source testing
        return True

    @classmethod
    def register(cls, parser):
        cls.parsers.append(parser)


Logging.register(NowPlayingParser)
Logging.register(ScrobbleParser)

@@ -1,4 +0,0 @@
from plex_activity.sources.s_logging.parsers.now_playing import NowPlayingParser
from plex_activity.sources.s_logging.parsers.scrobble import ScrobbleParser

__all__ = ['NowPlayingParser', 'ScrobbleParser']

@@ -1,96 +0,0 @@
from plex.lib.six.moves import urllib_parse as urlparse
from plex_activity.core.helpers import str_format

from pyemitter import Emitter
import logging
import re

log = logging.getLogger(__name__)

LOG_PATTERN = r'^.*?\[\w+\]\s\w+\s-\s{message}$'
REQUEST_HEADER_PATTERN = str_format(LOG_PATTERN, message=r"Request: (\[(?P<address>.*?):(?P<port>\d+)[^]]*\]\s)?{method} {path}.*?")

IGNORE_PATTERNS = [
    r'error parsing allowedNetworks.*?',
    r'Comparing request from.*?',
    r'(Auth: )?We found auth token (.*?), enabling token-based authentication\.',
    r'(Auth: )?Came in with a super-token, authorization succeeded\.',
    r'(Auth: )?Refreshing tokens inside the token-based authentication filter\.',
    r'\[Now\] Updated play state for .*?',
    r'Play progress on .*? - got played .*? ms by account .*?!',
    r'(Statistics: )?\(.*?\) Reporting active playback in state \d+ of type \d+ \(.*?\) for account \d+',
    r'Request: \[.*?\] (GET|PUT) /video/:/transcode/.*?',
    r'Received transcode session ping for session .*?'
]

IGNORE_REGEX = re.compile(str_format(LOG_PATTERN, message='(%s)' % ('|'.join('(%s)' % x for x in IGNORE_PATTERNS))), re.IGNORECASE)


PARAM_REGEX = re.compile(str_format(LOG_PATTERN, message=r' \* (?P<key>.*?) =\> (?P<value>.*?)'), re.IGNORECASE)


class Parser(Emitter):
    def __init__(self, core):
        self.core = core

    def read_parameters(self, *match_functions):
        match_functions = [self.parameter_match] + list(match_functions)

        info = {}

        while True:
            line = self.core.read_line_retry(timeout=5)
            if not line:
                log.info('Unable to read log file')
                return {}

            # Run through each match function to find a result
            match = None
            for func in match_functions:
                match = func(line)

                if match is not None:
                    break

            # Update info dict with result, otherwise finish reading
            if match:
                info.update(match)
            elif match is None and IGNORE_REGEX.match(line.strip()) is None:
                log.debug('break on "%s"', line.strip())
                break

        return info

    def process(self, line):
        raise NotImplementedError()

    @staticmethod
    def parameter_match(line):
        match = PARAM_REGEX.match(line.strip())
        if not match:
            return None

        match = match.groupdict()

        return {match['key']: match['value']}

    @staticmethod
    def regex_match(regex, line):
        match = regex.match(line.strip())
        if not match:
            return None

        return match.groupdict()

    @staticmethod
    def query(match, value):
        if not value:
            return

        try:
            parameters = urlparse.parse_qsl(value, strict_parsing=True)
        except ValueError:
            return

        for key, value in parameters:
            match.setdefault(key, value)

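The `query` helper above merges URL query parameters into an existing match without overwriting keys the regexes already captured. A standalone sketch of that merge, using the Python 3 location of `parse_qsl` and an illustrative query string:

from urllib.parse import parse_qsl

match = {'state': 'playing'}                      # values already captured from the log line
value = 'state=paused&ratingKey=123&time=5000'    # query string from the request header

for key, val in parse_qsl(value, strict_parsing=True):
    match.setdefault(key, val)                    # existing keys win, new keys are added

print(match)  # {'state': 'playing', 'ratingKey': '123', 'time': '5000'}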

@@ -1,116 +0,0 @@
from plex_activity.core.helpers import str_format
from plex_activity.sources.s_logging.parsers.base import Parser, LOG_PATTERN, REQUEST_HEADER_PATTERN

import logging
import re

log = logging.getLogger(__name__)

PLAYING_HEADER_PATTERN = str_format(REQUEST_HEADER_PATTERN, method="GET", path="/:/(?P<type>timeline|progress)/?(?:\?(?P<query>.*?))?\s")
PLAYING_HEADER_REGEX = re.compile(PLAYING_HEADER_PATTERN, re.IGNORECASE)

RANGE_REGEX = re.compile(str_format(LOG_PATTERN, message=r'Request range: \d+ to \d+'), re.IGNORECASE)
CLIENT_REGEX = re.compile(str_format(LOG_PATTERN, message=r'Client \[(?P<machineIdentifier>.*?)\].*?'), re.IGNORECASE)

NOW_USER_REGEX = re.compile(str_format(LOG_PATTERN, message=r'\[Now\] User is (?P<user_name>.+) \(ID: (?P<user_id>\d+)\)'), re.IGNORECASE)
NOW_CLIENT_REGEX = re.compile(str_format(LOG_PATTERN, message=r'\[Now\] Device is (?P<product>.+?) \((?P<client>.+)\)\.'), re.IGNORECASE)


class NowPlayingParser(Parser):
    required_info = [
        'ratingKey',
        'state', 'time'
    ]

    extra_info = [
        'duration',

        'user_name', 'user_id',
        'machineIdentifier', 'client'
    ]

    events = [
        'logging.playing'
    ]

    def __init__(self, main):
        super(NowPlayingParser, self).__init__(main)

        # Pipe events to the main logging activity instance
        self.pipe(self.events, main)

    def process(self, line):
        header_match = PLAYING_HEADER_REGEX.match(line)
        if not header_match:
            return False

        activity_type = header_match.group('type')

        # Get a match from the activity entries
        if activity_type == 'timeline':
            match = self.timeline()
        elif activity_type == 'progress':
            match = self.progress()
        else:
            log.warn('Unknown activity type "%s"', activity_type)
            return True

        print match, activity_type

        if match is None:
            match = {}

        # Extend match with query info
        self.query(match, header_match.group('query'))

        # Ensure we successfully matched a result
        if not match:
            return True

        # Sanitize the activity result
        info = {
            'address': header_match.group('address'),
            'port': header_match.group('port')
        }

        # - Get required info parameters
        for key in self.required_info:
            if key in match and match[key] is not None:
                info[key] = match[key]
            else:
                log.info('Invalid activity match, missing key %s (matched keys: %s)', key, match.keys())
                return True

        # - Add in any extra info parameters
        for key in self.extra_info:
            if key in match:
                info[key] = match[key]
            else:
                info[key] = None

        # Update the scrobbler with the current state
        self.emit('logging.playing', info)
        return True

    def timeline(self):
        return self.read_parameters(
            lambda line: self.regex_match(CLIENT_REGEX, line),
            lambda line: self.regex_match(RANGE_REGEX, line),

            # [Now]* entries
            lambda line: self.regex_match(NOW_USER_REGEX, line),
            lambda line: self.regex_match(NOW_CLIENT_REGEX, line),
        )

    def progress(self):
        data = self.read_parameters()

        if not data:
            return {}

        # Translate parameters into timeline-style form
        return {
            'state': data.get('state'),
            'ratingKey': data.get('key'),
            'time': data.get('time')
        }

@@ -1,38 +0,0 @@
from plex_activity.core.helpers import str_format
from plex_activity.sources.s_logging.parsers.base import Parser, LOG_PATTERN

import re


class ScrobbleParser(Parser):
    pattern = str_format(LOG_PATTERN, message=r'Library item (?P<rating_key>\d+) \'(?P<title>.*?)\' got (?P<action>(?:un)?played) by account (?P<account_key>\d+)!.*?')
    regex = re.compile(pattern, re.IGNORECASE)

    events = [
        'logging.action.played',
        'logging.action.unplayed'
    ]

    def __init__(self, main):
        super(ScrobbleParser, self).__init__(main)

        # Pipe events to the main logging activity instance
        self.pipe(self.events, main)

    def process(self, line):
        match = self.regex.match(line)
        if not match:
            return False

        action = match.group('action')
        if not action:
            return False

        self.emit('logging.action.%s' % action, {
            'account_key': match.group('account_key'),
            'rating_key': match.group('rating_key'),

            'title': match.group('title')
        })

        return True

@@ -1,3 +0,0 @@
from plex_activity.sources.s_websocket.main import WebSocket

__all__ = ['WebSocket']

@@ -1,298 +0,0 @@
from plex import Plex
from plex.lib.six.moves.urllib_parse import urlencode
from plex_activity.sources.base import Source

import json
import logging
import re
import time
import websocket

log = logging.getLogger(__name__)

SCANNING_REGEX = re.compile('Scanning the "(?P<section>.*?)" section', re.IGNORECASE)
SCAN_COMPLETE_REGEX = re.compile('Library scan complete', re.IGNORECASE)

TIMELINE_STATES = {
    0: 'created',
    2: 'matching',
    3: 'downloading',
    4: 'loading',
    5: 'finished',
    6: 'analyzing',
    9: 'deleted'
}


class WebSocket(Source):
    name = 'websocket'
    events = [
        'websocket.playing',

        'websocket.scanner.started',
        'websocket.scanner.progress',
        'websocket.scanner.finished',

        'websocket.timeline.created',
        'websocket.timeline.matching',
        'websocket.timeline.downloading',
        'websocket.timeline.loading',
        'websocket.timeline.finished',
        'websocket.timeline.analyzing',
        'websocket.timeline.deleted'
    ]

    opcode_data = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)

    def __init__(self, activity):
        super(WebSocket, self).__init__()

        self.ws = None
        self.reconnects = 0

        # Pipe events to the main activity instance
        self.pipe(self.events, activity)

    def connect(self):
        uri = 'ws://%s:%s/:/websockets/notifications' % (
            Plex.configuration.get('server.host', '127.0.0.1'),
            Plex.configuration.get('server.port', 32400)
        )

        params = {}

        # Set authentication token (if one is available)
        if Plex.configuration['authentication.token']:
            params['X-Plex-Token'] = Plex.configuration['authentication.token']

        # Append parameters to uri
        if params:
            uri += '?' + urlencode(params)

        # Create websocket connection
        self.ws = websocket.create_connection(uri)

    def run(self):
        self.connect()

        log.debug('Ready')

        while True:
            try:
                self.process(*self.receive())

                # successfully received data, reset reconnects counter
                self.reconnects = 0
            except websocket.WebSocketConnectionClosedException:
                if self.reconnects <= 5:
                    self.reconnects += 1

                    # Increasing sleep interval between reconnections
                    if self.reconnects > 1:
                        time.sleep(2 * (self.reconnects - 1))

                    log.info('WebSocket connection has closed, reconnecting...')
                    self.connect()
                else:
                    log.error('WebSocket connection unavailable, activity monitoring not available')
                    break

    def receive(self):
        frame = self.ws.recv_frame()

        if not frame:
            raise websocket.WebSocketException("Not a valid frame %s" % frame)
        elif frame.opcode in self.opcode_data:
            return frame.opcode, frame.data
        elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
            self.ws.send_close()
            return frame.opcode, None
        elif frame.opcode == websocket.ABNF.OPCODE_PING:
            self.ws.pong("Hi!")

        return None, None

    def process(self, opcode, data):
        if opcode not in self.opcode_data:
            return False

        try:
            info = json.loads(data)
        except UnicodeDecodeError as ex:
            log.warn('Error decoding message from websocket: %s' % ex, extra={
                'event': {
                    'module': __name__,
                    'name': 'process.loads.unicode_decode_error',
                    'key': '%s:%s' % (ex.encoding, ex.reason)
                }
            })
            log.debug(data)
            return False
        except Exception as ex:
            log.warn('Error decoding message from websocket: %s' % ex, extra={
                'event': {
                    'module': __name__,
                    'name': 'process.load_exception',
                    'key': ex.message
                }
            })
            log.debug(data)
            return False

        # Handle modern messages (PMS 1.3.0+)
        if type(info.get('NotificationContainer')) is dict:
            info = info['NotificationContainer']

        # Process message
        m_type = info.get('type')

        if not m_type:
            log.debug('Received message with no "type" parameter: %r', info)
            return False

        # Pre-process message (if function exists)
        process_func = getattr(self, 'process_%s' % m_type, None)

        if process_func and process_func(info):
            return True

        # Emit raw message
        return self.emit_notification('%s.notification.%s' % (self.name, m_type), info)

    def process_playing(self, info):
        children = info.get('_children') or info.get('PlaySessionStateNotification')

        if not children:
            log.debug('Received "playing" message with no children: %r', info)
            return False

        return self.emit_notification('%s.playing' % self.name, children)

    def process_progress(self, info):
        children = info.get('_children') or info.get('ProgressNotification')

        if not children:
            log.debug('Received "progress" message with no children: %r', info)
            return False

        for notification in children:
            self.emit('%s.scanner.progress' % self.name, {
                'message': notification.get('message')
            })

        return True

    def process_status(self, info):
        children = info.get('_children') or info.get('StatusNotification')

        if not children:
            log.debug('Received "status" message with no children: %r', info)
            return False

        # Process children
        count = 0

        for notification in children:
            title = notification.get('title')

            if not title:
                continue

            # Scan complete message
            if SCAN_COMPLETE_REGEX.match(title):
                self.emit('%s.scanner.finished' % self.name)
                count += 1
                continue

            # Scanning message
            match = SCANNING_REGEX.match(title)

            if not match:
                continue

            section = match.group('section')

            if not section:
                continue

            self.emit('%s.scanner.started' % self.name, {'section': section})
            count += 1

        # Validate result
        if count < 1:
            log.debug('Received "status" message with no valid children: %r', info)
            return False

        return True

    def process_timeline(self, info):
        children = info.get('_children') or info.get('TimelineEntry')

        if not children:
            log.debug('Received "timeline" message with no children: %r', info)
            return False

        # Process children
        count = 0

        for entry in children:
            state = TIMELINE_STATES.get(entry.get('state'))

            if not state:
                continue

            self.emit('%s.timeline.%s' % (self.name, state), entry)
            count += 1

        # Validate result
        if count < 1:
            log.debug('Received "timeline" message with no valid children: %r', info)
            return False

        return True

    #
    # Helpers
    #

    def emit_notification(self, name, info=None):
        if info is None:
            info = {}

        # Emit children
        children = self._get_children(info)

        if children:
            for child in children:
                self.emit(name, child)

            return True

        # Emit objects
        if info:
            self.emit(name, info)
        else:
            self.emit(name)

        return True

    @staticmethod
    def _get_children(info):
        if type(info) is list:
            return info

        if type(info) is not dict:
            return None

        # Return legacy children
        if info.get('_children'):
            return info['_children']

        # Search for modern children container
        for key, value in info.items():
            key = key.lower()

            if (key.endswith('entry') or key.endswith('notification')) and type(value) is list:
                return value

        return None
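To illustrate the dispatch above, a hypothetical modern (PMS 1.3.0+) timeline payload and the state lookup it goes through; the JSON shape follows the keys the code reads (`NotificationContainer`, `type`, `TimelineEntry`, `state`), while the entry fields themselves are made up, and the event name is built the same way as in `process_timeline`:

import json

# Hypothetical frame data, shaped like the messages handled above
data = json.dumps({
    'NotificationContainer': {
        'type': 'timeline',
        'TimelineEntry': [{'itemID': 123, 'state': 5}]
    }
})

TIMELINE_STATES = {0: 'created', 2: 'matching', 3: 'downloading', 4: 'loading',
                   5: 'finished', 6: 'analyzing', 9: 'deleted'}

info = json.loads(data)['NotificationContainer']

for entry in info['TimelineEntry']:
    state = TIMELINE_STATES.get(entry.get('state'))
    if state:
        print('websocket.timeline.%s' % state, entry)
# -> websocket.timeline.finished {'itemID': 123, 'state': 5}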