Mirror of https://github.com/morpheus65535/bazarr, synced 2025-03-03 10:06:24 +00:00
Merge remote-tracking branch 'origin/development' into halali
# Conflicts:
#	bazarr/get_episodes.py
#	bazarr/get_movies.py
#	bazarr/get_series.py
#	bazarr/get_subtitle.py
#	bazarr/main.py
Commit 2b564a060a
28 changed files with 2433 additions and 8 deletions
@@ -25,7 +25,8 @@ def update_all_movies():


 def sync_episodes():
-    logging.debug('BAZARR Starting episode sync from Sonarr.')
+    q4ws.append('Episodes sync from Sonarr started...')
+    logging.debug('BAZARR Starting episodes sync from Sonarr.')
     apikey_sonarr = settings.sonarr.apikey

     # Open database connection

@@ -41,12 +42,13 @@ def sync_episodes():
     episodes_to_add = []

     # Get sonarrId for each series from database
-    seriesIdList = c.execute("SELECT sonarrSeriesId FROM table_shows").fetchall()
+    seriesIdList = c.execute("SELECT sonarrSeriesId, title FROM table_shows").fetchall()

     # Close database connection
     c.close()

     for seriesId in seriesIdList:
+        q4ws.append('Getting episodes data for this show: ' + seriesId[1])
         # Get episodes data for a series from Sonarr
         url_sonarr_api_episode = url_sonarr + "/api/episode?seriesId=" + str(seriesId[0]) + "&apikey=" + apikey_sonarr
         try:

@@ -109,4 +111,6 @@ def sync_episodes():
     logging.debug('BAZARR All episodes synced from Sonarr into database.')

     list_missing_subtitles()
     logging.debug('BAZARR All missing subtitles updated in database.')
+
+    q4ws.append('Episodes sync from Sonarr ended.')
@@ -3,6 +3,7 @@ import os
 import sqlite3
 import requests
 import logging
+from queueconfig import q4ws

 from get_argv import config_dir
 from config import settings, url_radarr

@@ -11,6 +12,7 @@ from list_subtitles import store_subtitles_movie, list_missing_subtitles_movies


 def update_movies():
+    q4ws.append("Update movies list from Radarr is running...")
     logging.debug('BAZARR Starting movie sync from Radarr.')
     apikey_radarr = settings.radarr.apikey
     movie_default_enabled = settings.general.getboolean('movie_default_enabled')

@@ -48,6 +50,7 @@ def update_movies():
     movies_to_add = []

     for movie in r.json():
+        q4ws.append("Getting data for this movie: " + movie['title'])
         if movie['hasFile'] is True:
             if 'movieFile' in movie:
                 if movie["path"] != None and movie['movieFile']['relativePath'] != None:

@@ -125,6 +128,8 @@ def update_movies():
     list_missing_subtitles_movies()
     logging.debug('BAZARR All movie missing subtitles updated in database.')

+    q4ws.append("Update movies list from Radarr is ended.")
+

 def get_profile_list():
     apikey_radarr = settings.radarr.apikey
@@ -3,6 +3,8 @@ import os
 import sqlite3
 import requests
 import logging
+from queueconfig import q4ws
+import datetime

 from get_argv import config_dir
 from config import settings, url_sonarr

@@ -10,6 +12,7 @@ from list_subtitles import list_missing_subtitles


 def update_series():
+    q4ws.append("Update series list from Sonarr is running...")
     apikey_sonarr = settings.sonarr.apikey
     serie_default_enabled = settings.general.getboolean('serie_default_enabled')
     serie_default_language = settings.general.serie_default_language

@@ -50,6 +53,7 @@ def update_series():
     series_to_add = []

     for show in r.json():
+        q4ws.append("Getting series data for this show: " + show['title'])
         try:
             overview = unicode(show['overview'])
         except:

@@ -104,6 +108,8 @@ def update_series():
     db.commit()
     db.close()

+    q4ws.append("Update series list from Sonarr is ended.")
+

 def get_profile_list():
     apikey_sonarr = settings.sonarr.apikey
@@ -25,6 +25,7 @@ from helper import path_replace, path_replace_movie, path_replace_reverse, \
 from list_subtitles import list_missing_subtitles, list_missing_subtitles_movies, store_subtitles, store_subtitles_movie
 from notifier import send_notifications, send_notifications_movie
 from utils import history_log, history_log_movie
+from queueconfig import q4ws

 # configure the cache
 region.configure('dogpile.cache.memory')

@@ -425,6 +426,7 @@ def wanted_download_subtitles(path):
     for i in range(len(attempt)):
         if attempt[i][0] == language:
             if search_active(attempt[i][1]) is True:
+                q4ws.append('Searching ' + str(language_from_alpha2(language)) + ' subtitles for this file: ' + path)
                 message = download_subtitle(path_replace(episode[0]), str(alpha3_from_alpha2(language)), episode[4], providers_list, providers_auth, str(episode[5]), 'series')
                 if message is not None:
                     store_subtitles(path_replace(episode[0]))

@@ -466,6 +468,7 @@ def wanted_download_subtitles_movie(path):
     for i in range(len(attempt)):
         if attempt[i][0] == language:
             if search_active(attempt[i][1]) is True:
+                q4ws.append('Searching ' + str(language_from_alpha2(language)) + ' subtitles for this file: ' + path)
                 message = download_subtitle(path_replace_movie(movie[0]), str(alpha3_from_alpha2(language)), movie[4], providers_list, providers_auth, str(movie[5]), 'movie')
                 if message is not None:
                     store_subtitles_movie(path_replace_movie(movie[0]))
@@ -16,6 +16,8 @@ from config import settings
 from helper import path_replace, path_replace_movie, path_replace_reverse, \
     path_replace_reverse_movie

+from queueconfig import q4ws
+
 gc.enable()


@@ -23,6 +25,7 @@ def store_subtitles(file):
     logging.debug('BAZARR started subtitles indexing for this file: ' + file)
     actual_subtitles = []
     if os.path.exists(file):
+        q4ws.append('Analyzing this file for subtitles: ' + file)
         if os.path.splitext(file)[1] == '.mkv':
             logging.debug("BAZARR is trying to index embedded subtitles.")
             try:

@@ -95,6 +98,7 @@ def store_subtitles_movie(file):
     logging.debug('BAZARR started subtitles indexing for this file: ' + file)
     actual_subtitles = []
     if os.path.exists(file):
+        q4ws.append('Analyzing this file for subtitles: ' + file)
         if os.path.splitext(file)[1] == '.mkv':
             logging.debug("BAZARR is trying to index embedded subtitles.")
             try:
@@ -78,6 +78,7 @@ def configure_logging(debug=False):
     logging.getLogger("guessit").setLevel(logging.WARNING)
     logging.getLogger("rebulk").setLevel(logging.WARNING)
     logging.getLogger("stevedore.extension").setLevel(logging.CRITICAL)
+    logging.getLogger("geventwebsocket.handler").setLevel(logging.WARNING)
     fh.setLevel(log_level)
     logger.addHandler(fh)

@@ -1,4 +1,4 @@
-bazarr_version = '0.6.9.1'
+bazarr_version = '0.6.9.5'

 import gc
 gc.enable()

@@ -18,6 +18,7 @@ from update_db import *
 from notifier import update_notifier
 update_notifier()

+import queueconfig

 import logging
 from logger import configure_logging, empty_log

@@ -36,7 +37,7 @@ if settings.proxy.type != 'None':
     os.environ['HTTPS_PROXY'] = str(proxy)
     os.environ['NO_PROXY'] = str(settings.proxy.exclude)

-from bottle import route, run, template, static_file, request, redirect, response, HTTPError, app, hook
+from bottle import route, run, template, static_file, request, redirect, response, HTTPError, app, hook, abort
 import bottle
 bottle.TEMPLATE_PATH.insert(0, os.path.join(os.path.dirname(__file__), '../views/'))
 if "PYCHARM_HOSTED" in os.environ:

@@ -45,7 +46,24 @@ if "PYCHARM_HOSTED" in os.environ:
 else:
     bottle.ERROR_PAGE_TEMPLATE = bottle.ERROR_PAGE_TEMPLATE.replace('if DEBUG and', 'if')

-from cherrypy.wsgiserver import CherryPyWSGIServer
+# Install gevent under user directory if it'S not already available. This one is required to use websocket.
+try:
+    import gevent
+except ImportError as e:
+    logging.exception('BAZARR require gevent Python module to be installed using pip.')
+    try:
+        stop_file = open(os.path.join(config_dir, "bazarr.stop"), "w")
+    except Exception as e:
+        logging.error('BAZARR Cannot create bazarr.stop file.')
+    else:
+        stop_file.write('')
+        stop_file.close()
+        os._exit(0)
+
+from gevent.pywsgi import WSGIServer
+from geventwebsocket import WebSocketError
+from geventwebsocket.handler import WebSocketHandler
+#from cherrypy.wsgiserver import CherryPyWSGIServer

 from beaker.middleware import SessionMiddleware
 from cork import Cork

@@ -1732,14 +1750,31 @@ def test_notification(protocol, provider):
     )


+@route(base_url + 'websocket')
+@custom_auth_basic(check_credentials)
+def handle_websocket():
+    wsock = request.environ.get('wsgi.websocket')
+    if not wsock:
+        abort(400, 'Expected WebSocket request.')
+
+    queueconfig.q4ws.clear()
+
+    while True:
+        try:
+            if len(queueconfig.q4ws) > 0:
+                wsock.send(queueconfig.q4ws.popleft())
+            gevent.sleep(0)
+        except WebSocketError:
+            break
+
 import warnings
 # Mute DeprecationWarning
 warnings.simplefilter("ignore", DeprecationWarning)

-server = CherryPyWSGIServer((str(settings.general.ip), int(settings.general.port)), app)
+server = WSGIServer((str(settings.general.ip), int(settings.general.port)), app, handler_class=WebSocketHandler)
 try:
     logging.info('BAZARR is started and waiting for request on http://' + str(settings.general.ip) + ':' + str(settings.general.port) + str(base_url))
     # print 'Bazarr is started and waiting for request on http://' + str(ip) + ':' + str(port) + str(base_url)
-    server.start()
+    server.serve_forever()
 except KeyboardInterrupt:
     shutdown()
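The hunk above is the heart of this merge: the CherryPy WSGI server is replaced by gevent's WSGIServer with the bundled geventwebsocket handler, which is what lets the new /websocket route find an upgraded socket in request.environ['wsgi.websocket']. A minimal standalone sketch of that wiring, using a hypothetical host/port and message rather than Bazarr's settings:

# Sketch only: shows how handler_class=WebSocketHandler exposes the upgraded
# socket to the WSGI app as environ['wsgi.websocket'].
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
from geventwebsocket import WebSocketError

def demo_app(environ, start_response):
    wsock = environ.get('wsgi.websocket')
    if wsock is None:
        # Ordinary HTTP request: answer normally.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'no websocket here']
    try:
        wsock.send('hello from the server')  # push a message to the browser
    except WebSocketError:
        pass
    return []

if __name__ == '__main__':
    WSGIServer(('127.0.0.1', 6767), demo_app, handler_class=WebSocketHandler).serve_forever()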
bazarr/queueconfig.py (new file, 4 lines)

@@ -0,0 +1,4 @@
from collections import deque

global q4ws
q4ws = deque(maxlen=10)
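queueconfig.q4ws is the glue for every q4ws.append() call added above: the Sonarr/Radarr sync jobs act as producers and the /websocket route in main.py drains the deque towards the browser. A self-contained sketch of that producer/consumer pattern, with made-up messages:

from collections import deque

q4ws = deque(maxlen=10)  # bounded, so stale progress messages are silently dropped

def producer():
    # The sync jobs simply append human-readable progress strings.
    q4ws.append('Episodes sync from Sonarr started...')
    q4ws.append('Episodes sync from Sonarr ended.')

def consumer():
    # The websocket route pops from the left, preserving insertion order.
    while q4ws:
        print('send to client:', q4ws.popleft())

producer()
consumer()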
libs/geventwebsocket/__init__.py (new file, 21 lines)

@@ -0,0 +1,21 @@
VERSION = (0, 10, 1, 'final', 0)

__all__ = [
    'WebSocketApplication',
    'Resource',
    'WebSocketServer',
    'WebSocketError',
    'get_version'
]


def get_version(*args, **kwargs):
    from .utils import get_version
    return get_version(*args, **kwargs)

try:
    from .resource import WebSocketApplication, Resource
    from .server import WebSocketServer
    from .exceptions import WebSocketError
except ImportError:
    pass
libs/geventwebsocket/_compat.py (new file, 23 lines)

@@ -0,0 +1,23 @@
from __future__ import absolute_import, division, print_function

import sys
import codecs


PY3 = sys.version_info[0] == 3
PY2 = sys.version_info[0] == 2


if PY2:
    bytes = str
    text_type = unicode
    string_types = basestring
    range_type = xrange
    iteritems = lambda x: x.iteritems()
    # b = lambda x: x
else:
    text_type = str
    string_types = str,
    range_type = range
    iteritems = lambda x: iter(x.items())
    # b = lambda x: codecs.latin_1_encode(x)[0]
libs/geventwebsocket/exceptions.py (new file, 19 lines)

@@ -0,0 +1,19 @@
from socket import error as socket_error


class WebSocketError(socket_error):
    """
    Base class for all websocket errors.
    """


class ProtocolError(WebSocketError):
    """
    Raised if an error occurs when de/encoding the websocket protocol.
    """


class FrameTooLargeException(ProtocolError):
    """
    Raised if a frame is received that is too large.
    """
libs/geventwebsocket/gunicorn/__init__.py (new file, empty)
libs/geventwebsocket/gunicorn/workers.py (new file, 6 lines)

@@ -0,0 +1,6 @@
from geventwebsocket.handler import WebSocketHandler
from gunicorn.workers.ggevent import GeventPyWSGIWorker


class GeventWebSocketWorker(GeventPyWSGIWorker):
    wsgi_handler = WebSocketHandler
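Bazarr itself starts its own gevent WSGIServer in bazarr/main.py rather than running under gunicorn, so this worker module is only carried along as part of the vendored library; gunicorn users would typically select it by its dotted path (for example, a hypothetical invocation: gunicorn --worker-class geventwebsocket.gunicorn.workers.GeventWebSocketWorker myapp:app).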
libs/geventwebsocket/handler.py (new file, 283 lines)

@@ -0,0 +1,283 @@
|
|||
import base64
|
||||
import hashlib
|
||||
|
||||
from gevent.pywsgi import WSGIHandler
|
||||
from ._compat import PY3
|
||||
from .websocket import WebSocket, Stream
|
||||
from .logging import create_logger
|
||||
|
||||
|
||||
class Client(object):
|
||||
def __init__(self, address, ws):
|
||||
self.address = address
|
||||
self.ws = ws
|
||||
|
||||
|
||||
class WebSocketHandler(WSGIHandler):
|
||||
"""
|
||||
Automatically upgrades the connection to a websocket.
|
||||
|
||||
To prevent the WebSocketHandler to call the underlying WSGI application,
|
||||
but only setup the WebSocket negotiations, do:
|
||||
|
||||
mywebsockethandler.prevent_wsgi_call = True
|
||||
|
||||
before calling run_application(). This is useful if you want to do more
|
||||
things before calling the app, and want to off-load the WebSocket
|
||||
negotiations to this library. Socket.IO needs this for example, to send
|
||||
the 'ack' before yielding the control to your WSGI app.
|
||||
"""
|
||||
|
||||
SUPPORTED_VERSIONS = ('13', '8', '7')
|
||||
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
||||
|
||||
def run_websocket(self):
|
||||
"""
|
||||
Called when a websocket has been created successfully.
|
||||
"""
|
||||
|
||||
if getattr(self, 'prevent_wsgi_call', False):
|
||||
return
|
||||
|
||||
# In case WebSocketServer is not used
|
||||
if not hasattr(self.server, 'clients'):
|
||||
self.server.clients = {}
|
||||
|
||||
# Since we're now a websocket connection, we don't care what the
|
||||
# application actually responds with for the http response
|
||||
|
||||
try:
|
||||
self.server.clients[self.client_address] = Client(
|
||||
self.client_address, self.websocket)
|
||||
list(self.application(self.environ, lambda s, h, e=None: []))
|
||||
finally:
|
||||
del self.server.clients[self.client_address]
|
||||
if not self.websocket.closed:
|
||||
self.websocket.close()
|
||||
self.environ.update({
|
||||
'wsgi.websocket': None
|
||||
})
|
||||
self.websocket = None
|
||||
|
||||
def run_application(self):
|
||||
if (hasattr(self.server, 'pre_start_hook') and self.server.pre_start_hook):
|
||||
self.logger.debug("Calling pre-start hook")
|
||||
if self.server.pre_start_hook(self):
|
||||
return super(WebSocketHandler, self).run_application()
|
||||
|
||||
self.logger.debug("Initializing WebSocket")
|
||||
self.result = self.upgrade_websocket()
|
||||
|
||||
if hasattr(self, 'websocket'):
|
||||
if self.status and not self.headers_sent:
|
||||
self.write('')
|
||||
|
||||
self.run_websocket()
|
||||
else:
|
||||
if self.status:
|
||||
# A status was set, likely an error so just send the response
|
||||
if not self.result:
|
||||
self.result = []
|
||||
|
||||
self.process_result()
|
||||
return
|
||||
|
||||
# This handler did not handle the request, so defer it to the
|
||||
# underlying application object
|
||||
return super(WebSocketHandler, self).run_application()
|
||||
|
||||
def upgrade_websocket(self):
|
||||
"""
|
||||
Attempt to upgrade the current environ into a websocket enabled
|
||||
connection. If successful, the environ dict with be updated with two
|
||||
new entries, `wsgi.websocket` and `wsgi.websocket_version`.
|
||||
|
||||
:returns: Whether the upgrade was successful.
|
||||
"""
|
||||
|
||||
# Some basic sanity checks first
|
||||
|
||||
self.logger.debug("Validating WebSocket request")
|
||||
|
||||
if self.environ.get('REQUEST_METHOD', '') != 'GET':
|
||||
# This is not a websocket request, so we must not handle it
|
||||
self.logger.debug('Can only upgrade connection if using GET method.')
|
||||
return
|
||||
|
||||
upgrade = self.environ.get('HTTP_UPGRADE', '').lower()
|
||||
|
||||
if upgrade == 'websocket':
|
||||
connection = self.environ.get('HTTP_CONNECTION', '').lower()
|
||||
|
||||
if 'upgrade' not in connection:
|
||||
# This is not a websocket request, so we must not handle it
|
||||
self.logger.warning("Client didn't ask for a connection "
|
||||
"upgrade")
|
||||
return
|
||||
else:
|
||||
# This is not a websocket request, so we must not handle it
|
||||
return
|
||||
|
||||
if self.request_version != 'HTTP/1.1':
|
||||
self.start_response('402 Bad Request', [])
|
||||
self.logger.warning("Bad server protocol in headers")
|
||||
|
||||
return ['Bad protocol version']
|
||||
|
||||
if self.environ.get('HTTP_SEC_WEBSOCKET_VERSION'):
|
||||
return self.upgrade_connection()
|
||||
else:
|
||||
self.logger.warning("No protocol defined")
|
||||
self.start_response('426 Upgrade Required', [
|
||||
('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))])
|
||||
|
||||
return ['No Websocket protocol version defined']
|
||||
|
||||
def upgrade_connection(self):
|
||||
"""
|
||||
Validate and 'upgrade' the HTTP request to a WebSocket request.
|
||||
|
||||
If an upgrade succeeded then then handler will have `start_response`
|
||||
with a status of `101`, the environ will also be updated with
|
||||
`wsgi.websocket` and `wsgi.websocket_version` keys.
|
||||
|
||||
:param environ: The WSGI environ dict.
|
||||
:param start_response: The callable used to start the response.
|
||||
:param stream: File like object that will be read from/written to by
|
||||
the underlying WebSocket object, if created.
|
||||
:return: The WSGI response iterator is something went awry.
|
||||
"""
|
||||
|
||||
self.logger.debug("Attempting to upgrade connection")
|
||||
|
||||
version = self.environ.get("HTTP_SEC_WEBSOCKET_VERSION")
|
||||
|
||||
if version not in self.SUPPORTED_VERSIONS:
|
||||
msg = "Unsupported WebSocket Version: {0}".format(version)
|
||||
|
||||
self.logger.warning(msg)
|
||||
self.start_response('400 Bad Request', [
|
||||
('Sec-WebSocket-Version', ', '.join(self.SUPPORTED_VERSIONS))
|
||||
])
|
||||
|
||||
return [msg]
|
||||
|
||||
key = self.environ.get("HTTP_SEC_WEBSOCKET_KEY", '').strip()
|
||||
|
||||
if not key:
|
||||
# 5.2.1 (3)
|
||||
msg = "Sec-WebSocket-Key header is missing/empty"
|
||||
|
||||
self.logger.warning(msg)
|
||||
self.start_response('400 Bad Request', [])
|
||||
|
||||
return [msg]
|
||||
|
||||
try:
|
||||
key_len = len(base64.b64decode(key))
|
||||
except TypeError:
|
||||
msg = "Invalid key: {0}".format(key)
|
||||
|
||||
self.logger.warning(msg)
|
||||
self.start_response('400 Bad Request', [])
|
||||
|
||||
return [msg]
|
||||
|
||||
if key_len != 16:
|
||||
# 5.2.1 (3)
|
||||
msg = "Invalid key: {0}".format(key)
|
||||
|
||||
self.logger.warning(msg)
|
||||
self.start_response('400 Bad Request', [])
|
||||
|
||||
return [msg]
|
||||
|
||||
# Check for WebSocket Protocols
|
||||
requested_protocols = self.environ.get(
|
||||
'HTTP_SEC_WEBSOCKET_PROTOCOL', '')
|
||||
protocol = None
|
||||
|
||||
if hasattr(self.application, 'app_protocol'):
|
||||
allowed_protocol = self.application.app_protocol(
|
||||
self.environ['PATH_INFO'])
|
||||
|
||||
if allowed_protocol and allowed_protocol in requested_protocols:
|
||||
protocol = allowed_protocol
|
||||
self.logger.debug("Protocol allowed: {0}".format(protocol))
|
||||
|
||||
self.websocket = WebSocket(self.environ, Stream(self), self)
|
||||
self.environ.update({
|
||||
'wsgi.websocket_version': version,
|
||||
'wsgi.websocket': self.websocket
|
||||
})
|
||||
|
||||
if PY3:
|
||||
accept = base64.b64encode(
|
||||
hashlib.sha1((key + self.GUID).encode("latin-1")).digest()
|
||||
).decode("latin-1")
|
||||
else:
|
||||
accept = base64.b64encode(hashlib.sha1(key + self.GUID).digest())
|
||||
|
||||
headers = [
|
||||
("Upgrade", "websocket"),
|
||||
("Connection", "Upgrade"),
|
||||
("Sec-WebSocket-Accept", accept)
|
||||
]
|
||||
|
||||
if protocol:
|
||||
headers.append(("Sec-WebSocket-Protocol", protocol))
|
||||
|
||||
self.logger.debug("WebSocket request accepted, switching protocols")
|
||||
self.start_response("101 Switching Protocols", headers)
|
||||
|
||||
@property
|
||||
def logger(self):
|
||||
if not hasattr(self.server, 'logger'):
|
||||
self.server.logger = create_logger(__name__)
|
||||
|
||||
return self.server.logger
|
||||
|
||||
def log_request(self):
|
||||
if '101' not in str(self.status):
|
||||
self.logger.info(self.format_request())
|
||||
|
||||
@property
|
||||
def active_client(self):
|
||||
return self.server.clients[self.client_address]
|
||||
|
||||
def start_response(self, status, headers, exc_info=None):
|
||||
"""
|
||||
Called when the handler is ready to send a response back to the remote
|
||||
endpoint. A websocket connection may have not been created.
|
||||
"""
|
||||
writer = super(WebSocketHandler, self).start_response(
|
||||
status, headers, exc_info=exc_info)
|
||||
|
||||
self._prepare_response()
|
||||
|
||||
return writer
|
||||
|
||||
def _prepare_response(self):
|
||||
"""
|
||||
Sets up the ``pywsgi.Handler`` to work with a websocket response.
|
||||
|
||||
This is used by other projects that need to support WebSocket
|
||||
connections as part of a larger effort.
|
||||
"""
|
||||
assert not self.headers_sent
|
||||
|
||||
if not self.environ.get('wsgi.websocket'):
|
||||
# a WebSocket connection is not established, do nothing
|
||||
return
|
||||
|
||||
# So that `finalize_headers` doesn't write a Content-Length header
|
||||
self.provided_content_length = False
|
||||
|
||||
# The websocket is now controlling the response
|
||||
self.response_use_chunked = False
|
||||
|
||||
# Once the request is over, the connection must be closed
|
||||
self.close_connection = True
|
||||
|
||||
# Prevents the Date header from being written
|
||||
self.provided_date = True
|
libs/geventwebsocket/logging.py (new file, 31 lines)

@@ -0,0 +1,31 @@
from __future__ import absolute_import

from logging import getLogger, StreamHandler, getLoggerClass, Formatter, DEBUG


def create_logger(name, debug=False, format=None):
    Logger = getLoggerClass()

    class DebugLogger(Logger):
        def getEffectiveLevel(x):
            if x.level == 0 and debug:
                return DEBUG
            else:
                return Logger.getEffectiveLevel(x)

    class DebugHandler(StreamHandler):
        def emit(x, record):
            StreamHandler.emit(x, record) if debug else None

    handler = DebugHandler()
    handler.setLevel(DEBUG)

    if format:
        handler.setFormatter(Formatter(format))

    logger = getLogger(name)
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(handler)

    return logger
libs/geventwebsocket/protocols/__init__.py (new file, empty)
libs/geventwebsocket/protocols/base.py (new file, 35 lines)

@@ -0,0 +1,35 @@
class BaseProtocol(object):
    PROTOCOL_NAME = ''

    def __init__(self, app):
        self._app = app

    def on_open(self):
        self.app.on_open()

    def on_message(self, message):
        self.app.on_message(message)

    def on_close(self, reason=None):
        self.app.on_close(reason)

    @property
    def app(self):
        if self._app:
            return self._app
        else:
            raise Exception("No application coupled")

    @property
    def server(self):
        if not hasattr(self.app, 'ws'):
            return None

        return self.app.ws.handler.server

    @property
    def handler(self):
        if not hasattr(self.app, 'ws'):
            return None

        return self.app.ws.handler
libs/geventwebsocket/protocols/wamp.py (new file, 235 lines)

@@ -0,0 +1,235 @@
|
|||
import inspect
|
||||
import random
|
||||
import string
|
||||
import types
|
||||
|
||||
try:
|
||||
import ujson as json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
import json
|
||||
|
||||
from .._compat import range_type, string_types
|
||||
from ..exceptions import WebSocketError
|
||||
from .base import BaseProtocol
|
||||
|
||||
|
||||
def export_rpc(arg=None):
|
||||
if isinstance(arg, types.FunctionType):
|
||||
arg._rpc = arg.__name__
|
||||
return arg
|
||||
|
||||
|
||||
def serialize(data):
|
||||
return json.dumps(data)
|
||||
|
||||
|
||||
class Prefixes(object):
|
||||
def __init__(self):
|
||||
self.prefixes = {}
|
||||
|
||||
def add(self, prefix, uri):
|
||||
self.prefixes[prefix] = uri
|
||||
|
||||
def resolve(self, curie_or_uri):
|
||||
if "http://" in curie_or_uri:
|
||||
return curie_or_uri
|
||||
elif ':' in curie_or_uri:
|
||||
prefix, proc = curie_or_uri.split(':', 1)
|
||||
return self.prefixes[prefix] + proc
|
||||
else:
|
||||
raise Exception(curie_or_uri)
|
||||
|
||||
|
||||
class RemoteProcedures(object):
|
||||
def __init__(self):
|
||||
self.calls = {}
|
||||
|
||||
def register_procedure(self, uri, proc):
|
||||
self.calls[uri] = proc
|
||||
|
||||
def register_object(self, uri, obj):
|
||||
for k in inspect.getmembers(obj, inspect.ismethod):
|
||||
if '_rpc' in k[1].__dict__:
|
||||
proc_uri = uri + k[1]._rpc
|
||||
self.calls[proc_uri] = (obj, k[1])
|
||||
|
||||
def call(self, uri, args):
|
||||
if uri in self.calls:
|
||||
proc = self.calls[uri]
|
||||
|
||||
# Do the correct call whether it's a function or instance method.
|
||||
if isinstance(proc, tuple):
|
||||
if proc[1].__self__ is None:
|
||||
# Create instance of object and call method
|
||||
return proc[1](proc[0](), *args)
|
||||
else:
|
||||
# Call bound method on instance
|
||||
return proc[1](*args)
|
||||
else:
|
||||
return self.calls[uri](*args)
|
||||
else:
|
||||
raise Exception("no such uri '{}'".format(uri))
|
||||
|
||||
|
||||
class Channels(object):
|
||||
def __init__(self):
|
||||
self.channels = {}
|
||||
|
||||
def create(self, uri, prefix_matching=False):
|
||||
if uri not in self.channels:
|
||||
self.channels[uri] = []
|
||||
|
||||
# TODO: implement prefix matching
|
||||
|
||||
def subscribe(self, uri, client):
|
||||
if uri in self.channels:
|
||||
self.channels[uri].append(client)
|
||||
|
||||
def unsubscribe(self, uri, client):
|
||||
if uri not in self.channels:
|
||||
return
|
||||
|
||||
client_index = self.channels[uri].index(client)
|
||||
self.channels[uri].pop(client_index)
|
||||
|
||||
if len(self.channels[uri]) == 0:
|
||||
del self.channels[uri]
|
||||
|
||||
def publish(self, uri, event, exclude=None, eligible=None):
|
||||
if uri not in self.channels:
|
||||
return
|
||||
|
||||
# TODO: exclude & eligible
|
||||
|
||||
msg = [WampProtocol.MSG_EVENT, uri, event]
|
||||
|
||||
for client in self.channels[uri]:
|
||||
try:
|
||||
client.ws.send(serialize(msg))
|
||||
except WebSocketError:
|
||||
# Seems someone didn't unsubscribe before disconnecting
|
||||
self.channels[uri].remove(client)
|
||||
|
||||
|
||||
class WampProtocol(BaseProtocol):
|
||||
MSG_WELCOME = 0
|
||||
MSG_PREFIX = 1
|
||||
MSG_CALL = 2
|
||||
MSG_CALL_RESULT = 3
|
||||
MSG_CALL_ERROR = 4
|
||||
MSG_SUBSCRIBE = 5
|
||||
MSG_UNSUBSCRIBE = 6
|
||||
MSG_PUBLISH = 7
|
||||
MSG_EVENT = 8
|
||||
|
||||
PROTOCOL_NAME = "wamp"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.procedures = RemoteProcedures()
|
||||
self.prefixes = Prefixes()
|
||||
self.session_id = ''.join(
|
||||
[random.choice(string.digits + string.letters)
|
||||
for i in range_type(16)])
|
||||
|
||||
super(WampProtocol, self).__init__(*args, **kwargs)
|
||||
|
||||
def register_procedure(self, *args, **kwargs):
|
||||
self.procedures.register_procedure(*args, **kwargs)
|
||||
|
||||
def register_object(self, *args, **kwargs):
|
||||
self.procedures.register_object(*args, **kwargs)
|
||||
|
||||
def register_pubsub(self, *args, **kwargs):
|
||||
if not hasattr(self.server, 'channels'):
|
||||
self.server.channels = Channels()
|
||||
|
||||
self.server.channels.create(*args, **kwargs)
|
||||
|
||||
def do_handshake(self):
|
||||
from geventwebsocket import get_version
|
||||
|
||||
welcome = [
|
||||
self.MSG_WELCOME,
|
||||
self.session_id,
|
||||
1,
|
||||
'gevent-websocket/' + get_version()
|
||||
]
|
||||
self.app.ws.send(serialize(welcome))
|
||||
|
||||
def _get_exception_info(self, e):
|
||||
uri = 'http://TODO#generic'
|
||||
desc = str(type(e))
|
||||
details = str(e)
|
||||
return [uri, desc, details]
|
||||
|
||||
def rpc_call(self, data):
|
||||
call_id, curie_or_uri = data[1:3]
|
||||
args = data[3:]
|
||||
|
||||
if not isinstance(call_id, string_types):
|
||||
raise Exception()
|
||||
if not isinstance(curie_or_uri, string_types):
|
||||
raise Exception()
|
||||
|
||||
uri = self.prefixes.resolve(curie_or_uri)
|
||||
|
||||
try:
|
||||
result = self.procedures.call(uri, args)
|
||||
result_msg = [self.MSG_CALL_RESULT, call_id, result]
|
||||
except Exception as e:
|
||||
result_msg = [self.MSG_CALL_ERROR,
|
||||
call_id] + self._get_exception_info(e)
|
||||
|
||||
self.app.on_message(serialize(result_msg))
|
||||
|
||||
def pubsub_action(self, data):
|
||||
action = data[0]
|
||||
curie_or_uri = data[1]
|
||||
|
||||
if not isinstance(action, int):
|
||||
raise Exception()
|
||||
if not isinstance(curie_or_uri, string_types):
|
||||
raise Exception()
|
||||
|
||||
uri = self.prefixes.resolve(curie_or_uri)
|
||||
|
||||
if action == self.MSG_SUBSCRIBE and len(data) == 2:
|
||||
self.server.channels.subscribe(data[1], self.handler.active_client)
|
||||
|
||||
elif action == self.MSG_UNSUBSCRIBE and len(data) == 2:
|
||||
self.server.channels.unsubscribe(
|
||||
data[1], self.handler.active_client)
|
||||
|
||||
elif action == self.MSG_PUBLISH and len(data) >= 3:
|
||||
payload = data[2] if len(data) >= 3 else None
|
||||
exclude = data[3] if len(data) >= 4 else None
|
||||
eligible = data[4] if len(data) >= 5 else None
|
||||
|
||||
self.server.channels.publish(uri, payload, exclude, eligible)
|
||||
|
||||
def on_open(self):
|
||||
self.app.on_open()
|
||||
self.do_handshake()
|
||||
|
||||
def on_message(self, message):
|
||||
data = json.loads(message)
|
||||
|
||||
if not isinstance(data, list):
|
||||
raise Exception('incoming data is no list')
|
||||
|
||||
if data[0] == self.MSG_PREFIX and len(data) == 3:
|
||||
prefix, uri = data[1:3]
|
||||
self.prefixes.add(prefix, uri)
|
||||
|
||||
elif data[0] == self.MSG_CALL and len(data) >= 3:
|
||||
return self.rpc_call(data)
|
||||
|
||||
elif data[0] in (self.MSG_SUBSCRIBE, self.MSG_UNSUBSCRIBE,
|
||||
self.MSG_PUBLISH):
|
||||
return self.pubsub_action(data)
|
||||
else:
|
||||
raise Exception("Unknown call")
|
||||
|
libs/geventwebsocket/resource.py (new file, 100 lines)

@@ -0,0 +1,100 @@
|
|||
import re
|
||||
import warnings
|
||||
|
||||
from .protocols.base import BaseProtocol
|
||||
from .exceptions import WebSocketError
|
||||
|
||||
try:
|
||||
from collections import OrderedDict
|
||||
except ImportError:
|
||||
class OrderedDict:
|
||||
pass
|
||||
|
||||
|
||||
class WebSocketApplication(object):
|
||||
protocol_class = BaseProtocol
|
||||
|
||||
def __init__(self, ws):
|
||||
self.protocol = self.protocol_class(self)
|
||||
self.ws = ws
|
||||
|
||||
def handle(self):
|
||||
self.protocol.on_open()
|
||||
|
||||
while True:
|
||||
try:
|
||||
message = self.ws.receive()
|
||||
except WebSocketError:
|
||||
self.protocol.on_close()
|
||||
break
|
||||
|
||||
self.protocol.on_message(message)
|
||||
|
||||
def on_open(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def on_close(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def on_message(self, message, *args, **kwargs):
|
||||
self.ws.send(message, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def protocol_name(cls):
|
||||
return cls.protocol_class.PROTOCOL_NAME
|
||||
|
||||
|
||||
class Resource(object):
|
||||
def __init__(self, apps=None):
|
||||
self.apps = apps if apps else []
|
||||
|
||||
if isinstance(apps, dict):
|
||||
if not isinstance(apps, OrderedDict):
|
||||
warnings.warn("Using an unordered dictionary for the "
|
||||
"app list is discouraged and may lead to "
|
||||
"undefined behavior.", UserWarning)
|
||||
|
||||
self.apps = apps.items()
|
||||
|
||||
# An app can either be a standard WSGI application (an object we call with
|
||||
# __call__(self, environ, start_response)) or a class we instantiate
|
||||
# (and which can handle websockets). This function tells them apart.
|
||||
# Override this if you have apps that can handle websockets but don't
|
||||
# fulfill these criteria.
|
||||
def _is_websocket_app(self, app):
|
||||
return isinstance(app, type) and issubclass(app, WebSocketApplication)
|
||||
|
||||
def _app_by_path(self, environ_path, is_websocket_request):
|
||||
# Which app matched the current path?
|
||||
for path, app in self.apps:
|
||||
if re.match(path, environ_path):
|
||||
if is_websocket_request == self._is_websocket_app(app):
|
||||
return app
|
||||
return None
|
||||
|
||||
def app_protocol(self, path):
|
||||
# app_protocol will only be called for websocket apps
|
||||
app = self._app_by_path(path, True)
|
||||
|
||||
if hasattr(app, 'protocol_name'):
|
||||
return app.protocol_name()
|
||||
else:
|
||||
return ''
|
||||
|
||||
def __call__(self, environ, start_response):
|
||||
environ = environ
|
||||
is_websocket_call = 'wsgi.websocket' in environ
|
||||
current_app = self._app_by_path(environ['PATH_INFO'], is_websocket_call)
|
||||
|
||||
if current_app is None:
|
||||
raise Exception("No apps defined")
|
||||
|
||||
if is_websocket_call:
|
||||
ws = environ['wsgi.websocket']
|
||||
current_app = current_app(ws)
|
||||
current_app.ws = ws # TODO: needed?
|
||||
current_app.handle()
|
||||
# Always return something, calling WSGI middleware may rely on it
|
||||
return []
|
||||
else:
|
||||
return current_app(environ, start_response)
|
libs/geventwebsocket/server.py (new file, 34 lines)

@@ -0,0 +1,34 @@
from gevent.pywsgi import WSGIServer

from .handler import WebSocketHandler
from .logging import create_logger


class WebSocketServer(WSGIServer):
    handler_class = WebSocketHandler
    debug_log_format = (
        '-' * 80 + '\n' +
        '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
        '%(message)s\n' +
        '-' * 80
    )

    def __init__(self, *args, **kwargs):
        self.debug = kwargs.pop('debug', False)
        self.pre_start_hook = kwargs.pop('pre_start_hook', None)
        self._logger = None
        self.clients = {}

        super(WebSocketServer, self).__init__(*args, **kwargs)

    def handle(self, socket, address):
        handler = self.handler_class(socket, address, self)
        handler.handle()

    @property
    def logger(self):
        if not self._logger:
            self._logger = create_logger(
                __name__, self.debug, self.debug_log_format)

        return self._logger
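Taken together with resource.py and handler.py above, this class gives the vendored library its usual standalone entry point. A minimal echo-server sketch in the style of upstream gevent-websocket's examples (hypothetical path and port, unrelated to Bazarr):

from geventwebsocket import WebSocketServer, WebSocketApplication, Resource

class EchoApplication(WebSocketApplication):
    # WebSocketApplication dispatches on_open/on_message/on_close through BaseProtocol.
    def on_message(self, message, *args, **kwargs):
        if message is not None:
            self.ws.send(message)  # echo the frame back to the client

if __name__ == '__main__':
    # Resource maps URL patterns (regexes) to websocket applications.
    WebSocketServer(('127.0.0.1', 8000), Resource([('^/echo', EchoApplication)])).serve_forever()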
libs/geventwebsocket/utf8validator.py (new file, 224 lines)

@@ -0,0 +1,224 @@
|
|||
from ._compat import PY3
|
||||
|
||||
###############################################################################
|
||||
#
|
||||
# The MIT License (MIT)
|
||||
#
|
||||
# Copyright (c) Crossbar.io Technologies GmbH
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
# Note: This code is a Python implementation of the algorithm
|
||||
# "Flexible and Economical UTF-8 Decoder" by Bjoern Hoehrmann
|
||||
# bjoern@hoehrmann.de, http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
|
||||
|
||||
__all__ = ("Utf8Validator",)
|
||||
|
||||
|
||||
# DFA transitions
|
||||
UTF8VALIDATOR_DFA = (
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 00..1f
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 20..3f
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 40..5f
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 60..7f
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, # 80..9f
|
||||
7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # a0..bf
|
||||
8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # c0..df
|
||||
0xa, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, # e0..ef
|
||||
0xb, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, # f0..ff
|
||||
0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, # s0..s0
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, # s1..s2
|
||||
1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, # s3..s4
|
||||
1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, # s5..s6
|
||||
1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # s7..s8
|
||||
)
|
||||
|
||||
UTF8_ACCEPT = 0
|
||||
UTF8_REJECT = 1
|
||||
|
||||
|
||||
# use Cython implementation of UTF8 validator if available
|
||||
#
|
||||
try:
|
||||
from wsaccel.utf8validator import Utf8Validator
|
||||
|
||||
except ImportError:
|
||||
#
|
||||
# Fallback to pure Python implementation - also for PyPy.
|
||||
#
|
||||
# Do NOT touch this code unless you know what you are doing!
|
||||
# https://github.com/oberstet/scratchbox/tree/master/python/utf8
|
||||
#
|
||||
|
||||
if PY3:
|
||||
|
||||
# Python 3 and above
|
||||
|
||||
# convert DFA table to bytes (performance)
|
||||
UTF8VALIDATOR_DFA_S = bytes(UTF8VALIDATOR_DFA)
|
||||
|
||||
class Utf8Validator(object):
|
||||
"""
|
||||
Incremental UTF-8 validator with constant memory consumption (minimal state).
|
||||
|
||||
Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
|
||||
Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.reset()
|
||||
|
||||
def decode(self, b):
|
||||
"""
|
||||
Eat one UTF-8 octet, and validate on the fly.
|
||||
|
||||
Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case
|
||||
``self.codepoint`` contains the decoded Unicode code point.
|
||||
|
||||
Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered.
|
||||
|
||||
Returns some other positive integer when more octets need to be eaten.
|
||||
"""
|
||||
tt = UTF8VALIDATOR_DFA_S[b]
|
||||
if self.state != UTF8_ACCEPT:
|
||||
self.codepoint = (b & 0x3f) | (self.codepoint << 6)
|
||||
else:
|
||||
self.codepoint = (0xff >> tt) & b
|
||||
self.state = UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt]
|
||||
return self.state
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
Reset validator to start new incremental UTF-8 decode/validation.
|
||||
"""
|
||||
self.state = UTF8_ACCEPT # the empty string is valid UTF8
|
||||
self.codepoint = 0
|
||||
self.i = 0
|
||||
|
||||
def validate(self, ba):
|
||||
"""
|
||||
Incrementally validate a chunk of bytes provided as string.
|
||||
|
||||
Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``.
|
||||
|
||||
As soon as an octet is encountered which renders the octet sequence
|
||||
invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns
|
||||
the index within the currently consumed chunk, and ``totalIndex`` the
|
||||
index within the total consumed sequence that was the point of bail out.
|
||||
When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the
|
||||
total amount of consumed bytes.
|
||||
"""
|
||||
#
|
||||
# The code here is written for optimal JITting in PyPy, not for best
|
||||
# readability by your grandma or particular elegance. Do NOT touch!
|
||||
#
|
||||
l = len(ba)
|
||||
i = 0
|
||||
state = self.state
|
||||
while i < l:
|
||||
# optimized version of decode(), since we are not interested in actual code points
|
||||
state = UTF8VALIDATOR_DFA_S[256 + (state << 4) + UTF8VALIDATOR_DFA_S[ba[i]]]
|
||||
if state == UTF8_REJECT:
|
||||
self.state = state
|
||||
self.i += i
|
||||
return False, False, i, self.i
|
||||
i += 1
|
||||
self.state = state
|
||||
self.i += l
|
||||
return True, state == UTF8_ACCEPT, l, self.i
|
||||
|
||||
else:
|
||||
|
||||
# convert DFA table to string (performance)
|
||||
UTF8VALIDATOR_DFA_S = ''.join([chr(c) for c in UTF8VALIDATOR_DFA])
|
||||
|
||||
class Utf8Validator(object):
|
||||
"""
|
||||
Incremental UTF-8 validator with constant memory consumption (minimal state).
|
||||
|
||||
Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
|
||||
Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.reset()
|
||||
|
||||
def decode(self, b):
|
||||
"""
|
||||
Eat one UTF-8 octet, and validate on the fly.
|
||||
|
||||
Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case
|
||||
``self.codepoint`` contains the decoded Unicode code point.
|
||||
|
||||
Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered.
|
||||
|
||||
Returns some other positive integer when more octets need to be eaten.
|
||||
"""
|
||||
tt = ord(UTF8VALIDATOR_DFA_S[b])
|
||||
if self.state != UTF8_ACCEPT:
|
||||
self.codepoint = (b & 0x3f) | (self.codepoint << 6)
|
||||
else:
|
||||
self.codepoint = (0xff >> tt) & b
|
||||
self.state = ord(UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt])
|
||||
return self.state
|
||||
|
||||
def reset(self):
|
||||
"""
|
||||
Reset validator to start new incremental UTF-8 decode/validation.
|
||||
"""
|
||||
self.state = UTF8_ACCEPT # the empty string is valid UTF8
|
||||
self.codepoint = 0
|
||||
self.i = 0
|
||||
|
||||
def validate(self, ba):
|
||||
"""
|
||||
Incrementally validate a chunk of bytes provided as string.
|
||||
|
||||
Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``.
|
||||
|
||||
As soon as an octet is encountered which renders the octet sequence
|
||||
invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns
|
||||
the index within the currently consumed chunk, and ``totalIndex`` the
|
||||
index within the total consumed sequence that was the point of bail out.
|
||||
When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the
|
||||
total amount of consumed bytes.
|
||||
"""
|
||||
#
|
||||
# The code here is written for optimal JITting in PyPy, not for best
|
||||
# readability by your grandma or particular elegance. Do NOT touch!
|
||||
#
|
||||
l = len(ba)
|
||||
i = 0
|
||||
state = self.state
|
||||
while i < l:
|
||||
# optimized version of decode(), since we are not interested in actual code points
|
||||
try:
|
||||
state = ord(UTF8VALIDATOR_DFA_S[256 + (state << 4) + ord(UTF8VALIDATOR_DFA_S[ba[i]])])
|
||||
except:
|
||||
import ipdb; ipdb.set_trace()
|
||||
if state == UTF8_REJECT:
|
||||
self.state = state
|
||||
self.i += i
|
||||
return False, False, i, self.i
|
||||
i += 1
|
||||
self.state = state
|
||||
self.i += l
|
||||
return True, state == UTF8_ACCEPT, l, self.i
|
libs/geventwebsocket/utils.py (new file, 45 lines)

@@ -0,0 +1,45 @@
import subprocess


def get_version(version=None):
    "Returns a PEP 386-compliant version number from VERSION."

    if version is None:
        from geventwebsocket import VERSION as version
    else:
        assert len(version) == 5
        assert version[3] in ('alpha', 'beta', 'rc', 'final')

    # Now build the two parts of the version number:
    # main = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|c}N - for alpha, beta and rc releases

    parts = 2 if version[2] == 0 else 3
    main = '.'.join(str(x) for x in version[:parts])

    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
        hg_changeset = get_hg_changeset()
        if hg_changeset:
            sub = '.dev{0}'.format(hg_changeset)

    elif version[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[version[3]] + str(version[4])

    return str(main + sub)


def get_hg_changeset():
    rev, err = subprocess.Popen(
        'hg id -i',
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    ).communicate()

    if err:
        return None
    else:
        return rev.strip().replace('+', '')
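get_version() is only string formatting over the VERSION tuple from __init__.py; the one branch omitted below is the '.devN' suffix taken from a Mercurial changeset for untagged alpha builds. A standalone restatement of the formatting rules, for illustration:

def format_version(version):
    # Mirrors get_version() above, minus the hg '.devN' branch.
    assert len(version) == 5 and version[3] in ('alpha', 'beta', 'rc', 'final')
    parts = 2 if version[2] == 0 else 3
    main = '.'.join(str(x) for x in version[:parts])
    sub = ''
    if version[3] != 'final':
        sub = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}[version[3]] + str(version[4])
    return main + sub

print(format_version((0, 10, 1, 'final', 0)))  # '0.10.1', the vendored release
print(format_version((0, 11, 0, 'beta', 2)))   # '0.11b2' (made-up example input)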
libs/geventwebsocket/websocket.py (new file, 565 lines)

@@ -0,0 +1,565 @@
|
|||
import struct
|
||||
|
||||
from socket import error
|
||||
|
||||
from ._compat import string_types, range_type, text_type
|
||||
from .exceptions import ProtocolError
|
||||
from .exceptions import WebSocketError
|
||||
from .exceptions import FrameTooLargeException
|
||||
from .utf8validator import Utf8Validator
|
||||
|
||||
|
||||
MSG_SOCKET_DEAD = "Socket is dead"
|
||||
MSG_ALREADY_CLOSED = "Connection is already closed"
|
||||
MSG_CLOSED = "Connection closed"
|
||||
|
||||
|
||||
class WebSocket(object):
|
||||
"""
|
||||
Base class for supporting websocket operations.
|
||||
|
||||
:ivar environ: The http environment referenced by this connection.
|
||||
:ivar closed: Whether this connection is closed/closing.
|
||||
:ivar stream: The underlying file like object that will be read from /
|
||||
written to by this WebSocket object.
|
||||
"""
|
||||
|
||||
__slots__ = ('utf8validator', 'utf8validate_last', 'environ', 'closed',
|
||||
'stream', 'raw_write', 'raw_read', 'handler')
|
||||
|
||||
OPCODE_CONTINUATION = 0x00
|
||||
OPCODE_TEXT = 0x01
|
||||
OPCODE_BINARY = 0x02
|
||||
OPCODE_CLOSE = 0x08
|
||||
OPCODE_PING = 0x09
|
||||
OPCODE_PONG = 0x0a
|
||||
|
||||
def __init__(self, environ, stream, handler):
|
||||
self.environ = environ
|
||||
self.closed = False
|
||||
|
||||
self.stream = stream
|
||||
|
||||
self.raw_write = stream.write
|
||||
self.raw_read = stream.read
|
||||
|
||||
self.utf8validator = Utf8Validator()
|
||||
self.handler = handler
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.close()
|
||||
except:
|
||||
# close() may fail if __init__ didn't complete
|
||||
pass
|
||||
|
||||
def _decode_bytes(self, bytestring):
|
||||
"""
|
||||
Internal method used to convert the utf-8 encoded bytestring into
|
||||
unicode.
|
||||
|
||||
If the conversion fails, the socket will be closed.
|
||||
"""
|
||||
|
||||
if not bytestring:
|
||||
return ''
|
||||
|
||||
try:
|
||||
return bytestring.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
self.close(1007)
|
||||
|
||||
raise
|
||||
|
||||
def _encode_bytes(self, text):
|
||||
"""
|
||||
:returns: The utf-8 byte string equivalent of `text`.
|
||||
"""
|
||||
|
||||
if not isinstance(text, str):
|
||||
text = text_type(text or '')
|
||||
|
||||
return text.encode("utf-8")
|
||||
|
||||
def _is_valid_close_code(self, code):
|
||||
"""
|
||||
:returns: Whether the returned close code is a valid hybi return code.
|
||||
"""
|
||||
if code < 1000:
|
||||
return False
|
||||
|
||||
if 1004 <= code <= 1006:
|
||||
return False
|
||||
|
||||
if 1012 <= code <= 1016:
|
||||
return False
|
||||
|
||||
if code == 1100:
|
||||
# not sure about this one but the autobahn fuzzer requires it.
|
||||
return False
|
||||
|
||||
if 2000 <= code <= 2999:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@property
|
||||
def current_app(self):
|
||||
if hasattr(self.handler.server.application, 'current_app'):
|
||||
return self.handler.server.application.current_app
|
||||
else:
|
||||
# For backwards compatibility reasons
|
||||
class MockApp():
|
||||
def on_close(self, *args):
|
||||
pass
|
||||
|
||||
return MockApp()
|
||||
|
||||
@property
|
||||
def origin(self):
|
||||
if not self.environ:
|
||||
return
|
||||
|
||||
return self.environ.get('HTTP_ORIGIN')
|
||||
|
||||
@property
|
||||
def protocol(self):
|
||||
if not self.environ:
|
||||
return
|
||||
|
||||
return self.environ.get('HTTP_SEC_WEBSOCKET_PROTOCOL')
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
if not self.environ:
|
||||
return
|
||||
|
||||
return self.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
if not self.environ:
|
||||
return
|
||||
|
||||
return self.environ.get('PATH_INFO')
|
||||
|
||||
@property
|
||||
def logger(self):
|
||||
return self.handler.logger
|
||||
|
||||
def handle_close(self, header, payload):
|
||||
"""
|
||||
Called when a close frame has been decoded from the stream.
|
||||
|
||||
:param header: The decoded `Header`.
|
||||
:param payload: The bytestring payload associated with the close frame.
|
||||
"""
|
||||
if not payload:
|
||||
self.close(1000, None)
|
||||
|
||||
return
|
||||
|
||||
if len(payload) < 2:
|
||||
raise ProtocolError('Invalid close frame: {0} {1}'.format(
|
||||
header, payload))
|
||||
|
||||
code = struct.unpack('!H', payload[:2])[0]
|
||||
payload = payload[2:]
|
||||
|
||||
if payload:
|
||||
validator = Utf8Validator()
|
||||
val = validator.validate(payload)
|
||||
|
||||
if not val[0]:
|
||||
raise UnicodeError
|
||||
|
||||
if not self._is_valid_close_code(code):
|
||||
raise ProtocolError('Invalid close code {0}'.format(code))
|
||||
|
||||
self.close(code, payload)
|
||||
|
||||
def handle_ping(self, header, payload):
|
||||
self.send_frame(payload, self.OPCODE_PONG)
|
||||
|
||||
def handle_pong(self, header, payload):
|
||||
pass
|
||||
|
||||
def read_frame(self):
|
||||
"""
|
||||
Block until a full frame has been read from the socket.
|
||||
|
||||
This is an internal method as calling this will not cleanup correctly
|
||||
if an exception is called. Use `receive` instead.
|
||||
|
||||
:return: The header and payload as a tuple.
|
||||
"""
|
||||
|
||||
header = Header.decode_header(self.stream)
|
||||
|
||||
if header.flags:
|
||||
raise ProtocolError
|
||||
|
||||
if not header.length:
|
||||
return header, b''
|
||||
|
||||
try:
|
||||
payload = self.raw_read(header.length)
|
||||
except error:
|
||||
payload = b''
|
||||
except Exception:
|
||||
# TODO log out this exception
|
||||
payload = b''
|
||||
|
||||
if len(payload) != header.length:
|
||||
raise WebSocketError('Unexpected EOF reading frame payload')
|
||||
|
||||
if header.mask:
|
||||
payload = header.unmask_payload(payload)
|
||||
|
||||
return header, payload
|
||||
|
||||
def validate_utf8(self, payload):
|
||||
# Make sure the frames are decodable independently
|
||||
self.utf8validate_last = self.utf8validator.validate(payload)
|
||||
|
||||
if not self.utf8validate_last[0]:
|
||||
            raise UnicodeError("Encountered invalid UTF-8 while processing "
                               "text message at payload octet index "
                               "{0:d}".format(self.utf8validate_last[3]))

    def read_message(self):
        """
        Return the next text or binary message from the socket.

        This is an internal method as calling this will not cleanup correctly
        if an exception is called. Use `receive` instead.
        """
        opcode = None
        message = bytearray()

        while True:
            header, payload = self.read_frame()
            f_opcode = header.opcode

            if f_opcode in (self.OPCODE_TEXT, self.OPCODE_BINARY):
                # a new frame
                if opcode:
                    raise ProtocolError("The opcode in non-fin frame is "
                                        "expected to be zero, got "
                                        "{0!r}".format(f_opcode))

                # Start reading a new message, reset the validator
                self.utf8validator.reset()
                self.utf8validate_last = (True, True, 0, 0)

                opcode = f_opcode

            elif f_opcode == self.OPCODE_CONTINUATION:
                if not opcode:
                    raise ProtocolError("Unexpected frame with opcode=0")

            elif f_opcode == self.OPCODE_PING:
                self.handle_ping(header, payload)
                continue

            elif f_opcode == self.OPCODE_PONG:
                self.handle_pong(header, payload)
                continue

            elif f_opcode == self.OPCODE_CLOSE:
                self.handle_close(header, payload)
                return

            else:
                raise ProtocolError("Unexpected opcode={0!r}".format(f_opcode))

            if opcode == self.OPCODE_TEXT:
                self.validate_utf8(payload)

            message += payload

            if header.fin:
                break

        if opcode == self.OPCODE_TEXT:
            self.validate_utf8(message)
            return self._decode_bytes(message)
        else:
            return message

    def receive(self):
        """
        Read and return a message from the stream. If `None` is returned, then
        the socket is considered closed/errored.
        """

        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)

        try:
            return self.read_message()
        except UnicodeError:
            self.close(1007)
        except ProtocolError:
            self.close(1002)
        except error:
            self.close()
            self.current_app.on_close(MSG_CLOSED)

        return None

    def send_frame(self, message, opcode):
        """
        Send a frame over the websocket with message as its payload
        """
        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)
            raise WebSocketError(MSG_ALREADY_CLOSED)

        if opcode in (self.OPCODE_TEXT, self.OPCODE_PING):
            message = self._encode_bytes(message)
        elif opcode == self.OPCODE_BINARY:
            message = bytes(message)

        header = Header.encode_header(True, opcode, b'', len(message), 0)

        try:
            self.raw_write(header + message)
        except error:
            raise WebSocketError(MSG_SOCKET_DEAD)
        except:
            raise

    def send(self, message, binary=None):
        """
        Send a frame over the websocket with message as its payload
        """
        if binary is None:
            binary = not isinstance(message, string_types)

        opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT

        try:
            self.send_frame(message, opcode)
        except WebSocketError:
            self.current_app.on_close(MSG_SOCKET_DEAD)
            raise WebSocketError(MSG_SOCKET_DEAD)

    def close(self, code=1000, message=b''):
        """
        Close the websocket and connection, sending the specified code and
        message. The underlying socket object is _not_ closed, that is the
        responsibility of the initiator.
        """

        if self.closed:
            self.current_app.on_close(MSG_ALREADY_CLOSED)

        try:
            message = self._encode_bytes(message)

            self.send_frame(message, opcode=self.OPCODE_CLOSE)
        except WebSocketError:
            # Failed to write the closing frame but it's ok because we're
            # closing the socket anyway.
            self.logger.debug("Failed to write closing frame -> closing socket")
        finally:
            self.logger.debug("Closed WebSocket")
            self.closed = True

            self.stream = None
            self.raw_write = None
            self.raw_read = None

            self.environ = None

            #self.current_app.on_close(MSG_ALREADY_CLOSED)

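For orientation (not part of this commit), a minimal usage sketch of the API above, assuming `ws` is a connected instance of this WebSocket class: receive() returns None once the peer closes or the socket errors, and send() picks the text or binary opcode from the message type.

    # Hedged sketch only: echo every incoming message back to the peer.
    # `ws` is assumed to be a connected instance of the WebSocket class above.
    def echo_until_closed(ws):
        while True:
            message = ws.receive()   # None => closed/errored, per the docstring
            if message is None:
                break
            ws.send(message)         # str stays text, bytes stay binary
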
class Stream(object):
    """
    Wraps the handler's socket/rfile attributes and makes it in to a file like
    object that can be read from/written to by the lower level websocket api.
    """

    __slots__ = ('handler', 'read', 'write')

    def __init__(self, handler):
        self.handler = handler
        self.read = handler.rfile.read
        self.write = handler.socket.sendall


class Header(object):
    __slots__ = ('fin', 'mask', 'opcode', 'flags', 'length')

    FIN_MASK = 0x80
    OPCODE_MASK = 0x0f
    MASK_MASK = 0x80
    LENGTH_MASK = 0x7f

    RSV0_MASK = 0x40
    RSV1_MASK = 0x20
    RSV2_MASK = 0x10

    # bitwise mask that will determine the reserved bits for a frame header
    HEADER_FLAG_MASK = RSV0_MASK | RSV1_MASK | RSV2_MASK

    def __init__(self, fin=0, opcode=0, flags=0, length=0):
        self.mask = ''
        self.fin = fin
        self.opcode = opcode
        self.flags = flags
        self.length = length

    def mask_payload(self, payload):
        payload = bytearray(payload)
        mask = bytearray(self.mask)

        for i in range_type(self.length):
            payload[i] ^= mask[i % 4]

        return payload

    # it's the same operation
    unmask_payload = mask_payload

    def __repr__(self):
        opcodes = {
            0: 'continuation(0)',
            1: 'text(1)',
            2: 'binary(2)',
            8: 'close(8)',
            9: 'ping(9)',
            10: 'pong(10)'
        }
        flags = {
            0x40: 'RSV1 MASK',
            0x20: 'RSV2 MASK',
            0x10: 'RSV3 MASK'
        }

        return ("<Header fin={0} opcode={1} length={2} flags={3} mask={4} at "
                "0x{5:x}>").format(
                    self.fin,
                    opcodes.get(self.opcode, 'reserved({})'.format(self.opcode)),
                    self.length,
                    flags.get(self.flags, 'reserved({})'.format(self.flags)),
                    self.mask, id(self)
                )

    @classmethod
    def decode_header(cls, stream):
        """
        Decode a WebSocket header.

        :param stream: A file like object that can be 'read' from.
        :returns: A `Header` instance.
        """
        read = stream.read
        data = read(2)

        if len(data) != 2:
            raise WebSocketError("Unexpected EOF while decoding header")

        first_byte, second_byte = struct.unpack('!BB', data)

        header = cls(
            fin=first_byte & cls.FIN_MASK == cls.FIN_MASK,
            opcode=first_byte & cls.OPCODE_MASK,
            flags=first_byte & cls.HEADER_FLAG_MASK,
            length=second_byte & cls.LENGTH_MASK)

        has_mask = second_byte & cls.MASK_MASK == cls.MASK_MASK

        if header.opcode > 0x07:
            if not header.fin:
                raise ProtocolError(
                    "Received fragmented control frame: {0!r}".format(data))

            # Control frames MUST have a payload length of 125 bytes or less
            if header.length > 125:
                raise FrameTooLargeException(
                    "Control frame cannot be larger than 125 bytes: "
                    "{0!r}".format(data))

        if header.length == 126:
            # 16 bit length
            data = read(2)

            if len(data) != 2:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.length = struct.unpack('!H', data)[0]
        elif header.length == 127:
            # 64 bit length
            data = read(8)

            if len(data) != 8:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.length = struct.unpack('!Q', data)[0]

        if has_mask:
            mask = read(4)

            if len(mask) != 4:
                raise WebSocketError('Unexpected EOF while decoding header')

            header.mask = mask

        return header

    @classmethod
    def encode_header(cls, fin, opcode, mask, length, flags):
        """
        Encodes a WebSocket header.

        :param fin: Whether this is the final frame for this opcode.
        :param opcode: The opcode of the payload, see `OPCODE_*`
        :param mask: Whether the payload is masked.
        :param length: The length of the frame.
        :param flags: The RSV* flags.
        :return: A bytestring encoded header.
        """
        first_byte = opcode
        second_byte = 0
        extra = b""
        result = bytearray()

        if fin:
            first_byte |= cls.FIN_MASK

        if flags & cls.RSV0_MASK:
            first_byte |= cls.RSV0_MASK

        if flags & cls.RSV1_MASK:
            first_byte |= cls.RSV1_MASK

        if flags & cls.RSV2_MASK:
            first_byte |= cls.RSV2_MASK

        # now deal with length complexities
        if length < 126:
            second_byte += length
        elif length <= 0xffff:
            second_byte += 126
            extra = struct.pack('!H', length)
        elif length <= 0xffffffffffffffff:
            second_byte += 127
            extra = struct.pack('!Q', length)
        else:
            raise FrameTooLargeException

        if mask:
            second_byte |= cls.MASK_MASK

        result.append(first_byte)
        result.append(second_byte)
        result.extend(extra)

        if mask:
            result.extend(mask)

        return result

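As a standalone illustration (independent of the classes above, and not part of this commit), the two header bytes and the 4-byte XOR mask that Header works with can be built by hand; applying the mask a second time restores the payload, which is why unmask_payload is simply an alias of mask_payload.

    # Standalone sketch of the wire format handled by Header above.
    import struct

    FIN_MASK = 0x80      # bit 7 of byte 0: final frame
    OPCODE_TEXT = 0x01   # low nibble of byte 0: opcode
    MASK_MASK = 0x80     # bit 7 of byte 1: payload is masked

    payload = bytearray(b"hello")
    mask_key = bytearray(b"\x01\x02\x03\x04")  # normally 4 random bytes on the client side

    first_byte = FIN_MASK | OPCODE_TEXT        # final text frame
    second_byte = MASK_MASK | len(payload)     # 7-bit length (126/127 switch to 16/64-bit forms)
    header = struct.pack('!BB', first_byte, second_byte) + bytes(mask_key)

    masked = bytearray(payload)
    for i in range(len(masked)):
        masked[i] ^= mask_key[i % 4]           # same XOR as Header.mask_payload

    unmasked = bytearray(masked)
    for i in range(len(unmasked)):
        unmasked[i] ^= mask_key[i % 4]         # applying the mask again undoes it

    assert unmasked == payload
    frame = header + bytes(masked)             # complete masked text frame
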
0 libs/html5lib/__init__.py Normal file
@ -9,6 +9,7 @@ chardet=3.0.4
configparser2=4.0.0
dogpile.cache=0.6.5
enzyme=0.4.1
geventwebsocker=0.10.1
gitpython=2.1.9
guessit=2.1.4
langdetect=1.0.7

697 static/noty/noty.css Normal file
@ -0,0 +1,697 @@
.noty_layout_mixin, #noty_layout__top, #noty_layout__topLeft, #noty_layout__topCenter, #noty_layout__topRight, #noty_layout__bottom, #noty_layout__bottomLeft, #noty_layout__bottomCenter, #noty_layout__bottomRight, #noty_layout__center, #noty_layout__centerLeft, #noty_layout__centerRight {
  position: fixed;
  margin: 0;
  padding: 0;
  z-index: 9999999;
  -webkit-transform: translateZ(0) scale(1, 1);
  transform: translateZ(0) scale(1, 1);
  -webkit-backface-visibility: hidden;
  backface-visibility: hidden;
  -webkit-font-smoothing: subpixel-antialiased;
  filter: blur(0);
  -webkit-filter: blur(0);
  max-width: 90%; }

#noty_layout__top {
  top: 0;
  left: 5%;
  width: 90%; }

#noty_layout__topLeft {
  top: 20px;
  left: 20px;
  width: 325px; }

#noty_layout__topCenter {
  top: 5%;
  left: 50%;
  width: 325px;
  -webkit-transform: translate(-webkit-calc(-50% - .5px)) translateZ(0) scale(1, 1);
  transform: translate(calc(-50% - .5px)) translateZ(0) scale(1, 1); }

#noty_layout__topRight {
  top: 20px;
  right: 20px;
  width: 325px; }

#noty_layout__bottom {
  bottom: 0;
  left: 5%;
  width: 90%; }

#noty_layout__bottomLeft {
  bottom: 20px;
  left: 20px;
  width: 325px; }

#noty_layout__bottomCenter {
  bottom: 5%;
  left: 50%;
  width: 325px;
  -webkit-transform: translate(-webkit-calc(-50% - .5px)) translateZ(0) scale(1, 1);
  transform: translate(calc(-50% - .5px)) translateZ(0) scale(1, 1); }

#noty_layout__bottomRight {
  bottom: 20px;
  right: 20px;
  width: 325px; }

#noty_layout__center {
  top: 50%;
  left: 50%;
  width: 325px;
  -webkit-transform: translate(-webkit-calc(-50% - .5px), -webkit-calc(-50% - .5px)) translateZ(0) scale(1, 1);
  transform: translate(calc(-50% - .5px), calc(-50% - .5px)) translateZ(0) scale(1, 1); }

#noty_layout__centerLeft {
  top: 50%;
  left: 20px;
  width: 325px;
  -webkit-transform: translate(0, -webkit-calc(-50% - .5px)) translateZ(0) scale(1, 1);
  transform: translate(0, calc(-50% - .5px)) translateZ(0) scale(1, 1); }

#noty_layout__centerRight {
  top: 50%;
  right: 20px;
  width: 325px;
  -webkit-transform: translate(0, -webkit-calc(-50% - .5px)) translateZ(0) scale(1, 1);
  transform: translate(0, calc(-50% - .5px)) translateZ(0) scale(1, 1); }

.noty_progressbar {
  display: none; }

.noty_has_timeout.noty_has_progressbar .noty_progressbar {
  display: block;
  position: absolute;
  left: 0;
  bottom: 0;
  height: 3px;
  width: 100%;
  background-color: #646464;
  opacity: 0.2;
  filter: alpha(opacity=10); }

.noty_bar {
  -webkit-backface-visibility: hidden;
  -webkit-transform: translate(0, 0) translateZ(0) scale(1, 1);
  -ms-transform: translate(0, 0) scale(1, 1);
  transform: translate(0, 0) scale(1, 1);
  -webkit-font-smoothing: subpixel-antialiased;
  overflow: hidden; }

.noty_effects_open {
  opacity: 0;
  -webkit-transform: translate(50%);
  -ms-transform: translate(50%);
  transform: translate(50%);
  -webkit-animation: noty_anim_in 0.5s cubic-bezier(0.68, -0.55, 0.265, 1.55);
  animation: noty_anim_in 0.5s cubic-bezier(0.68, -0.55, 0.265, 1.55);
  -webkit-animation-fill-mode: forwards;
  animation-fill-mode: forwards; }

.noty_effects_close {
  -webkit-animation: noty_anim_out 0.5s cubic-bezier(0.68, -0.55, 0.265, 1.55);
  animation: noty_anim_out 0.5s cubic-bezier(0.68, -0.55, 0.265, 1.55);
  -webkit-animation-fill-mode: forwards;
  animation-fill-mode: forwards; }

.noty_fix_effects_height {
  -webkit-animation: noty_anim_height 75ms ease-out;
  animation: noty_anim_height 75ms ease-out; }

.noty_close_with_click {
  cursor: pointer; }

.noty_close_button {
  position: absolute;
  top: 2px;
  right: 2px;
  font-weight: bold;
  width: 20px;
  height: 20px;
  text-align: center;
  line-height: 20px;
  background-color: rgba(0, 0, 0, 0.05);
  border-radius: 2px;
  cursor: pointer;
  -webkit-transition: all .2s ease-out;
  transition: all .2s ease-out; }

.noty_close_button:hover {
  background-color: rgba(0, 0, 0, 0.1); }

.noty_modal {
  position: fixed;
  width: 100%;
  height: 100%;
  background-color: #000;
  z-index: 10000;
  opacity: .3;
  left: 0;
  top: 0; }

.noty_modal.noty_modal_open {
  opacity: 0;
  -webkit-animation: noty_modal_in .3s ease-out;
  animation: noty_modal_in .3s ease-out; }

.noty_modal.noty_modal_close {
  -webkit-animation: noty_modal_out .3s ease-out;
  animation: noty_modal_out .3s ease-out;
  -webkit-animation-fill-mode: forwards;
  animation-fill-mode: forwards; }

@-webkit-keyframes noty_modal_in {
  100% {
    opacity: .3; } }

@keyframes noty_modal_in {
  100% {
    opacity: .3; } }

@-webkit-keyframes noty_modal_out {
  100% {
    opacity: 0; } }

@keyframes noty_modal_out {
  100% {
    opacity: 0; } }

@keyframes noty_modal_out {
  100% {
    opacity: 0; } }

@-webkit-keyframes noty_anim_in {
  100% {
    -webkit-transform: translate(0);
    transform: translate(0);
    opacity: 1; } }

@keyframes noty_anim_in {
  100% {
    -webkit-transform: translate(0);
    transform: translate(0);
    opacity: 1; } }

@-webkit-keyframes noty_anim_out {
  100% {
    -webkit-transform: translate(50%);
    transform: translate(50%);
    opacity: 0; } }

@keyframes noty_anim_out {
  100% {
    -webkit-transform: translate(50%);
    transform: translate(50%);
    opacity: 0; } }

@-webkit-keyframes noty_anim_height {
  100% {
    height: 0; } }

@keyframes noty_anim_height {
  100% {
    height: 0; } }

.noty_theme__relax.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  border-radius: 2px;
  position: relative; }
.noty_theme__relax.noty_bar .noty_body {
  padding: 10px; }
.noty_theme__relax.noty_bar .noty_buttons {
  border-top: 1px solid #e7e7e7;
  padding: 5px 10px; }

.noty_theme__relax.noty_type__alert,
.noty_theme__relax.noty_type__notification {
  background-color: #fff;
  border: 1px solid #dedede;
  color: #444; }

.noty_theme__relax.noty_type__warning {
  background-color: #FFEAA8;
  border: 1px solid #FFC237;
  color: #826200; }
.noty_theme__relax.noty_type__warning .noty_buttons {
  border-color: #dfaa30; }

.noty_theme__relax.noty_type__error {
  background-color: #FF8181;
  border: 1px solid #e25353;
  color: #FFF; }
.noty_theme__relax.noty_type__error .noty_buttons {
  border-color: darkred; }

.noty_theme__relax.noty_type__info,
.noty_theme__relax.noty_type__information {
  background-color: #78C5E7;
  border: 1px solid #3badd6;
  color: #FFF; }
.noty_theme__relax.noty_type__info .noty_buttons,
.noty_theme__relax.noty_type__information .noty_buttons {
  border-color: #0B90C4; }

.noty_theme__relax.noty_type__success {
  background-color: #BCF5BC;
  border: 1px solid #7cdd77;
  color: darkgreen; }
.noty_theme__relax.noty_type__success .noty_buttons {
  border-color: #50C24E; }

.noty_theme__metroui.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  position: relative;
  box-shadow: rgba(0, 0, 0, 0.298039) 0 0 5px 0; }
.noty_theme__metroui.noty_bar .noty_progressbar {
  position: absolute;
  left: 0;
  bottom: 0;
  height: 3px;
  width: 100%;
  background-color: #000;
  opacity: 0.2;
  filter: alpha(opacity=20); }
.noty_theme__metroui.noty_bar .noty_body {
  padding: 1.25em;
  font-size: 14px; }
.noty_theme__metroui.noty_bar .noty_buttons {
  padding: 0 10px .5em 10px; }

.noty_theme__metroui.noty_type__alert,
.noty_theme__metroui.noty_type__notification {
  background-color: #fff;
  color: #1d1d1d; }

.noty_theme__metroui.noty_type__warning {
  background-color: #FA6800;
  color: #fff; }

.noty_theme__metroui.noty_type__error {
  background-color: #CE352C;
  color: #FFF; }

.noty_theme__metroui.noty_type__info,
.noty_theme__metroui.noty_type__information {
  background-color: #1BA1E2;
  color: #FFF; }

.noty_theme__metroui.noty_type__success {
  background-color: #60A917;
  color: #fff; }

.noty_theme__mint.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  border-radius: 2px;
  position: relative; }
.noty_theme__mint.noty_bar .noty_body {
  padding: 10px;
  font-size: 14px; }
.noty_theme__mint.noty_bar .noty_buttons {
  padding: 10px; }

.noty_theme__mint.noty_type__alert,
.noty_theme__mint.noty_type__notification {
  background-color: #fff;
  border-bottom: 1px solid #D1D1D1;
  color: #2F2F2F; }

.noty_theme__mint.noty_type__warning {
  background-color: #FFAE42;
  border-bottom: 1px solid #E89F3C;
  color: #fff; }

.noty_theme__mint.noty_type__error {
  background-color: #DE636F;
  border-bottom: 1px solid #CA5A65;
  color: #fff; }

.noty_theme__mint.noty_type__info,
.noty_theme__mint.noty_type__information {
  background-color: #7F7EFF;
  border-bottom: 1px solid #7473E8;
  color: #fff; }

.noty_theme__mint.noty_type__success {
  background-color: #AFC765;
  border-bottom: 1px solid #A0B55C;
  color: #fff; }

.noty_theme__sunset.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  border-radius: 2px;
  position: relative; }
.noty_theme__sunset.noty_bar .noty_body {
  padding: 10px;
  font-size: 14px;
  text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.1); }
.noty_theme__sunset.noty_bar .noty_buttons {
  padding: 10px; }

.noty_theme__sunset.noty_type__alert,
.noty_theme__sunset.noty_type__notification {
  background-color: #073B4C;
  color: #fff; }
.noty_theme__sunset.noty_type__alert .noty_progressbar,
.noty_theme__sunset.noty_type__notification .noty_progressbar {
  background-color: #fff; }

.noty_theme__sunset.noty_type__warning {
  background-color: #FFD166;
  color: #fff; }

.noty_theme__sunset.noty_type__error {
  background-color: #EF476F;
  color: #fff; }
.noty_theme__sunset.noty_type__error .noty_progressbar {
  opacity: .4; }

.noty_theme__sunset.noty_type__info,
.noty_theme__sunset.noty_type__information {
  background-color: #118AB2;
  color: #fff; }
.noty_theme__sunset.noty_type__info .noty_progressbar,
.noty_theme__sunset.noty_type__information .noty_progressbar {
  opacity: .6; }

.noty_theme__sunset.noty_type__success {
  background-color: #06D6A0;
  color: #fff; }

.noty_theme__bootstrap-v3.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  position: relative;
  border: 1px solid transparent;
  border-radius: 4px; }
.noty_theme__bootstrap-v3.noty_bar .noty_body {
  padding: 15px; }
.noty_theme__bootstrap-v3.noty_bar .noty_buttons {
  padding: 10px; }
.noty_theme__bootstrap-v3.noty_bar .noty_close_button {
  font-size: 21px;
  font-weight: 700;
  line-height: 1;
  color: #000;
  text-shadow: 0 1px 0 #fff;
  filter: alpha(opacity=20);
  opacity: .2;
  background: transparent; }
.noty_theme__bootstrap-v3.noty_bar .noty_close_button:hover {
  background: transparent;
  text-decoration: none;
  cursor: pointer;
  filter: alpha(opacity=50);
  opacity: .5; }

.noty_theme__bootstrap-v3.noty_type__alert,
.noty_theme__bootstrap-v3.noty_type__notification {
  background-color: #fff;
  color: inherit; }

.noty_theme__bootstrap-v3.noty_type__warning {
  background-color: #fcf8e3;
  color: #8a6d3b;
  border-color: #faebcc; }

.noty_theme__bootstrap-v3.noty_type__error {
  background-color: #f2dede;
  color: #a94442;
  border-color: #ebccd1; }

.noty_theme__bootstrap-v3.noty_type__info,
.noty_theme__bootstrap-v3.noty_type__information {
  background-color: #d9edf7;
  color: #31708f;
  border-color: #bce8f1; }

.noty_theme__bootstrap-v3.noty_type__success {
  background-color: #dff0d8;
  color: #3c763d;
  border-color: #d6e9c6; }

.noty_theme__bootstrap-v4.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  position: relative;
  border: 1px solid transparent;
  border-radius: .25rem; }
.noty_theme__bootstrap-v4.noty_bar .noty_body {
  padding: .75rem 1.25rem; }
.noty_theme__bootstrap-v4.noty_bar .noty_buttons {
  padding: 10px; }
.noty_theme__bootstrap-v4.noty_bar .noty_close_button {
  font-size: 1.5rem;
  font-weight: 700;
  line-height: 1;
  color: #000;
  text-shadow: 0 1px 0 #fff;
  filter: alpha(opacity=20);
  opacity: .5;
  background: transparent; }
.noty_theme__bootstrap-v4.noty_bar .noty_close_button:hover {
  background: transparent;
  text-decoration: none;
  cursor: pointer;
  filter: alpha(opacity=50);
  opacity: .75; }

.noty_theme__bootstrap-v4.noty_type__alert,
.noty_theme__bootstrap-v4.noty_type__notification {
  background-color: #fff;
  color: inherit; }

.noty_theme__bootstrap-v4.noty_type__warning {
  background-color: #fcf8e3;
  color: #8a6d3b;
  border-color: #faebcc; }

.noty_theme__bootstrap-v4.noty_type__error {
  background-color: #f2dede;
  color: #a94442;
  border-color: #ebccd1; }

.noty_theme__bootstrap-v4.noty_type__info,
.noty_theme__bootstrap-v4.noty_type__information {
  background-color: #d9edf7;
  color: #31708f;
  border-color: #bce8f1; }

.noty_theme__bootstrap-v4.noty_type__success {
  background-color: #dff0d8;
  color: #3c763d;
  border-color: #d6e9c6; }

.noty_theme__semanticui.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  position: relative;
  border: 1px solid transparent;
  font-size: 1em;
  border-radius: .28571429rem;
  box-shadow: 0 0 0 1px rgba(34, 36, 38, 0.22) inset, 0 0 0 0 transparent; }
.noty_theme__semanticui.noty_bar .noty_body {
  padding: 1em 1.5em;
  line-height: 1.4285em; }
.noty_theme__semanticui.noty_bar .noty_buttons {
  padding: 10px; }

.noty_theme__semanticui.noty_type__alert,
.noty_theme__semanticui.noty_type__notification {
  background-color: #f8f8f9;
  color: rgba(0, 0, 0, 0.87); }

.noty_theme__semanticui.noty_type__warning {
  background-color: #fffaf3;
  color: #573a08;
  box-shadow: 0 0 0 1px #c9ba9b inset, 0 0 0 0 transparent; }

.noty_theme__semanticui.noty_type__error {
  background-color: #fff6f6;
  color: #9f3a38;
  box-shadow: 0 0 0 1px #e0b4b4 inset, 0 0 0 0 transparent; }

.noty_theme__semanticui.noty_type__info,
.noty_theme__semanticui.noty_type__information {
  background-color: #f8ffff;
  color: #276f86;
  box-shadow: 0 0 0 1px #a9d5de inset, 0 0 0 0 transparent; }

.noty_theme__semanticui.noty_type__success {
  background-color: #fcfff5;
  color: #2c662d;
  box-shadow: 0 0 0 1px #a3c293 inset, 0 0 0 0 transparent; }

.noty_theme__nest.noty_bar {
  margin: 0 0 15px 0;
  overflow: hidden;
  border-radius: 2px;
  position: relative;
  box-shadow: rgba(0, 0, 0, 0.098039) 5px 4px 10px 0; }
.noty_theme__nest.noty_bar .noty_body {
  padding: 10px;
  font-size: 14px;
  text-shadow: 1px 1px 1px rgba(0, 0, 0, 0.1); }
.noty_theme__nest.noty_bar .noty_buttons {
  padding: 10px; }

.noty_layout .noty_theme__nest.noty_bar {
  z-index: 5; }

.noty_layout .noty_theme__nest.noty_bar:nth-child(2) {
  position: absolute;
  top: 0;
  margin-top: 4px;
  margin-right: -4px;
  margin-left: 4px;
  z-index: 4;
  width: 100%; }

.noty_layout .noty_theme__nest.noty_bar:nth-child(3) {
  position: absolute;
  top: 0;
  margin-top: 8px;
  margin-right: -8px;
  margin-left: 8px;
  z-index: 3;
  width: 100%; }

.noty_layout .noty_theme__nest.noty_bar:nth-child(4) {
  position: absolute;
  top: 0;
  margin-top: 12px;
  margin-right: -12px;
  margin-left: 12px;
  z-index: 2;
  width: 100%; }

.noty_layout .noty_theme__nest.noty_bar:nth-child(5) {
  position: absolute;
  top: 0;
  margin-top: 16px;
  margin-right: -16px;
  margin-left: 16px;
  z-index: 1;
  width: 100%; }

.noty_layout .noty_theme__nest.noty_bar:nth-child(n+6) {
  position: absolute;
  top: 0;
  margin-top: 20px;
  margin-right: -20px;
  margin-left: 20px;
  z-index: -1;
  width: 100%; }

#noty_layout__bottomLeft .noty_theme__nest.noty_bar:nth-child(2),
#noty_layout__topLeft .noty_theme__nest.noty_bar:nth-child(2) {
  margin-top: 4px;
  margin-left: -4px;
  margin-right: 4px; }

#noty_layout__bottomLeft .noty_theme__nest.noty_bar:nth-child(3),
#noty_layout__topLeft .noty_theme__nest.noty_bar:nth-child(3) {
  margin-top: 8px;
  margin-left: -8px;
  margin-right: 8px; }

#noty_layout__bottomLeft .noty_theme__nest.noty_bar:nth-child(4),
#noty_layout__topLeft .noty_theme__nest.noty_bar:nth-child(4) {
  margin-top: 12px;
  margin-left: -12px;
  margin-right: 12px; }

#noty_layout__bottomLeft .noty_theme__nest.noty_bar:nth-child(5),
#noty_layout__topLeft .noty_theme__nest.noty_bar:nth-child(5) {
  margin-top: 16px;
  margin-left: -16px;
  margin-right: 16px; }

#noty_layout__bottomLeft .noty_theme__nest.noty_bar:nth-child(n+6),
#noty_layout__topLeft .noty_theme__nest.noty_bar:nth-child(n+6) {
  margin-top: 20px;
  margin-left: -20px;
  margin-right: 20px; }

.noty_theme__nest.noty_type__alert,
.noty_theme__nest.noty_type__notification {
  background-color: #073B4C;
  color: #fff; }
.noty_theme__nest.noty_type__alert .noty_progressbar,
.noty_theme__nest.noty_type__notification .noty_progressbar {
  background-color: #fff; }

.noty_theme__nest.noty_type__warning {
  background-color: #FFD166;
  color: #fff; }

.noty_theme__nest.noty_type__error {
  background-color: #EF476F;
  color: #fff; }
.noty_theme__nest.noty_type__error .noty_progressbar {
  opacity: .4; }

.noty_theme__nest.noty_type__info,
.noty_theme__nest.noty_type__information {
  background-color: #118AB2;
  color: #fff; }
.noty_theme__nest.noty_type__info .noty_progressbar,
.noty_theme__nest.noty_type__information .noty_progressbar {
  opacity: .6; }

.noty_theme__nest.noty_type__success {
  background-color: #06D6A0;
  color: #fff; }

.noty_theme__light.noty_bar {
  margin: 4px 0;
  overflow: hidden;
  border-radius: 2px;
  position: relative; }
.noty_theme__light.noty_bar .noty_body {
  padding: 10px; }
.noty_theme__light.noty_bar .noty_buttons {
  border-top: 1px solid #e7e7e7;
  padding: 5px 10px; }

.noty_theme__light.noty_type__alert,
.noty_theme__light.noty_type__notification {
  background-color: #fff;
  border: 1px solid #dedede;
  color: #444; }

.noty_theme__light.noty_type__warning {
  background-color: #FFEAA8;
  border: 1px solid #FFC237;
  color: #826200; }
.noty_theme__light.noty_type__warning .noty_buttons {
  border-color: #dfaa30; }

.noty_theme__light.noty_type__error {
  background-color: #ED7000;
  border: 1px solid #e25353;
  color: #FFF; }
.noty_theme__light.noty_type__error .noty_buttons {
  border-color: darkred; }

.noty_theme__light.noty_type__info,
.noty_theme__light.noty_type__information {
  background-color: #78C5E7;
  border: 1px solid #3badd6;
  color: #FFF; }
.noty_theme__light.noty_type__info .noty_buttons,
.noty_theme__light.noty_type__information .noty_buttons {
  border-color: #0B90C4; }

.noty_theme__light.noty_type__success {
  background-color: #57C880;
  border: 1px solid #7cdd77;
  color: darkgreen; }
.noty_theme__light.noty_type__success .noty_buttons {
  border-color: #50C24E; }

/*# sourceMappingURL=noty.css.map*/

17 static/noty/noty.min.js vendored Normal file
File diff suppressed because one or more lines are too long
@ -1,6 +1,8 @@
<html lang="en">
    <head>
        <!DOCTYPE html>
        <link href="{{base_url}}static/noty/noty.css" rel="stylesheet">
        <script src="{{base_url}}static/noty/noty.min.js" type="text/javascript"></script>
        <style>
            #divmenu {
                background-color: #000000;
@ -206,4 +208,29 @@
                }
            });
        }
        </script>

        <script type="text/javascript">
            if (location.protocol != 'https:')
            {
                var ws = new WebSocket("ws://" + window.location.host + "{{base_url}}websocket");
            } else {
                var ws = new WebSocket("wss://" + window.location.host + "{{base_url}}websocket");
            }

            ws.onmessage = function (evt) {
                new Noty({
                    text: evt.data,
                    timeout: 3000,
                    progressBar: false,
                    animation: {
                        open: null,
                        close: null
                    },
                    killer: true,
                    type: 'info',
                    layout: 'bottomRight',
                    theme: 'semanticui'
                }).show();
            };
        </script>
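For context, a hedged sketch of what the server side of the {{base_url}}websocket endpoint could look like with gevent-websocket (the library pinned above). The route wiring, queue name and port here are illustrative assumptions rather than the project's actual implementation; gevent-websocket's handler exposes the upgraded connection as environ['wsgi.websocket'].

    # Illustrative only: push queued notification strings to the browser,
    # where the Noty onmessage handler above turns each one into a toast.
    import gevent
    from gevent.pywsgi import WSGIServer
    from geventwebsocket.handler import WebSocketHandler
    from geventwebsocket import WebSocketError

    notifications = []  # stand-in for the application's real message queue


    def websocket_app(environ, start_response):
        ws = environ.get('wsgi.websocket')
        if ws is None:
            start_response('400 Bad Request', [('Content-Type', 'text/plain')])
            return [b'WebSocket connection expected']

        try:
            while not ws.closed:
                while notifications:
                    ws.send(notifications.pop(0))
                gevent.sleep(0.5)
        except WebSocketError:
            pass
        return []


    if __name__ == '__main__':
        # Bind address and port are placeholders for this sketch.
        WSGIServer(('0.0.0.0', 6767), websocket_app,
                   handler_class=WebSocketHandler).serve_forever()
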