python-engineio-3.11.1/0000755000076600000240000000000013574014331016012 5ustar mgrinbergstaff00000000000000python-engineio-3.11.1/LICENSE0000644000076600000240000000207213564316347017033 0ustar mgrinbergstaff00000000000000The MIT License (MIT) Copyright (c) 2015 Miguel Grinberg Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-engineio-3.11.1/MANIFEST.in0000644000076600000240000000003213564316347017556 0ustar mgrinbergstaff00000000000000include README.md LICENSE python-engineio-3.11.1/PKG-INFO0000644000076600000240000000336613574014331017117 0ustar mgrinbergstaff00000000000000Metadata-Version: 2.1 Name: python-engineio Version: 3.11.1 Summary: Engine.IO server Home-page: http://github.com/miguelgrinberg/python-engineio/ Author: Miguel Grinberg Author-email: miguelgrinberg50@gmail.com License: MIT Description: python-engineio =============== .. image:: https://travis-ci.org/miguelgrinberg/python-engineio.svg?branch=master :target: https://travis-ci.org/miguelgrinberg/python-engineio Python implementation of the `Engine.IO`_ realtime client and server. Resources --------- - `Documentation`_ - `PyPI`_ - `Change Log`_ - Questions? See the `questions`_ others have asked on Stack Overflow, or `ask`_ your own question. .. _Engine.IO: https://github.com/socketio/engine.io .. _Documentation: https://python-engineio.readthedocs.io/en/latest/ .. _PyPI: https://pypi.python.org/pypi/python-engineio .. _Change Log: https://github.com/miguelgrinberg/python-engineio/blob/master/CHANGES.md .. _questions: https://stackoverflow.com/questions/tagged/python-socketio .. _ask: https://stackoverflow.com/questions/ask?tags=python+python-socketio Platform: any Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content Classifier: Topic :: Software Development :: Libraries :: Python Modules Provides-Extra: asyncio_client Provides-Extra: client python-engineio-3.11.1/README.rst0000644000076600000240000000151113564316347017512 0ustar mgrinbergstaff00000000000000python-engineio =============== .. image:: https://travis-ci.org/miguelgrinberg/python-engineio.svg?branch=master :target: https://travis-ci.org/miguelgrinberg/python-engineio Python implementation of the `Engine.IO`_ realtime client and server. Resources --------- - `Documentation`_ - `PyPI`_ - `Change Log`_ - Questions? See the `questions`_ others have asked on Stack Overflow, or `ask`_ your own question. .. _Engine.IO: https://github.com/socketio/engine.io .. _Documentation: https://python-engineio.readthedocs.io/en/latest/ .. _PyPI: https://pypi.python.org/pypi/python-engineio .. _Change Log: https://github.com/miguelgrinberg/python-engineio/blob/master/CHANGES.md .. _questions: https://stackoverflow.com/questions/tagged/python-socketio .. _ask: https://stackoverflow.com/questions/ask?tags=python+python-socketio python-engineio-3.11.1/engineio/0000755000076600000240000000000013574014331017607 5ustar mgrinbergstaff00000000000000python-engineio-3.11.1/engineio/__init__.py0000644000076600000240000000142313574014330021717 0ustar mgrinbergstaff00000000000000import sys from .client import Client from .middleware import WSGIApp, Middleware from .server import Server if sys.version_info >= (3, 5): # pragma: no cover from .asyncio_server import AsyncServer from .asyncio_client import AsyncClient from .async_drivers.asgi import ASGIApp try: from .async_drivers.tornado import get_tornado_handler except ImportError: get_tornado_handler = None else: # pragma: no cover AsyncServer = None AsyncClient = None get_tornado_handler = None ASGIApp = None __version__ = '3.11.1' __all__ = ['__version__', 'Server', 'WSGIApp', 'Middleware', 'Client'] if AsyncServer is not None: # pragma: no cover __all__ += ['AsyncServer', 'ASGIApp', 'get_tornado_handler', 'AsyncClient'], python-engineio-3.11.1/engineio/async_drivers/0000755000076600000240000000000013574014331022462 5ustar mgrinbergstaff00000000000000python-engineio-3.11.1/engineio/async_drivers/__init__.py0000644000076600000240000000000013564316347024574 0ustar mgrinbergstaff00000000000000python-engineio-3.11.1/engineio/async_drivers/aiohttp.py0000644000076600000240000000730113564316347024520 0ustar mgrinbergstaff00000000000000import asyncio import sys from urllib.parse import urlsplit from aiohttp.web import Response, WebSocketResponse import six def create_route(app, engineio_server, engineio_endpoint): """This function sets up the engine.io endpoint as a route for the application. Note that both GET and POST requests must be hooked up on the engine.io endpoint. """ app.router.add_get(engineio_endpoint, engineio_server.handle_request) app.router.add_post(engineio_endpoint, engineio_server.handle_request) app.router.add_route('OPTIONS', engineio_endpoint, engineio_server.handle_request) def translate_request(request): """This function takes the arguments passed to the request handler and uses them to generate a WSGI compatible environ dictionary. """ message = request._message payload = request._payload uri_parts = urlsplit(message.path) environ = { 'wsgi.input': payload, 'wsgi.errors': sys.stderr, 'wsgi.version': (1, 0), 'wsgi.async': True, 'wsgi.multithread': False, 'wsgi.multiprocess': False, 'wsgi.run_once': False, 'SERVER_SOFTWARE': 'aiohttp', 'REQUEST_METHOD': message.method, 'QUERY_STRING': uri_parts.query or '', 'RAW_URI': message.path, 'SERVER_PROTOCOL': 'HTTP/%s.%s' % message.version, 'REMOTE_ADDR': '127.0.0.1', 'REMOTE_PORT': '0', 'SERVER_NAME': 'aiohttp', 'SERVER_PORT': '0', 'aiohttp.request': request } for hdr_name, hdr_value in message.headers.items(): hdr_name = hdr_name.upper() if hdr_name == 'CONTENT-TYPE': environ['CONTENT_TYPE'] = hdr_value continue elif hdr_name == 'CONTENT-LENGTH': environ['CONTENT_LENGTH'] = hdr_value continue key = 'HTTP_%s' % hdr_name.replace('-', '_') if key in environ: hdr_value = '%s,%s' % (environ[key], hdr_value) environ[key] = hdr_value environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') path_info = uri_parts.path environ['PATH_INFO'] = path_info environ['SCRIPT_NAME'] = '' return environ def make_response(status, headers, payload, environ): """This function generates an appropriate response object for this async mode. """ return Response(body=payload, status=int(status.split()[0]), headers=headers) class WebSocket(object): # pragma: no cover """ This wrapper class provides a aiohttp WebSocket interface that is somewhat compatible with eventlet's implementation. """ def __init__(self, handler): self.handler = handler self._sock = None async def __call__(self, environ): request = environ['aiohttp.request'] self._sock = WebSocketResponse() await self._sock.prepare(request) self.environ = environ await self.handler(self) return self._sock async def close(self): await self._sock.close() async def send(self, message): if isinstance(message, bytes): f = self._sock.send_bytes else: f = self._sock.send_str if asyncio.iscoroutinefunction(f): await f(message) else: f(message) async def wait(self): msg = await self._sock.receive() if not isinstance(msg.data, six.binary_type) and \ not isinstance(msg.data, six.text_type): raise IOError() return msg.data _async = { 'asyncio': True, 'create_route': create_route, 'translate_request': translate_request, 'make_response': make_response, 'websocket': WebSocket, } python-engineio-3.11.1/engineio/async_drivers/asgi.py0000644000076600000240000001751213564316347024000 0ustar mgrinbergstaff00000000000000import os import sys from engineio.static_files import get_static_file class ASGIApp: """ASGI application middleware for Engine.IO. This middleware dispatches traffic to an Engine.IO application. It can also serve a list of static files to the client, or forward unrelated HTTP traffic to another ASGI application. :param engineio_server: The Engine.IO server. Must be an instance of the ``engineio.AsyncServer`` class. :param static_files: A dictionary with static file mapping rules. See the documentation for details on this argument. :param other_asgi_app: A separate ASGI app that receives all other traffic. :param engineio_path: The endpoint where the Engine.IO application should be installed. The default value is appropriate for most cases. Example usage:: import engineio import uvicorn eio = engineio.AsyncServer() app = engineio.ASGIApp(eio, static_files={ '/': {'content_type': 'text/html', 'filename': 'index.html'}, '/index.html': {'content_type': 'text/html', 'filename': 'index.html'}, }) uvicorn.run(app, '127.0.0.1', 5000) """ def __init__(self, engineio_server, other_asgi_app=None, static_files=None, engineio_path='engine.io'): self.engineio_server = engineio_server self.other_asgi_app = other_asgi_app self.engineio_path = engineio_path.strip('/') self.static_files = static_files or {} async def __call__(self, scope, receive, send): if scope['type'] in ['http', 'websocket'] and \ scope['path'].startswith('/{0}/'.format(self.engineio_path)): await self.engineio_server.handle_request(scope, receive, send) else: static_file = get_static_file(scope['path'], self.static_files) \ if scope['type'] == 'http' and self.static_files else None if static_file: await self.serve_static_file(static_file, receive, send) elif self.other_asgi_app is not None: await self.other_asgi_app(scope, receive, send) elif scope['type'] == 'lifespan': await self.lifespan(receive, send) else: await self.not_found(receive, send) async def serve_static_file(self, static_file, receive, send): # pragma: no cover event = await receive() if event['type'] == 'http.request': if os.path.exists(static_file['filename']): with open(static_file['filename'], 'rb') as f: payload = f.read() await send({'type': 'http.response.start', 'status': 200, 'headers': [(b'Content-Type', static_file[ 'content_type'].encode('utf-8'))]}) await send({'type': 'http.response.body', 'body': payload}) else: await self.not_found(receive, send) async def lifespan(self, receive, send): event = await receive() if event['type'] == 'lifespan.startup': await send({'type': 'lifespan.startup.complete'}) elif event['type'] == 'lifespan.shutdown': await send({'type': 'lifespan.shutdown.complete'}) async def not_found(self, receive, send): """Return a 404 Not Found error to the client.""" await send({'type': 'http.response.start', 'status': 404, 'headers': [(b'Content-Type', b'text/plain')]}) await send({'type': 'http.response.body', 'body': b'Not Found'}) async def translate_request(scope, receive, send): class AwaitablePayload(object): # pragma: no cover def __init__(self, payload): self.payload = payload or b'' async def read(self, length=None): if length is None: r = self.payload self.payload = b'' else: r = self.payload[:length] self.payload = self.payload[length:] return r event = await receive() payload = b'' if event['type'] == 'http.request': payload += event.get('body') or b'' while event.get('more_body'): event = await receive() if event['type'] == 'http.request': payload += event.get('body') or b'' elif event['type'] == 'websocket.connect': await send({'type': 'websocket.accept'}) else: return {} raw_uri = scope['path'].encode('utf-8') if 'query_string' in scope and scope['query_string']: raw_uri += b'?' + scope['query_string'] environ = { 'wsgi.input': AwaitablePayload(payload), 'wsgi.errors': sys.stderr, 'wsgi.version': (1, 0), 'wsgi.async': True, 'wsgi.multithread': False, 'wsgi.multiprocess': False, 'wsgi.run_once': False, 'SERVER_SOFTWARE': 'asgi', 'REQUEST_METHOD': scope.get('method', 'GET'), 'PATH_INFO': scope['path'], 'QUERY_STRING': scope.get('query_string', b'').decode('utf-8'), 'RAW_URI': raw_uri.decode('utf-8'), 'SCRIPT_NAME': '', 'SERVER_PROTOCOL': 'HTTP/1.1', 'REMOTE_ADDR': '127.0.0.1', 'REMOTE_PORT': '0', 'SERVER_NAME': 'asgi', 'SERVER_PORT': '0', 'asgi.receive': receive, 'asgi.send': send, } for hdr_name, hdr_value in scope['headers']: hdr_name = hdr_name.upper().decode('utf-8') hdr_value = hdr_value.decode('utf-8') if hdr_name == 'CONTENT-TYPE': environ['CONTENT_TYPE'] = hdr_value continue elif hdr_name == 'CONTENT-LENGTH': environ['CONTENT_LENGTH'] = hdr_value continue key = 'HTTP_%s' % hdr_name.replace('-', '_') if key in environ: hdr_value = '%s,%s' % (environ[key], hdr_value) environ[key] = hdr_value environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') return environ async def make_response(status, headers, payload, environ): headers = [(h[0].encode('utf-8'), h[1].encode('utf-8')) for h in headers] await environ['asgi.send']({'type': 'http.response.start', 'status': int(status.split(' ')[0]), 'headers': headers}) await environ['asgi.send']({'type': 'http.response.body', 'body': payload}) class WebSocket(object): # pragma: no cover """ This wrapper class provides an asgi WebSocket interface that is somewhat compatible with eventlet's implementation. """ def __init__(self, handler): self.handler = handler self.asgi_receive = None self.asgi_send = None async def __call__(self, environ): self.asgi_receive = environ['asgi.receive'] self.asgi_send = environ['asgi.send'] await self.handler(self) async def close(self): await self.asgi_send({'type': 'websocket.close'}) async def send(self, message): msg_bytes = None msg_text = None if isinstance(message, bytes): msg_bytes = message else: msg_text = message await self.asgi_send({'type': 'websocket.send', 'bytes': msg_bytes, 'text': msg_text}) async def wait(self): event = await self.asgi_receive() if event['type'] != 'websocket.receive': raise IOError() return event.get('bytes') or event.get('text') _async = { 'asyncio': True, 'translate_request': translate_request, 'make_response': make_response, 'websocket': WebSocket, } python-engineio-3.11.1/engineio/async_drivers/eventlet.py0000644000076600000240000000172513564316347024702 0ustar mgrinbergstaff00000000000000from __future__ import absolute_import from eventlet.green.threading import Thread, Event from eventlet import queue from eventlet import sleep from eventlet.websocket import WebSocketWSGI as _WebSocketWSGI class WebSocketWSGI(_WebSocketWSGI): def __init__(self, *args, **kwargs): super(WebSocketWSGI, self).__init__(*args, **kwargs) self._sock = None def __call__(self, environ, start_response): if 'eventlet.input' not in environ: raise RuntimeError('You need to use the eventlet server. ' 'See the Deployment section of the ' 'documentation for more information.') self._sock = environ['eventlet.input'].get_socket() return super(WebSocketWSGI, self).__call__(environ, start_response) _async = { 'thread': Thread, 'queue': queue.Queue, 'queue_empty': queue.Empty, 'event': Event, 'websocket': WebSocketWSGI, 'sleep': sleep, } python-engineio-3.11.1/engineio/async_drivers/gevent.py0000644000076600000240000000341313564316347024340 0ustar mgrinbergstaff00000000000000from __future__ import absolute_import import gevent from gevent import queue from gevent.event import Event try: import geventwebsocket # noqa _websocket_available = True except ImportError: _websocket_available = False class Thread(gevent.Greenlet): # pragma: no cover """ This wrapper class provides gevent Greenlet interface that is compatible with the standard library's Thread class. """ def __init__(self, target, args=[], kwargs={}): super(Thread, self).__init__(target, *args, **kwargs) def _run(self): return self.run() class WebSocketWSGI(object): # pragma: no cover """ This wrapper class provides a gevent WebSocket interface that is compatible with eventlet's implementation. """ def __init__(self, app): self.app = app def __call__(self, environ, start_response): if 'wsgi.websocket' not in environ: raise RuntimeError('You need to use the gevent-websocket server. ' 'See the Deployment section of the ' 'documentation for more information.') self._sock = environ['wsgi.websocket'] self.environ = environ self.version = self._sock.version self.path = self._sock.path self.origin = self._sock.origin self.protocol = self._sock.protocol return self.app(self) def close(self): return self._sock.close() def send(self, message): return self._sock.send(message) def wait(self): return self._sock.receive() _async = { 'thread': Thread, 'queue': queue.JoinableQueue, 'queue_empty': queue.Empty, 'event': Event, 'websocket': WebSocketWSGI if _websocket_available else None, 'sleep': gevent.sleep, } python-engineio-3.11.1/engineio/async_drivers/gevent_uwsgi.py0000644000076600000240000001243413564316347025561 0ustar mgrinbergstaff00000000000000from __future__ import absolute_import import six import gevent from gevent import queue from gevent.event import Event import uwsgi _websocket_available = hasattr(uwsgi, 'websocket_handshake') class Thread(gevent.Greenlet): # pragma: no cover """ This wrapper class provides gevent Greenlet interface that is compatible with the standard library's Thread class. """ def __init__(self, target, args=[], kwargs={}): super(Thread, self).__init__(target, *args, **kwargs) def _run(self): return self.run() class uWSGIWebSocket(object): # pragma: no cover """ This wrapper class provides a uWSGI WebSocket interface that is compatible with eventlet's implementation. """ def __init__(self, app): self.app = app self._sock = None def __call__(self, environ, start_response): self._sock = uwsgi.connection_fd() self.environ = environ uwsgi.websocket_handshake() self._req_ctx = None if hasattr(uwsgi, 'request_context'): # uWSGI >= 2.1.x with support for api access across-greenlets self._req_ctx = uwsgi.request_context() else: # use event and queue for sending messages from gevent.event import Event from gevent.queue import Queue from gevent.select import select self._event = Event() self._send_queue = Queue() # spawn a select greenlet def select_greenlet_runner(fd, event): """Sets event when data becomes available to read on fd.""" while True: event.set() try: select([fd], [], [])[0] except ValueError: break self._select_greenlet = gevent.spawn( select_greenlet_runner, self._sock, self._event) self.app(self) def close(self): """Disconnects uWSGI from the client.""" uwsgi.disconnect() if self._req_ctx is None: # better kill it here in case wait() is not called again self._select_greenlet.kill() self._event.set() def _send(self, msg): """Transmits message either in binary or UTF-8 text mode, depending on its type.""" if isinstance(msg, six.binary_type): method = uwsgi.websocket_send_binary else: method = uwsgi.websocket_send if self._req_ctx is not None: method(msg, request_context=self._req_ctx) else: method(msg) def _decode_received(self, msg): """Returns either bytes or str, depending on message type.""" if not isinstance(msg, six.binary_type): # already decoded - do nothing return msg # only decode from utf-8 if message is not binary data type = six.byte2int(msg[0:1]) if type >= 48: # no binary return msg.decode('utf-8') # binary message, don't try to decode return msg def send(self, msg): """Queues a message for sending. Real transmission is done in wait method. Sends directly if uWSGI version is new enough.""" if self._req_ctx is not None: self._send(msg) else: self._send_queue.put(msg) self._event.set() def wait(self): """Waits and returns received messages. If running in compatibility mode for older uWSGI versions, it also sends messages that have been queued by send(). A return value of None means that connection was closed. This must be called repeatedly. For uWSGI < 2.1.x it must be called from the main greenlet.""" while True: if self._req_ctx is not None: try: msg = uwsgi.websocket_recv(request_context=self._req_ctx) except IOError: # connection closed return None return self._decode_received(msg) else: # we wake up at least every 3 seconds to let uWSGI # do its ping/ponging event_set = self._event.wait(timeout=3) if event_set: self._event.clear() # maybe there is something to send msgs = [] while True: try: msgs.append(self._send_queue.get(block=False)) except gevent.queue.Empty: break for msg in msgs: self._send(msg) # maybe there is something to receive, if not, at least # ensure uWSGI does its ping/ponging try: msg = uwsgi.websocket_recv_nb() except IOError: # connection closed self._select_greenlet.kill() return None if msg: # message available return self._decode_received(msg) _async = { 'thread': Thread, 'queue': queue.JoinableQueue, 'queue_empty': queue.Empty, 'event': Event, 'websocket': uWSGIWebSocket if _websocket_available else None, 'sleep': gevent.sleep, } python-engineio-3.11.1/engineio/async_drivers/sanic.py0000644000076600000240000001035013564316347024143 0ustar mgrinbergstaff00000000000000import sys from urllib.parse import urlsplit from sanic.response import HTTPResponse try: from sanic.websocket import WebSocketProtocol except ImportError: # the installed version of sanic does not have websocket support WebSocketProtocol = None import six def create_route(app, engineio_server, engineio_endpoint): """This function sets up the engine.io endpoint as a route for the application. Note that both GET and POST requests must be hooked up on the engine.io endpoint. """ app.add_route(engineio_server.handle_request, engineio_endpoint, methods=['GET', 'POST', 'OPTIONS']) try: app.enable_websocket() except AttributeError: # ignore, this version does not support websocket pass def translate_request(request): """This function takes the arguments passed to the request handler and uses them to generate a WSGI compatible environ dictionary. """ class AwaitablePayload(object): def __init__(self, payload): self.payload = payload or b'' async def read(self, length=None): if length is None: r = self.payload self.payload = b'' else: r = self.payload[:length] self.payload = self.payload[length:] return r uri_parts = urlsplit(request.url) environ = { 'wsgi.input': AwaitablePayload(request.body), 'wsgi.errors': sys.stderr, 'wsgi.version': (1, 0), 'wsgi.async': True, 'wsgi.multithread': False, 'wsgi.multiprocess': False, 'wsgi.run_once': False, 'SERVER_SOFTWARE': 'sanic', 'REQUEST_METHOD': request.method, 'QUERY_STRING': uri_parts.query or '', 'RAW_URI': request.url, 'SERVER_PROTOCOL': 'HTTP/' + request.version, 'REMOTE_ADDR': '127.0.0.1', 'REMOTE_PORT': '0', 'SERVER_NAME': 'sanic', 'SERVER_PORT': '0', 'sanic.request': request } for hdr_name, hdr_value in request.headers.items(): hdr_name = hdr_name.upper() if hdr_name == 'CONTENT-TYPE': environ['CONTENT_TYPE'] = hdr_value continue elif hdr_name == 'CONTENT-LENGTH': environ['CONTENT_LENGTH'] = hdr_value continue key = 'HTTP_%s' % hdr_name.replace('-', '_') if key in environ: hdr_value = '%s,%s' % (environ[key], hdr_value) environ[key] = hdr_value environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') path_info = uri_parts.path environ['PATH_INFO'] = path_info environ['SCRIPT_NAME'] = '' return environ def make_response(status, headers, payload, environ): """This function generates an appropriate response object for this async mode. """ headers_dict = {} content_type = None for h in headers: if h[0].lower() == 'content-type': content_type = h[1] else: headers_dict[h[0]] = h[1] return HTTPResponse(body_bytes=payload, content_type=content_type, status=int(status.split()[0]), headers=headers_dict) class WebSocket(object): # pragma: no cover """ This wrapper class provides a sanic WebSocket interface that is somewhat compatible with eventlet's implementation. """ def __init__(self, handler): self.handler = handler self._sock = None async def __call__(self, environ): request = environ['sanic.request'] protocol = request.transport.get_protocol() self._sock = await protocol.websocket_handshake(request) self.environ = environ await self.handler(self) async def close(self): await self._sock.close() async def send(self, message): await self._sock.send(message) async def wait(self): data = await self._sock.recv() if not isinstance(data, six.binary_type) and \ not isinstance(data, six.text_type): raise IOError() return data _async = { 'asyncio': True, 'create_route': create_route, 'translate_request': translate_request, 'make_response': make_response, 'websocket': WebSocket if WebSocketProtocol else None, } python-engineio-3.11.1/engineio/async_drivers/threading.py0000644000076600000240000000052313564316347025014 0ustar mgrinbergstaff00000000000000from __future__ import absolute_import import threading import time try: import queue except ImportError: # pragma: no cover import Queue as queue _async = { 'thread': threading.Thread, 'queue': queue.Queue, 'queue_empty': queue.Empty, 'event': threading.Event, 'websocket': None, 'sleep': time.sleep, } python-engineio-3.11.1/engineio/async_drivers/tornado.py0000644000076600000240000001352313564316347024521 0ustar mgrinbergstaff00000000000000import asyncio import sys from urllib.parse import urlsplit from .. import exceptions import tornado.web import tornado.websocket import six def get_tornado_handler(engineio_server): class Handler(tornado.websocket.WebSocketHandler): # pragma: no cover def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if isinstance(engineio_server.cors_allowed_origins, six.string_types): if engineio_server.cors_allowed_origins == '*': self.allowed_origins = None else: self.allowed_origins = [ engineio_server.cors_allowed_origins] else: self.allowed_origins = engineio_server.cors_allowed_origins self.receive_queue = asyncio.Queue() async def get(self, *args, **kwargs): if self.request.headers.get('Upgrade', '').lower() == 'websocket': ret = super().get(*args, **kwargs) if asyncio.iscoroutine(ret): await ret else: await engineio_server.handle_request(self) async def open(self, *args, **kwargs): # this is the handler for the websocket request asyncio.ensure_future(engineio_server.handle_request(self)) async def post(self, *args, **kwargs): await engineio_server.handle_request(self) async def options(self, *args, **kwargs): await engineio_server.handle_request(self) async def on_message(self, message): await self.receive_queue.put(message) async def get_next_message(self): return await self.receive_queue.get() def on_close(self): self.receive_queue.put_nowait(None) def check_origin(self, origin): if self.allowed_origins is None or origin in self.allowed_origins: return True return super().check_origin(origin) def get_compression_options(self): # enable compression return {} return Handler def translate_request(handler): """This function takes the arguments passed to the request handler and uses them to generate a WSGI compatible environ dictionary. """ class AwaitablePayload(object): def __init__(self, payload): self.payload = payload or b'' async def read(self, length=None): if length is None: r = self.payload self.payload = b'' else: r = self.payload[:length] self.payload = self.payload[length:] return r payload = handler.request.body uri_parts = urlsplit(handler.request.path) full_uri = handler.request.path if handler.request.query: # pragma: no cover full_uri += '?' + handler.request.query environ = { 'wsgi.input': AwaitablePayload(payload), 'wsgi.errors': sys.stderr, 'wsgi.version': (1, 0), 'wsgi.async': True, 'wsgi.multithread': False, 'wsgi.multiprocess': False, 'wsgi.run_once': False, 'SERVER_SOFTWARE': 'aiohttp', 'REQUEST_METHOD': handler.request.method, 'QUERY_STRING': handler.request.query or '', 'RAW_URI': full_uri, 'SERVER_PROTOCOL': 'HTTP/%s' % handler.request.version, 'REMOTE_ADDR': '127.0.0.1', 'REMOTE_PORT': '0', 'SERVER_NAME': 'aiohttp', 'SERVER_PORT': '0', 'tornado.handler': handler } for hdr_name, hdr_value in handler.request.headers.items(): hdr_name = hdr_name.upper() if hdr_name == 'CONTENT-TYPE': environ['CONTENT_TYPE'] = hdr_value continue elif hdr_name == 'CONTENT-LENGTH': environ['CONTENT_LENGTH'] = hdr_value continue key = 'HTTP_%s' % hdr_name.replace('-', '_') environ[key] = hdr_value environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') path_info = uri_parts.path environ['PATH_INFO'] = path_info environ['SCRIPT_NAME'] = '' return environ def make_response(status, headers, payload, environ): """This function generates an appropriate response object for this async mode. """ tornado_handler = environ['tornado.handler'] try: tornado_handler.set_status(int(status.split()[0])) except RuntimeError: # pragma: no cover # for websocket connections Tornado does not accept a response, since # it already emitted the 101 status code return for header, value in headers: tornado_handler.set_header(header, value) tornado_handler.write(payload) tornado_handler.finish() class WebSocket(object): # pragma: no cover """ This wrapper class provides a tornado WebSocket interface that is somewhat compatible with eventlet's implementation. """ def __init__(self, handler): self.handler = handler self.tornado_handler = None async def __call__(self, environ): self.tornado_handler = environ['tornado.handler'] self.environ = environ await self.handler(self) async def close(self): self.tornado_handler.close() async def send(self, message): try: self.tornado_handler.write_message( message, binary=isinstance(message, bytes)) except tornado.websocket.WebSocketClosedError: raise exceptions.EngineIOError() async def wait(self): msg = await self.tornado_handler.get_next_message() if not isinstance(msg, six.binary_type) and \ not isinstance(msg, six.text_type): raise IOError() return msg _async = { 'asyncio': True, 'translate_request': translate_request, 'make_response': make_response, 'websocket': WebSocket, } python-engineio-3.11.1/engineio/asyncio_client.py0000644000076600000240000005771213573736135023214 0ustar mgrinbergstaff00000000000000import asyncio import ssl try: import aiohttp except ImportError: # pragma: no cover aiohttp = None import six from . import client from . import exceptions from . import packet from . import payload class AsyncClient(client.Client): """An Engine.IO client for asyncio. This class implements a fully compliant Engine.IO web client with support for websocket and long-polling transports, compatible with the asyncio framework on Python 3.5 or newer. :param logger: To enable logging set to ``True`` or pass a logger object to use. To disable logging set to ``False``. The default is ``False``. :param json: An alternative json module to use for encoding and decoding packets. Custom json modules must have ``dumps`` and ``loads`` functions that are compatible with the standard library versions. :param request_timeout: A timeout in seconds for requests. The default is 5 seconds. :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to skip SSL certificate verification, allowing connections to servers with self signed certificates. The default is ``True``. """ def is_asyncio_based(self): return True async def connect(self, url, headers={}, transports=None, engineio_path='engine.io'): """Connect to an Engine.IO server. :param url: The URL of the Engine.IO server. It can include custom query string parameters if required by the server. :param headers: A dictionary with custom headers to send with the connection request. :param transports: The list of allowed transports. Valid transports are ``'polling'`` and ``'websocket'``. If not given, the polling transport is connected first, then an upgrade to websocket is attempted. :param engineio_path: The endpoint where the Engine.IO server is installed. The default value is appropriate for most cases. Note: this method is a coroutine. Example usage:: eio = engineio.Client() await eio.connect('http://localhost:5000') """ if self.state != 'disconnected': raise ValueError('Client is not in a disconnected state') valid_transports = ['polling', 'websocket'] if transports is not None: if isinstance(transports, six.text_type): transports = [transports] transports = [transport for transport in transports if transport in valid_transports] if not transports: raise ValueError('No valid transports provided') self.transports = transports or valid_transports self.queue = self.create_queue() return await getattr(self, '_connect_' + self.transports[0])( url, headers, engineio_path) async def wait(self): """Wait until the connection with the server ends. Client applications can use this function to block the main thread during the life of the connection. Note: this method is a coroutine. """ if self.read_loop_task: await self.read_loop_task async def send(self, data, binary=None): """Send a message to a client. :param data: The data to send to the client. Data can be of type ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` or ``dict``, the data will be serialized as JSON. :param binary: ``True`` to send packet as binary, ``False`` to send as text. If not given, unicode (Python 2) and str (Python 3) are sent as text, and str (Python 2) and bytes (Python 3) are sent as binary. Note: this method is a coroutine. """ await self._send_packet(packet.Packet(packet.MESSAGE, data=data, binary=binary)) async def disconnect(self, abort=False): """Disconnect from the server. :param abort: If set to ``True``, do not wait for background tasks associated with the connection to end. Note: this method is a coroutine. """ if self.state == 'connected': await self._send_packet(packet.Packet(packet.CLOSE)) await self.queue.put(None) self.state = 'disconnecting' await self._trigger_event('disconnect', run_async=False) if self.current_transport == 'websocket': await self.ws.close() if not abort: await self.read_loop_task self.state = 'disconnected' try: client.connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() def start_background_task(self, target, *args, **kwargs): """Start a background task. This is a utility function that applications can use to start a background task. :param target: the target function to execute. :param args: arguments to pass to the function. :param kwargs: keyword arguments to pass to the function. This function returns an object compatible with the `Thread` class in the Python standard library. The `start()` method on this object is already called by this function. Note: this method is a coroutine. """ return asyncio.ensure_future(target(*args, **kwargs)) async def sleep(self, seconds=0): """Sleep for the requested amount of time. Note: this method is a coroutine. """ return await asyncio.sleep(seconds) def create_queue(self): """Create a queue object.""" q = asyncio.Queue() q.Empty = asyncio.QueueEmpty return q def create_event(self): """Create an event object.""" return asyncio.Event() def _reset(self): if self.http: # pragma: no cover asyncio.ensure_future(self.http.close()) super()._reset() async def _connect_polling(self, url, headers, engineio_path): """Establish a long-polling connection to the Engine.IO server.""" if aiohttp is None: # pragma: no cover self.logger.error('aiohttp not installed -- cannot make HTTP ' 'requests!') return self.base_url = self._get_engineio_url(url, engineio_path, 'polling') self.logger.info('Attempting polling connection to ' + self.base_url) r = await self._send_request( 'GET', self.base_url + self._get_url_timestamp(), headers=headers, timeout=self.request_timeout) if r is None: self._reset() raise exceptions.ConnectionError( 'Connection refused by the server') if r.status < 200 or r.status >= 300: raise exceptions.ConnectionError( 'Unexpected status code {} in server response'.format( r.status)) try: p = payload.Payload(encoded_payload=await r.read()) except ValueError: six.raise_from(exceptions.ConnectionError( 'Unexpected response from server'), None) open_packet = p.packets[0] if open_packet.packet_type != packet.OPEN: raise exceptions.ConnectionError( 'OPEN packet not returned by server') self.logger.info( 'Polling connection accepted with ' + str(open_packet.data)) self.sid = open_packet.data['sid'] self.upgrades = open_packet.data['upgrades'] self.ping_interval = open_packet.data['pingInterval'] / 1000.0 self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 self.current_transport = 'polling' self.base_url += '&sid=' + self.sid self.state = 'connected' client.connected_clients.append(self) await self._trigger_event('connect', run_async=False) for pkt in p.packets[1:]: await self._receive_packet(pkt) if 'websocket' in self.upgrades and 'websocket' in self.transports: # attempt to upgrade to websocket if await self._connect_websocket(url, headers, engineio_path): # upgrade to websocket succeeded, we're done here return self.ping_loop_task = self.start_background_task(self._ping_loop) self.write_loop_task = self.start_background_task(self._write_loop) self.read_loop_task = self.start_background_task( self._read_loop_polling) async def _connect_websocket(self, url, headers, engineio_path): """Establish or upgrade to a WebSocket connection with the server.""" if aiohttp is None: # pragma: no cover self.logger.error('aiohttp package not installed') return False websocket_url = self._get_engineio_url(url, engineio_path, 'websocket') if self.sid: self.logger.info( 'Attempting WebSocket upgrade to ' + websocket_url) upgrade = True websocket_url += '&sid=' + self.sid else: upgrade = False self.base_url = websocket_url self.logger.info( 'Attempting WebSocket connection to ' + websocket_url) try: if not self.ssl_verify: ssl_context = ssl.create_default_context() ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE ws = await self.http.ws_connect( websocket_url + self._get_url_timestamp(), headers=headers, ssl=ssl_context) else: ws = await self.http.ws_connect( websocket_url + self._get_url_timestamp(), headers=headers) except (aiohttp.client_exceptions.WSServerHandshakeError, aiohttp.client_exceptions.ServerConnectionError): if upgrade: self.logger.warning( 'WebSocket upgrade failed: connection error') return False else: raise exceptions.ConnectionError('Connection error') if upgrade: p = packet.Packet(packet.PING, data='probe').encode( always_bytes=False) try: await ws.send_str(p) except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected send exception: %s', str(e)) return False try: p = (await ws.receive()).data except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected recv exception: %s', str(e)) return False pkt = packet.Packet(encoded_packet=p) if pkt.packet_type != packet.PONG or pkt.data != 'probe': self.logger.warning( 'WebSocket upgrade failed: no PONG packet') return False p = packet.Packet(packet.UPGRADE).encode(always_bytes=False) try: await ws.send_str(p) except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected send exception: %s', str(e)) return False self.current_transport = 'websocket' self.logger.info('WebSocket upgrade was successful') else: try: p = (await ws.receive()).data except Exception as e: # pragma: no cover raise exceptions.ConnectionError( 'Unexpected recv exception: ' + str(e)) open_packet = packet.Packet(encoded_packet=p) if open_packet.packet_type != packet.OPEN: raise exceptions.ConnectionError('no OPEN packet') self.logger.info( 'WebSocket connection accepted with ' + str(open_packet.data)) self.sid = open_packet.data['sid'] self.upgrades = open_packet.data['upgrades'] self.ping_interval = open_packet.data['pingInterval'] / 1000.0 self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 self.current_transport = 'websocket' self.state = 'connected' client.connected_clients.append(self) await self._trigger_event('connect', run_async=False) self.ws = ws self.ping_loop_task = self.start_background_task(self._ping_loop) self.write_loop_task = self.start_background_task(self._write_loop) self.read_loop_task = self.start_background_task( self._read_loop_websocket) return True async def _receive_packet(self, pkt): """Handle incoming packets from the server.""" packet_name = packet.packet_names[pkt.packet_type] \ if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' self.logger.info( 'Received packet %s data %s', packet_name, pkt.data if not isinstance(pkt.data, bytes) else '') if pkt.packet_type == packet.MESSAGE: await self._trigger_event('message', pkt.data, run_async=True) elif pkt.packet_type == packet.PONG: self.pong_received = True elif pkt.packet_type == packet.CLOSE: await self.disconnect(abort=True) elif pkt.packet_type == packet.NOOP: pass else: self.logger.error('Received unexpected packet of type %s', pkt.packet_type) async def _send_packet(self, pkt): """Queue a packet to be sent to the server.""" if self.state != 'connected': return await self.queue.put(pkt) self.logger.info( 'Sending packet %s data %s', packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '') async def _send_request( self, method, url, headers=None, body=None, timeout=None): # pragma: no cover if self.http is None or self.http.closed: self.http = aiohttp.ClientSession() http_method = getattr(self.http, method.lower()) try: if not self.ssl_verify: return await http_method( url, headers=headers, data=body, timeout=aiohttp.ClientTimeout(total=timeout), ssl=False) else: return await http_method( url, headers=headers, data=body, timeout=aiohttp.ClientTimeout(total=timeout)) except (aiohttp.ClientError, asyncio.TimeoutError) as exc: self.logger.info('HTTP %s request to %s failed with error %s.', method, url, exc) async def _trigger_event(self, event, *args, **kwargs): """Invoke an event handler.""" run_async = kwargs.pop('run_async', False) ret = None if event in self.handlers: if asyncio.iscoroutinefunction(self.handlers[event]) is True: if run_async: return self.start_background_task(self.handlers[event], *args) else: try: ret = await self.handlers[event](*args) except asyncio.CancelledError: # pragma: no cover pass except: self.logger.exception(event + ' async handler error') if event == 'connect': # if connect handler raised error we reject the # connection return False else: if run_async: async def async_handler(): return self.handlers[event](*args) return self.start_background_task(async_handler) else: try: ret = self.handlers[event](*args) except: self.logger.exception(event + ' handler error') if event == 'connect': # if connect handler raised error we reject the # connection return False return ret async def _ping_loop(self): """This background task sends a PING to the server at the requested interval. """ self.pong_received = True if self.ping_loop_event is None: self.ping_loop_event = self.create_event() else: self.ping_loop_event.clear() while self.state == 'connected': if not self.pong_received: self.logger.info( 'PONG response has not been received, aborting') if self.ws: await self.ws.close() await self.queue.put(None) break self.pong_received = False await self._send_packet(packet.Packet(packet.PING)) try: await asyncio.wait_for(self.ping_loop_event.wait(), self.ping_interval) except (asyncio.TimeoutError, asyncio.CancelledError): # pragma: no cover pass self.logger.info('Exiting ping task') async def _read_loop_polling(self): """Read packets by polling the Engine.IO server.""" while self.state == 'connected': self.logger.info( 'Sending polling GET request to ' + self.base_url) r = await self._send_request( 'GET', self.base_url + self._get_url_timestamp(), timeout=max(self.ping_interval, self.ping_timeout) + 5) if r is None: self.logger.warning( 'Connection refused by the server, aborting') await self.queue.put(None) break if r.status < 200 or r.status >= 300: self.logger.warning('Unexpected status code %s in server ' 'response, aborting', r.status) await self.queue.put(None) break try: p = payload.Payload(encoded_payload=await r.read()) except ValueError: self.logger.warning( 'Unexpected packet from server, aborting') await self.queue.put(None) break for pkt in p.packets: await self._receive_packet(pkt) self.logger.info('Waiting for write loop task to end') await self.write_loop_task self.logger.info('Waiting for ping loop task to end') if self.ping_loop_event: # pragma: no cover self.ping_loop_event.set() await self.ping_loop_task if self.state == 'connected': await self._trigger_event('disconnect', run_async=False) try: client.connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() self.logger.info('Exiting read loop task') async def _read_loop_websocket(self): """Read packets from the Engine.IO WebSocket connection.""" while self.state == 'connected': p = None try: p = (await self.ws.receive()).data except aiohttp.client_exceptions.ServerDisconnectedError: self.logger.info( 'Read loop: WebSocket connection was closed, aborting') await self.queue.put(None) break except Exception as e: self.logger.info( 'Unexpected error "%s", aborting', str(e)) await self.queue.put(None) break if isinstance(p, six.text_type): # pragma: no cover p = p.encode('utf-8') pkt = packet.Packet(encoded_packet=p) await self._receive_packet(pkt) self.logger.info('Waiting for write loop task to end') await self.write_loop_task self.logger.info('Waiting for ping loop task to end') if self.ping_loop_event: # pragma: no cover self.ping_loop_event.set() await self.ping_loop_task if self.state == 'connected': await self._trigger_event('disconnect', run_async=False) try: client.connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() self.logger.info('Exiting read loop task') async def _write_loop(self): """This background task sends packages to the server as they are pushed to the send queue. """ while self.state == 'connected': # to simplify the timeout handling, use the maximum of the # ping interval and ping timeout as timeout, with an extra 5 # seconds grace period timeout = max(self.ping_interval, self.ping_timeout) + 5 packets = None try: packets = [await asyncio.wait_for(self.queue.get(), timeout)] except (self.queue.Empty, asyncio.TimeoutError, asyncio.CancelledError): self.logger.error('packet queue is empty, aborting') break if packets == [None]: self.queue.task_done() packets = [] else: while True: try: packets.append(self.queue.get_nowait()) except self.queue.Empty: break if packets[-1] is None: packets = packets[:-1] self.queue.task_done() break if not packets: # empty packet list returned -> connection closed break if self.current_transport == 'polling': p = payload.Payload(packets=packets) r = await self._send_request( 'POST', self.base_url, body=p.encode(), headers={'Content-Type': 'application/octet-stream'}, timeout=self.request_timeout) for pkt in packets: self.queue.task_done() if r is None: self.logger.warning( 'Connection refused by the server, aborting') break if r.status < 200 or r.status >= 300: self.logger.warning('Unexpected status code %s in server ' 'response, aborting', r.status) self._reset() break else: # websocket try: for pkt in packets: if pkt.binary: await self.ws.send_bytes(pkt.encode( always_bytes=False)) else: await self.ws.send_str(pkt.encode( always_bytes=False)) self.queue.task_done() except aiohttp.client_exceptions.ServerDisconnectedError: self.logger.info( 'Write loop: WebSocket connection was closed, ' 'aborting') break self.logger.info('Exiting write loop task') python-engineio-3.11.1/engineio/asyncio_server.py0000644000076600000240000005044113564316774023237 0ustar mgrinbergstaff00000000000000import asyncio import six from six.moves import urllib from . import exceptions from . import packet from . import server from . import asyncio_socket class AsyncServer(server.Server): """An Engine.IO server for asyncio. This class implements a fully compliant Engine.IO web server with support for websocket and long-polling transports, compatible with the asyncio framework on Python 3.5 or newer. :param async_mode: The asynchronous model to use. See the Deployment section in the documentation for a description of the available options. Valid async modes are "aiohttp", "sanic", "tornado" and "asgi". If this argument is not given, "aiohttp" is tried first, followed by "sanic", "tornado", and finally "asgi". The first async mode that has all its dependencies installed is the one that is chosen. :param ping_timeout: The time in seconds that the client waits for the server to respond before disconnecting. :param ping_interval: The interval in seconds at which the client pings the server. The default is 25 seconds. For advanced control, a two element tuple can be given, where the first number is the ping interval and the second is a grace period added by the server. The default grace period is 5 seconds. :param max_http_buffer_size: The maximum size of a message when using the polling transport. :param allow_upgrades: Whether to allow transport upgrades or not. :param http_compression: Whether to compress packages when using the polling transport. :param compression_threshold: Only compress messages when their byte size is greater than this value. :param cookie: Name of the HTTP cookie that contains the client session id. If set to ``None``, a cookie is not sent to the client. :param cors_allowed_origins: Origin or list of origins that are allowed to connect to this server. Only the same origin is allowed by default. Set this argument to ``'*'`` to allow all origins, or to ``[]`` to disable CORS handling. :param cors_credentials: Whether credentials (cookies, authentication) are allowed in requests to this server. :param logger: To enable logging set to ``True`` or pass a logger object to use. To disable logging set to ``False``. :param json: An alternative json module to use for encoding and decoding packets. Custom json modules must have ``dumps`` and ``loads`` functions that are compatible with the standard library versions. :param async_handlers: If set to ``True``, run message event handlers in non-blocking threads. To run handlers synchronously, set to ``False``. The default is ``True``. :param kwargs: Reserved for future extensions, any additional parameters given as keyword arguments will be silently ignored. """ def is_asyncio_based(self): return True def async_modes(self): return ['aiohttp', 'sanic', 'tornado', 'asgi'] def attach(self, app, engineio_path='engine.io'): """Attach the Engine.IO server to an application.""" engineio_path = engineio_path.strip('/') self._async['create_route'](app, self, '/{}/'.format(engineio_path)) async def send(self, sid, data, binary=None): """Send a message to a client. :param sid: The session id of the recipient client. :param data: The data to send to the client. Data can be of type ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` or ``dict``, the data will be serialized as JSON. :param binary: ``True`` to send packet as binary, ``False`` to send as text. If not given, unicode (Python 2) and str (Python 3) are sent as text, and str (Python 2) and bytes (Python 3) are sent as binary. Note: this method is a coroutine. """ try: socket = self._get_socket(sid) except KeyError: # the socket is not available self.logger.warning('Cannot send to sid %s', sid) return await socket.send(packet.Packet(packet.MESSAGE, data=data, binary=binary)) async def get_session(self, sid): """Return the user session for a client. :param sid: The session id of the client. The return value is a dictionary. Modifications made to this dictionary are not guaranteed to be preserved. If you want to modify the user session, use the ``session`` context manager instead. """ socket = self._get_socket(sid) return socket.session async def save_session(self, sid, session): """Store the user session for a client. :param sid: The session id of the client. :param session: The session dictionary. """ socket = self._get_socket(sid) socket.session = session def session(self, sid): """Return the user session for a client with context manager syntax. :param sid: The session id of the client. This is a context manager that returns the user session dictionary for the client. Any changes that are made to this dictionary inside the context manager block are saved back to the session. Example usage:: @eio.on('connect') def on_connect(sid, environ): username = authenticate_user(environ) if not username: return False with eio.session(sid) as session: session['username'] = username @eio.on('message') def on_message(sid, msg): async with eio.session(sid) as session: print('received message from ', session['username']) """ class _session_context_manager(object): def __init__(self, server, sid): self.server = server self.sid = sid self.session = None async def __aenter__(self): self.session = await self.server.get_session(sid) return self.session async def __aexit__(self, *args): await self.server.save_session(sid, self.session) return _session_context_manager(self, sid) async def disconnect(self, sid=None): """Disconnect a client. :param sid: The session id of the client to close. If this parameter is not given, then all clients are closed. Note: this method is a coroutine. """ if sid is not None: try: socket = self._get_socket(sid) except KeyError: # pragma: no cover # the socket was already closed or gone pass else: await socket.close() if sid in self.sockets: # pragma: no cover del self.sockets[sid] else: await asyncio.wait([client.close() for client in six.itervalues(self.sockets)]) self.sockets = {} async def handle_request(self, *args, **kwargs): """Handle an HTTP request from the client. This is the entry point of the Engine.IO application. This function returns the HTTP response to deliver to the client. Note: this method is a coroutine. """ translate_request = self._async['translate_request'] if asyncio.iscoroutinefunction(translate_request): environ = await translate_request(*args, **kwargs) else: environ = translate_request(*args, **kwargs) if self.cors_allowed_origins != []: # Validate the origin header if present # This is important for WebSocket more than for HTTP, since # browsers only apply CORS controls to HTTP. origin = environ.get('HTTP_ORIGIN') if origin: allowed_origins = self._cors_allowed_origins(environ) if allowed_origins is not None and origin not in \ allowed_origins: self.logger.info(origin + ' is not an accepted origin.') r = self._bad_request() make_response = self._async['make_response'] if asyncio.iscoroutinefunction(make_response): response = await make_response( r['status'], r['headers'], r['response'], environ) else: response = make_response(r['status'], r['headers'], r['response'], environ) return response method = environ['REQUEST_METHOD'] query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) sid = query['sid'][0] if 'sid' in query else None b64 = False jsonp = False jsonp_index = None if 'b64' in query: if query['b64'][0] == "1" or query['b64'][0].lower() == "true": b64 = True if 'j' in query: jsonp = True try: jsonp_index = int(query['j'][0]) except (ValueError, KeyError, IndexError): # Invalid JSONP index number pass if jsonp and jsonp_index is None: self.logger.warning('Invalid JSONP index number') r = self._bad_request() elif method == 'GET': if sid is None: transport = query.get('transport', ['polling'])[0] if transport != 'polling' and transport != 'websocket': self.logger.warning('Invalid transport %s', transport) r = self._bad_request() else: r = await self._handle_connect(environ, transport, b64, jsonp_index) else: if sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: packets = await socket.handle_get_request(environ) if isinstance(packets, list): r = self._ok(packets, b64=b64, jsonp_index=jsonp_index) else: r = packets except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover await self.disconnect(sid) r = self._bad_request() if sid in self.sockets and self.sockets[sid].closed: del self.sockets[sid] elif method == 'POST': if sid is None or sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: await socket.handle_post_request(environ) r = self._ok(jsonp_index=jsonp_index) except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover await self.disconnect(sid) r = self._bad_request() except: # pragma: no cover # for any other unexpected errors, we log the error # and keep going self.logger.exception('post request handler error') r = self._ok(jsonp_index=jsonp_index) elif method == 'OPTIONS': r = self._ok() else: self.logger.warning('Method %s not supported', method) r = self._method_not_found() if not isinstance(r, dict): return r if self.http_compression and \ len(r['response']) >= self.compression_threshold: encodings = [e.split(';')[0].strip() for e in environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] for encoding in encodings: if encoding in self.compression_methods: r['response'] = \ getattr(self, '_' + encoding)(r['response']) r['headers'] += [('Content-Encoding', encoding)] break cors_headers = self._cors_headers(environ) make_response = self._async['make_response'] if asyncio.iscoroutinefunction(make_response): response = await make_response(r['status'], r['headers'] + cors_headers, r['response'], environ) else: response = make_response(r['status'], r['headers'] + cors_headers, r['response'], environ) return response def start_background_task(self, target, *args, **kwargs): """Start a background task using the appropriate async model. This is a utility function that applications can use to start a background task using the method that is compatible with the selected async mode. :param target: the target function to execute. :param args: arguments to pass to the function. :param kwargs: keyword arguments to pass to the function. The return value is a ``asyncio.Task`` object. """ return asyncio.ensure_future(target(*args, **kwargs)) async def sleep(self, seconds=0): """Sleep for the requested amount of time using the appropriate async model. This is a utility function that applications can use to put a task to sleep without having to worry about using the correct call for the selected async mode. Note: this method is a coroutine. """ return await asyncio.sleep(seconds) def create_queue(self, *args, **kwargs): """Create a queue object using the appropriate async model. This is a utility function that applications can use to create a queue without having to worry about using the correct call for the selected async mode. For asyncio based async modes, this returns an instance of ``asyncio.Queue``. """ return asyncio.Queue(*args, **kwargs) def get_queue_empty_exception(self): """Return the queue empty exception for the appropriate async model. This is a utility function that applications can use to work with a queue without having to worry about using the correct call for the selected async mode. For asyncio based async modes, this returns an instance of ``asyncio.QueueEmpty``. """ return asyncio.QueueEmpty def create_event(self, *args, **kwargs): """Create an event object using the appropriate async model. This is a utility function that applications can use to create an event without having to worry about using the correct call for the selected async mode. For asyncio based async modes, this returns an instance of ``asyncio.Event``. """ return asyncio.Event(*args, **kwargs) async def _handle_connect(self, environ, transport, b64=False, jsonp_index=None): """Handle a client connection request.""" if self.start_service_task: # start the service task to monitor connected clients self.start_service_task = False self.start_background_task(self._service_task) sid = self._generate_id() s = asyncio_socket.AsyncSocket(self, sid) self.sockets[sid] = s pkt = packet.Packet( packet.OPEN, {'sid': sid, 'upgrades': self._upgrades(sid, transport), 'pingTimeout': int(self.ping_timeout * 1000), 'pingInterval': int(self.ping_interval * 1000)}) await s.send(pkt) ret = await self._trigger_event('connect', sid, environ, run_async=False) if ret is False: del self.sockets[sid] self.logger.warning('Application rejected connection') return self._unauthorized() if transport == 'websocket': ret = await s.handle_get_request(environ) if s.closed: # websocket connection ended, so we are done del self.sockets[sid] return ret else: s.connected = True headers = None if self.cookie: headers = [('Set-Cookie', self.cookie + '=' + sid)] try: return self._ok(await s.poll(), headers=headers, b64=b64, jsonp_index=jsonp_index) except exceptions.QueueEmpty: return self._bad_request() async def _trigger_event(self, event, *args, **kwargs): """Invoke an event handler.""" run_async = kwargs.pop('run_async', False) ret = None if event in self.handlers: if asyncio.iscoroutinefunction(self.handlers[event]) is True: if run_async: return self.start_background_task(self.handlers[event], *args) else: try: ret = await self.handlers[event](*args) except asyncio.CancelledError: # pragma: no cover pass except: self.logger.exception(event + ' async handler error') if event == 'connect': # if connect handler raised error we reject the # connection return False else: if run_async: async def async_handler(): return self.handlers[event](*args) return self.start_background_task(async_handler) else: try: ret = self.handlers[event](*args) except: self.logger.exception(event + ' handler error') if event == 'connect': # if connect handler raised error we reject the # connection return False return ret async def _service_task(self): # pragma: no cover """Monitor connected clients and clean up those that time out.""" while True: if len(self.sockets) == 0: # nothing to do await self.sleep(self.ping_timeout) continue # go through the entire client list in a ping interval cycle sleep_interval = self.ping_timeout / len(self.sockets) try: # iterate over the current clients for socket in self.sockets.copy().values(): if not socket.closing and not socket.closed: await socket.check_ping_timeout() await self.sleep(sleep_interval) except (SystemExit, KeyboardInterrupt, asyncio.CancelledError): self.logger.info('service task canceled') break except: if asyncio.get_event_loop().is_closed(): self.logger.info('event loop is closed, exiting service ' 'task') break # an unexpected exception has occurred, log it and continue self.logger.exception('service task exception') python-engineio-3.11.1/engineio/asyncio_socket.py0000644000076600000240000002266313564316347023222 0ustar mgrinbergstaff00000000000000import asyncio import six import sys import time from . import exceptions from . import packet from . import payload from . import socket class AsyncSocket(socket.Socket): async def poll(self): """Wait for packets to send to the client.""" try: packets = [await asyncio.wait_for(self.queue.get(), self.server.ping_timeout)] self.queue.task_done() except (asyncio.TimeoutError, asyncio.CancelledError): raise exceptions.QueueEmpty() if packets == [None]: return [] try: packets.append(self.queue.get_nowait()) self.queue.task_done() except asyncio.QueueEmpty: pass return packets async def receive(self, pkt): """Receive packet from the client.""" self.server.logger.info('%s: Received packet %s data %s', self.sid, packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '') if pkt.packet_type == packet.PING: self.last_ping = time.time() await self.send(packet.Packet(packet.PONG, pkt.data)) elif pkt.packet_type == packet.MESSAGE: await self.server._trigger_event( 'message', self.sid, pkt.data, run_async=self.server.async_handlers) elif pkt.packet_type == packet.UPGRADE: await self.send(packet.Packet(packet.NOOP)) elif pkt.packet_type == packet.CLOSE: await self.close(wait=False, abort=True) else: raise exceptions.UnknownPacketError() async def check_ping_timeout(self): """Make sure the client is still sending pings. This helps detect disconnections for long-polling clients. """ if self.closed: raise exceptions.SocketIsClosedError() if time.time() - self.last_ping > self.server.ping_interval + \ self.server.ping_interval_grace_period: self.server.logger.info('%s: Client is gone, closing socket', self.sid) # Passing abort=False here will cause close() to write a # CLOSE packet. This has the effect of updating half-open sockets # to their correct state of disconnected await self.close(wait=False, abort=False) return False return True async def send(self, pkt): """Send a packet to the client.""" if not await self.check_ping_timeout(): return if self.upgrading: self.packet_backlog.append(pkt) else: await self.queue.put(pkt) self.server.logger.info('%s: Sending packet %s data %s', self.sid, packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '') async def handle_get_request(self, environ): """Handle a long-polling GET request from the client.""" connections = [ s.strip() for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] transport = environ.get('HTTP_UPGRADE', '').lower() if 'upgrade' in connections and transport in self.upgrade_protocols: self.server.logger.info('%s: Received request to upgrade to %s', self.sid, transport) return await getattr(self, '_upgrade_' + transport)(environ) try: packets = await self.poll() except exceptions.QueueEmpty: exc = sys.exc_info() await self.close(wait=False) six.reraise(*exc) return packets async def handle_post_request(self, environ): """Handle a long-polling POST request from the client.""" length = int(environ.get('CONTENT_LENGTH', '0')) if length > self.server.max_http_buffer_size: raise exceptions.ContentTooLongError() else: body = await environ['wsgi.input'].read(length) p = payload.Payload(encoded_payload=body) for pkt in p.packets: await self.receive(pkt) async def close(self, wait=True, abort=False): """Close the socket connection.""" if not self.closed and not self.closing: self.closing = True await self.server._trigger_event('disconnect', self.sid) if not abort: await self.send(packet.Packet(packet.CLOSE)) self.closed = True if wait: await self.queue.join() async def _upgrade_websocket(self, environ): """Upgrade the connection from polling to websocket.""" if self.upgraded: raise IOError('Socket has been upgraded already') if self.server._async['websocket'] is None: # the selected async mode does not support websocket return self.server._bad_request() ws = self.server._async['websocket'](self._websocket_handler) return await ws(environ) async def _websocket_handler(self, ws): """Engine.IO handler for websocket transport.""" if self.connected: # the socket was already connected, so this is an upgrade self.upgrading = True # hold packet sends during the upgrade try: pkt = await ws.wait() except IOError: # pragma: no cover return decoded_pkt = packet.Packet(encoded_packet=pkt) if decoded_pkt.packet_type != packet.PING or \ decoded_pkt.data != 'probe': self.server.logger.info( '%s: Failed websocket upgrade, no PING packet', self.sid) return await ws.send(packet.Packet( packet.PONG, data=six.text_type('probe')).encode(always_bytes=False)) await self.queue.put(packet.Packet(packet.NOOP)) # end poll try: pkt = await ws.wait() except IOError: # pragma: no cover return decoded_pkt = packet.Packet(encoded_packet=pkt) if decoded_pkt.packet_type != packet.UPGRADE: self.upgraded = False self.server.logger.info( ('%s: Failed websocket upgrade, expected UPGRADE packet, ' 'received %s instead.'), self.sid, pkt) return self.upgraded = True # flush any packets that were sent during the upgrade for pkt in self.packet_backlog: await self.queue.put(pkt) self.packet_backlog = [] self.upgrading = False else: self.connected = True self.upgraded = True # start separate writer thread async def writer(): while True: packets = None try: packets = await self.poll() except exceptions.QueueEmpty: break if not packets: # empty packet list returned -> connection closed break try: for pkt in packets: await ws.send(pkt.encode(always_bytes=False)) except: break writer_task = asyncio.ensure_future(writer()) self.server.logger.info( '%s: Upgrade to websocket successful', self.sid) while True: p = None wait_task = asyncio.ensure_future(ws.wait()) try: p = await asyncio.wait_for(wait_task, self.server.ping_timeout) except asyncio.CancelledError: # pragma: no cover # there is a bug (https://bugs.python.org/issue30508) in # asyncio that causes a "Task exception never retrieved" error # to appear when wait_task raises an exception before it gets # cancelled. Calling wait_task.exception() prevents the error # from being issued in Python 3.6, but causes other errors in # other versions, so we run it with all errors suppressed and # hope for the best. try: wait_task.exception() except: pass break except: break if p is None: # connection closed by client break if isinstance(p, six.text_type): # pragma: no cover p = p.encode('utf-8') pkt = packet.Packet(encoded_packet=p) try: await self.receive(pkt) except exceptions.UnknownPacketError: # pragma: no cover pass except exceptions.SocketIsClosedError: # pragma: no cover self.server.logger.info('Receive error -- socket is closed') break except: # pragma: no cover # if we get an unexpected exception we log the error and exit # the connection properly self.server.logger.exception('Unknown receive error') await self.queue.put(None) # unlock the writer task so it can exit await asyncio.wait_for(writer_task, timeout=None) await self.close(wait=False, abort=True) python-engineio-3.11.1/engineio/client.py0000644000076600000240000006462513573736100021460 0ustar mgrinbergstaff00000000000000import logging try: import queue except ImportError: # pragma: no cover import Queue as queue import signal import ssl import threading import time import six from six.moves import urllib try: import requests except ImportError: # pragma: no cover requests = None try: import websocket except ImportError: # pragma: no cover websocket = None from . import exceptions from . import packet from . import payload default_logger = logging.getLogger('engineio.client') connected_clients = [] if six.PY2: # pragma: no cover ConnectionError = OSError def signal_handler(sig, frame): """SIGINT handler. Disconnect all active clients and then invoke the original signal handler. """ for client in connected_clients[:]: if client.is_asyncio_based(): client.start_background_task(client.disconnect, abort=True) else: client.disconnect(abort=True) if callable(original_signal_handler): return original_signal_handler(sig, frame) else: # pragma: no cover # Handle case where no original SIGINT handler was present. return signal.default_int_handler(sig, frame) original_signal_handler = signal.signal(signal.SIGINT, signal_handler) class Client(object): """An Engine.IO client. This class implements a fully compliant Engine.IO web client with support for websocket and long-polling transports. :param logger: To enable logging set to ``True`` or pass a logger object to use. To disable logging set to ``False``. The default is ``False``. :param json: An alternative json module to use for encoding and decoding packets. Custom json modules must have ``dumps`` and ``loads`` functions that are compatible with the standard library versions. :param request_timeout: A timeout in seconds for requests. The default is 5 seconds. :param ssl_verify: ``True`` to verify SSL certificates, or ``False`` to skip SSL certificate verification, allowing connections to servers with self signed certificates. The default is ``True``. """ event_names = ['connect', 'disconnect', 'message'] def __init__(self, logger=False, json=None, request_timeout=5, ssl_verify=True): self.handlers = {} self.base_url = None self.transports = None self.current_transport = None self.sid = None self.upgrades = None self.ping_interval = None self.ping_timeout = None self.pong_received = True self.http = None self.ws = None self.read_loop_task = None self.write_loop_task = None self.ping_loop_task = None self.ping_loop_event = None self.queue = None self.state = 'disconnected' self.ssl_verify = ssl_verify if json is not None: packet.Packet.json = json if not isinstance(logger, bool): self.logger = logger else: self.logger = default_logger if not logging.root.handlers and \ self.logger.level == logging.NOTSET: if logger: self.logger.setLevel(logging.INFO) else: self.logger.setLevel(logging.ERROR) self.logger.addHandler(logging.StreamHandler()) self.request_timeout = request_timeout def is_asyncio_based(self): return False def on(self, event, handler=None): """Register an event handler. :param event: The event name. Can be ``'connect'``, ``'message'`` or ``'disconnect'``. :param handler: The function that should be invoked to handle the event. When this parameter is not given, the method acts as a decorator for the handler function. Example usage:: # as a decorator: @eio.on('connect') def connect_handler(): print('Connection request') # as a method: def message_handler(msg): print('Received message: ', msg) eio.send('response') eio.on('message', message_handler) """ if event not in self.event_names: raise ValueError('Invalid event') def set_handler(handler): self.handlers[event] = handler return handler if handler is None: return set_handler set_handler(handler) def connect(self, url, headers={}, transports=None, engineio_path='engine.io'): """Connect to an Engine.IO server. :param url: The URL of the Engine.IO server. It can include custom query string parameters if required by the server. :param headers: A dictionary with custom headers to send with the connection request. :param transports: The list of allowed transports. Valid transports are ``'polling'`` and ``'websocket'``. If not given, the polling transport is connected first, then an upgrade to websocket is attempted. :param engineio_path: The endpoint where the Engine.IO server is installed. The default value is appropriate for most cases. Example usage:: eio = engineio.Client() eio.connect('http://localhost:5000') """ if self.state != 'disconnected': raise ValueError('Client is not in a disconnected state') valid_transports = ['polling', 'websocket'] if transports is not None: if isinstance(transports, six.string_types): transports = [transports] transports = [transport for transport in transports if transport in valid_transports] if not transports: raise ValueError('No valid transports provided') self.transports = transports or valid_transports self.queue = self.create_queue() return getattr(self, '_connect_' + self.transports[0])( url, headers, engineio_path) def wait(self): """Wait until the connection with the server ends. Client applications can use this function to block the main thread during the life of the connection. """ if self.read_loop_task: self.read_loop_task.join() def send(self, data, binary=None): """Send a message to a client. :param data: The data to send to the client. Data can be of type ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` or ``dict``, the data will be serialized as JSON. :param binary: ``True`` to send packet as binary, ``False`` to send as text. If not given, unicode (Python 2) and str (Python 3) are sent as text, and str (Python 2) and bytes (Python 3) are sent as binary. """ self._send_packet(packet.Packet(packet.MESSAGE, data=data, binary=binary)) def disconnect(self, abort=False): """Disconnect from the server. :param abort: If set to ``True``, do not wait for background tasks associated with the connection to end. """ if self.state == 'connected': self._send_packet(packet.Packet(packet.CLOSE)) self.queue.put(None) self.state = 'disconnecting' self._trigger_event('disconnect', run_async=False) if self.current_transport == 'websocket': self.ws.close() if not abort: self.read_loop_task.join() self.state = 'disconnected' try: connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() def transport(self): """Return the name of the transport currently in use. The possible values returned by this function are ``'polling'`` and ``'websocket'``. """ return self.current_transport def start_background_task(self, target, *args, **kwargs): """Start a background task. This is a utility function that applications can use to start a background task. :param target: the target function to execute. :param args: arguments to pass to the function. :param kwargs: keyword arguments to pass to the function. This function returns an object compatible with the `Thread` class in the Python standard library. The `start()` method on this object is already called by this function. """ th = threading.Thread(target=target, args=args, kwargs=kwargs) th.start() return th def sleep(self, seconds=0): """Sleep for the requested amount of time.""" return time.sleep(seconds) def create_queue(self, *args, **kwargs): """Create a queue object.""" q = queue.Queue(*args, **kwargs) q.Empty = queue.Empty return q def create_event(self, *args, **kwargs): """Create an event object.""" return threading.Event(*args, **kwargs) def _reset(self): self.state = 'disconnected' self.sid = None def _connect_polling(self, url, headers, engineio_path): """Establish a long-polling connection to the Engine.IO server.""" if requests is None: # pragma: no cover # not installed self.logger.error('requests package is not installed -- cannot ' 'send HTTP requests!') return self.base_url = self._get_engineio_url(url, engineio_path, 'polling') self.logger.info('Attempting polling connection to ' + self.base_url) r = self._send_request( 'GET', self.base_url + self._get_url_timestamp(), headers=headers, timeout=self.request_timeout) if r is None: self._reset() raise exceptions.ConnectionError( 'Connection refused by the server') if r.status_code < 200 or r.status_code >= 300: raise exceptions.ConnectionError( 'Unexpected status code {} in server response'.format( r.status_code)) try: p = payload.Payload(encoded_payload=r.content) except ValueError: six.raise_from(exceptions.ConnectionError( 'Unexpected response from server'), None) open_packet = p.packets[0] if open_packet.packet_type != packet.OPEN: raise exceptions.ConnectionError( 'OPEN packet not returned by server') self.logger.info( 'Polling connection accepted with ' + str(open_packet.data)) self.sid = open_packet.data['sid'] self.upgrades = open_packet.data['upgrades'] self.ping_interval = open_packet.data['pingInterval'] / 1000.0 self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 self.current_transport = 'polling' self.base_url += '&sid=' + self.sid self.state = 'connected' connected_clients.append(self) self._trigger_event('connect', run_async=False) for pkt in p.packets[1:]: self._receive_packet(pkt) if 'websocket' in self.upgrades and 'websocket' in self.transports: # attempt to upgrade to websocket if self._connect_websocket(url, headers, engineio_path): # upgrade to websocket succeeded, we're done here return # start background tasks associated with this client self.ping_loop_task = self.start_background_task(self._ping_loop) self.write_loop_task = self.start_background_task(self._write_loop) self.read_loop_task = self.start_background_task( self._read_loop_polling) def _connect_websocket(self, url, headers, engineio_path): """Establish or upgrade to a WebSocket connection with the server.""" if websocket is None: # pragma: no cover # not installed self.logger.warning('websocket-client package not installed, only ' 'polling transport is available') return False websocket_url = self._get_engineio_url(url, engineio_path, 'websocket') if self.sid: self.logger.info( 'Attempting WebSocket upgrade to ' + websocket_url) upgrade = True websocket_url += '&sid=' + self.sid else: upgrade = False self.base_url = websocket_url self.logger.info( 'Attempting WebSocket connection to ' + websocket_url) # get the cookies from the long-polling connection so that they can # also be sent the the WebSocket route cookies = None if self.http: cookies = '; '.join(["{}={}".format(cookie.name, cookie.value) for cookie in self.http.cookies]) try: if not self.ssl_verify: ws = websocket.create_connection( websocket_url + self._get_url_timestamp(), header=headers, cookie=cookies, sslopt={"cert_reqs": ssl.CERT_NONE}) else: ws = websocket.create_connection( websocket_url + self._get_url_timestamp(), header=headers, cookie=cookies) except (ConnectionError, IOError): if upgrade: self.logger.warning( 'WebSocket upgrade failed: connection error') return False else: raise exceptions.ConnectionError('Connection error') if upgrade: p = packet.Packet(packet.PING, data=six.text_type('probe')).encode() try: ws.send(p) except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected send exception: %s', str(e)) return False try: p = ws.recv() except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected recv exception: %s', str(e)) return False pkt = packet.Packet(encoded_packet=p) if pkt.packet_type != packet.PONG or pkt.data != 'probe': self.logger.warning( 'WebSocket upgrade failed: no PONG packet') return False p = packet.Packet(packet.UPGRADE).encode() try: ws.send(p) except Exception as e: # pragma: no cover self.logger.warning( 'WebSocket upgrade failed: unexpected send exception: %s', str(e)) return False self.current_transport = 'websocket' self.logger.info('WebSocket upgrade was successful') else: try: p = ws.recv() except Exception as e: # pragma: no cover raise exceptions.ConnectionError( 'Unexpected recv exception: ' + str(e)) open_packet = packet.Packet(encoded_packet=p) if open_packet.packet_type != packet.OPEN: raise exceptions.ConnectionError('no OPEN packet') self.logger.info( 'WebSocket connection accepted with ' + str(open_packet.data)) self.sid = open_packet.data['sid'] self.upgrades = open_packet.data['upgrades'] self.ping_interval = open_packet.data['pingInterval'] / 1000.0 self.ping_timeout = open_packet.data['pingTimeout'] / 1000.0 self.current_transport = 'websocket' self.state = 'connected' connected_clients.append(self) self._trigger_event('connect', run_async=False) self.ws = ws # start background tasks associated with this client self.ping_loop_task = self.start_background_task(self._ping_loop) self.write_loop_task = self.start_background_task(self._write_loop) self.read_loop_task = self.start_background_task( self._read_loop_websocket) return True def _receive_packet(self, pkt): """Handle incoming packets from the server.""" packet_name = packet.packet_names[pkt.packet_type] \ if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' self.logger.info( 'Received packet %s data %s', packet_name, pkt.data if not isinstance(pkt.data, bytes) else '') if pkt.packet_type == packet.MESSAGE: self._trigger_event('message', pkt.data, run_async=True) elif pkt.packet_type == packet.PONG: self.pong_received = True elif pkt.packet_type == packet.CLOSE: self.disconnect(abort=True) elif pkt.packet_type == packet.NOOP: pass else: self.logger.error('Received unexpected packet of type %s', pkt.packet_type) def _send_packet(self, pkt): """Queue a packet to be sent to the server.""" if self.state != 'connected': return self.queue.put(pkt) self.logger.info( 'Sending packet %s data %s', packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '') def _send_request( self, method, url, headers=None, body=None, timeout=None): # pragma: no cover if self.http is None: self.http = requests.Session() try: return self.http.request(method, url, headers=headers, data=body, timeout=timeout, verify=self.ssl_verify) except requests.exceptions.RequestException as exc: self.logger.info('HTTP %s request to %s failed with error %s.', method, url, exc) def _trigger_event(self, event, *args, **kwargs): """Invoke an event handler.""" run_async = kwargs.pop('run_async', False) if event in self.handlers: if run_async: return self.start_background_task(self.handlers[event], *args) else: try: return self.handlers[event](*args) except: self.logger.exception(event + ' handler error') def _get_engineio_url(self, url, engineio_path, transport): """Generate the Engine.IO connection URL.""" engineio_path = engineio_path.strip('/') parsed_url = urllib.parse.urlparse(url) if transport == 'polling': scheme = 'http' elif transport == 'websocket': scheme = 'ws' else: # pragma: no cover raise ValueError('invalid transport') if parsed_url.scheme in ['https', 'wss']: scheme += 's' return ('{scheme}://{netloc}/{path}/?{query}' '{sep}transport={transport}&EIO=3').format( scheme=scheme, netloc=parsed_url.netloc, path=engineio_path, query=parsed_url.query, sep='&' if parsed_url.query else '', transport=transport) def _get_url_timestamp(self): """Generate the Engine.IO query string timestamp.""" return '&t=' + str(time.time()) def _ping_loop(self): """This background task sends a PING to the server at the requested interval. """ self.pong_received = True if self.ping_loop_event is None: self.ping_loop_event = self.create_event() else: self.ping_loop_event.clear() while self.state == 'connected': if not self.pong_received: self.logger.info( 'PONG response has not been received, aborting') if self.ws: self.ws.shutdown() self.queue.put(None) break self.pong_received = False self._send_packet(packet.Packet(packet.PING)) self.ping_loop_event.wait(timeout=self.ping_interval) self.logger.info('Exiting ping task') def _read_loop_polling(self): """Read packets by polling the Engine.IO server.""" while self.state == 'connected': self.logger.info( 'Sending polling GET request to ' + self.base_url) r = self._send_request( 'GET', self.base_url + self._get_url_timestamp(), timeout=max(self.ping_interval, self.ping_timeout) + 5) if r is None: self.logger.warning( 'Connection refused by the server, aborting') self.queue.put(None) break if r.status_code < 200 or r.status_code >= 300: self.logger.warning('Unexpected status code %s in server ' 'response, aborting', r.status_code) self.queue.put(None) break try: p = payload.Payload(encoded_payload=r.content) except ValueError: self.logger.warning( 'Unexpected packet from server, aborting') self.queue.put(None) break for pkt in p.packets: self._receive_packet(pkt) self.logger.info('Waiting for write loop task to end') self.write_loop_task.join() self.logger.info('Waiting for ping loop task to end') if self.ping_loop_event: # pragma: no cover self.ping_loop_event.set() self.ping_loop_task.join() if self.state == 'connected': self._trigger_event('disconnect', run_async=False) try: connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() self.logger.info('Exiting read loop task') def _read_loop_websocket(self): """Read packets from the Engine.IO WebSocket connection.""" while self.state == 'connected': p = None try: p = self.ws.recv() except websocket.WebSocketConnectionClosedException: self.logger.warning( 'WebSocket connection was closed, aborting') self.queue.put(None) break except Exception as e: self.logger.info( 'Unexpected error "%s", aborting', str(e)) self.queue.put(None) break if isinstance(p, six.text_type): # pragma: no cover p = p.encode('utf-8') pkt = packet.Packet(encoded_packet=p) self._receive_packet(pkt) self.logger.info('Waiting for write loop task to end') self.write_loop_task.join() self.logger.info('Waiting for ping loop task to end') if self.ping_loop_event: # pragma: no cover self.ping_loop_event.set() self.ping_loop_task.join() if self.state == 'connected': self._trigger_event('disconnect', run_async=False) try: connected_clients.remove(self) except ValueError: # pragma: no cover pass self._reset() self.logger.info('Exiting read loop task') def _write_loop(self): """This background task sends packages to the server as they are pushed to the send queue. """ while self.state == 'connected': # to simplify the timeout handling, use the maximum of the # ping interval and ping timeout as timeout, with an extra 5 # seconds grace period timeout = max(self.ping_interval, self.ping_timeout) + 5 packets = None try: packets = [self.queue.get(timeout=timeout)] except self.queue.Empty: self.logger.error('packet queue is empty, aborting') break if packets == [None]: self.queue.task_done() packets = [] else: while True: try: packets.append(self.queue.get(block=False)) except self.queue.Empty: break if packets[-1] is None: packets = packets[:-1] self.queue.task_done() break if not packets: # empty packet list returned -> connection closed break if self.current_transport == 'polling': p = payload.Payload(packets=packets) r = self._send_request( 'POST', self.base_url, body=p.encode(), headers={'Content-Type': 'application/octet-stream'}, timeout=self.request_timeout) for pkt in packets: self.queue.task_done() if r is None: self.logger.warning( 'Connection refused by the server, aborting') break if r.status_code < 200 or r.status_code >= 300: self.logger.warning('Unexpected status code %s in server ' 'response, aborting', r.status_code) self._reset() break else: # websocket try: for pkt in packets: encoded_packet = pkt.encode(always_bytes=False) if pkt.binary: self.ws.send_binary(encoded_packet) else: self.ws.send(encoded_packet) self.queue.task_done() except websocket.WebSocketConnectionClosedException: self.logger.warning( 'WebSocket connection was closed, aborting') break self.logger.info('Exiting write loop task') python-engineio-3.11.1/engineio/exceptions.py0000644000076600000240000000044413564316347022357 0ustar mgrinbergstaff00000000000000class EngineIOError(Exception): pass class ContentTooLongError(EngineIOError): pass class UnknownPacketError(EngineIOError): pass class QueueEmpty(EngineIOError): pass class SocketIsClosedError(EngineIOError): pass class ConnectionError(EngineIOError): pass python-engineio-3.11.1/engineio/middleware.py0000644000076600000240000000731413564316347022316 0ustar mgrinbergstaff00000000000000import os from engineio.static_files import get_static_file class WSGIApp(object): """WSGI application middleware for Engine.IO. This middleware dispatches traffic to an Engine.IO application. It can also serve a list of static files to the client, or forward unrelated HTTP traffic to another WSGI application. :param engineio_app: The Engine.IO server. Must be an instance of the ``engineio.Server`` class. :param wsgi_app: The WSGI app that receives all other traffic. :param static_files: A dictionary with static file mapping rules. See the documentation for details on this argument. :param engineio_path: The endpoint where the Engine.IO application should be installed. The default value is appropriate for most cases. Example usage:: import engineio import eventlet eio = engineio.Server() app = engineio.WSGIApp(eio, static_files={ '/': {'content_type': 'text/html', 'filename': 'index.html'}, '/index.html': {'content_type': 'text/html', 'filename': 'index.html'}, }) eventlet.wsgi.server(eventlet.listen(('', 8000)), app) """ def __init__(self, engineio_app, wsgi_app=None, static_files=None, engineio_path='engine.io'): self.engineio_app = engineio_app self.wsgi_app = wsgi_app self.engineio_path = engineio_path.strip('/') self.static_files = static_files or {} def __call__(self, environ, start_response): if 'gunicorn.socket' in environ: # gunicorn saves the socket under environ['gunicorn.socket'], while # eventlet saves it under environ['eventlet.input']. Eventlet also # stores the socket inside a wrapper class, while gunicon writes it # directly into the environment. To give eventlet's WebSocket # module access to this socket when running under gunicorn, here we # copy the socket to the eventlet format. class Input(object): def __init__(self, socket): self.socket = socket def get_socket(self): return self.socket environ['eventlet.input'] = Input(environ['gunicorn.socket']) path = environ['PATH_INFO'] if path is not None and \ path.startswith('/{0}/'.format(self.engineio_path)): return self.engineio_app.handle_request(environ, start_response) else: static_file = get_static_file(path, self.static_files) \ if self.static_files else None if static_file: if os.path.exists(static_file['filename']): start_response( '200 OK', [('Content-Type', static_file['content_type'])]) with open(static_file['filename'], 'rb') as f: return [f.read()] else: return self.not_found(start_response) elif self.wsgi_app is not None: return self.wsgi_app(environ, start_response) return self.not_found(start_response) def not_found(self, start_response): start_response("404 Not Found", [('Content-Type', 'text/plain')]) return [b'Not Found'] class Middleware(WSGIApp): """This class has been renamed to ``WSGIApp`` and is now deprecated.""" def __init__(self, engineio_app, wsgi_app=None, engineio_path='engine.io'): super(Middleware, self).__init__(engineio_app, wsgi_app, engineio_path=engineio_path) python-engineio-3.11.1/engineio/packet.py0000644000076600000240000000666213564316347021455 0ustar mgrinbergstaff00000000000000import base64 import json as _json import six (OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6) packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP'] binary_types = (six.binary_type, bytearray) class Packet(object): """Engine.IO packet.""" json = _json def __init__(self, packet_type=NOOP, data=None, binary=None, encoded_packet=None): self.packet_type = packet_type self.data = data if binary is not None: self.binary = binary elif isinstance(data, six.text_type): self.binary = False elif isinstance(data, binary_types): self.binary = True else: self.binary = False if encoded_packet: self.decode(encoded_packet) def encode(self, b64=False, always_bytes=True): """Encode the packet for transmission.""" if self.binary and not b64: encoded_packet = six.int2byte(self.packet_type) else: encoded_packet = six.text_type(self.packet_type) if self.binary and b64: encoded_packet = 'b' + encoded_packet if self.binary: if b64: encoded_packet += base64.b64encode(self.data).decode('utf-8') else: encoded_packet += self.data elif isinstance(self.data, six.string_types): encoded_packet += self.data elif isinstance(self.data, dict) or isinstance(self.data, list): encoded_packet += self.json.dumps(self.data, separators=(',', ':')) elif self.data is not None: encoded_packet += str(self.data) if always_bytes and not isinstance(encoded_packet, binary_types): encoded_packet = encoded_packet.encode('utf-8') return encoded_packet def decode(self, encoded_packet): """Decode a transmitted package.""" b64 = False if not isinstance(encoded_packet, binary_types): encoded_packet = encoded_packet.encode('utf-8') elif not isinstance(encoded_packet, bytes): encoded_packet = bytes(encoded_packet) self.packet_type = six.byte2int(encoded_packet[0:1]) if self.packet_type == 98: # 'b' --> binary base64 encoded packet self.binary = True encoded_packet = encoded_packet[1:] self.packet_type = six.byte2int(encoded_packet[0:1]) self.packet_type -= 48 b64 = True elif self.packet_type >= 48: self.packet_type -= 48 self.binary = False else: self.binary = True self.data = None if len(encoded_packet) > 1: if self.binary: if b64: self.data = base64.b64decode(encoded_packet[1:]) else: self.data = encoded_packet[1:] else: try: self.data = self.json.loads( encoded_packet[1:].decode('utf-8')) if isinstance(self.data, int): # do not allow integer payloads, see # github.com/miguelgrinberg/python-engineio/issues/75 # for background on this decision raise ValueError except ValueError: self.data = encoded_packet[1:].decode('utf-8') python-engineio-3.11.1/engineio/payload.py0000644000076600000240000000604213564316347021627 0ustar mgrinbergstaff00000000000000import six from . import packet from six.moves import urllib class Payload(object): """Engine.IO payload.""" max_decode_packets = 16 def __init__(self, packets=None, encoded_payload=None): self.packets = packets or [] if encoded_payload is not None: self.decode(encoded_payload) def encode(self, b64=False, jsonp_index=None): """Encode the payload for transmission.""" encoded_payload = b'' for pkt in self.packets: encoded_packet = pkt.encode(b64=b64) packet_len = len(encoded_packet) if b64: encoded_payload += str(packet_len).encode('utf-8') + b':' + \ encoded_packet else: binary_len = b'' while packet_len != 0: binary_len = six.int2byte(packet_len % 10) + binary_len packet_len = int(packet_len / 10) if not pkt.binary: encoded_payload += b'\0' else: encoded_payload += b'\1' encoded_payload += binary_len + b'\xff' + encoded_packet if jsonp_index is not None: encoded_payload = b'___eio[' + \ str(jsonp_index).encode() + \ b']("' + \ encoded_payload.replace(b'"', b'\\"') + \ b'");' return encoded_payload def decode(self, encoded_payload): """Decode a transmitted payload.""" self.packets = [] if len(encoded_payload) == 0: return # JSONP POST payload starts with 'd=' if encoded_payload.startswith(b'd='): encoded_payload = urllib.parse.parse_qs( encoded_payload)[b'd'][0] i = 0 if six.byte2int(encoded_payload[0:1]) <= 1: # binary encoding while i < len(encoded_payload): if len(self.packets) >= self.max_decode_packets: raise ValueError('Too many packets in payload') packet_len = 0 i += 1 while six.byte2int(encoded_payload[i:i + 1]) != 255: packet_len = packet_len * 10 + six.byte2int( encoded_payload[i:i + 1]) i += 1 self.packets.append(packet.Packet( encoded_packet=encoded_payload[i + 1:i + 1 + packet_len])) i += packet_len + 1 else: # assume text encoding encoded_payload = encoded_payload.decode('utf-8') while i < len(encoded_payload): if len(self.packets) >= self.max_decode_packets: raise ValueError('Too many packets in payload') j = encoded_payload.find(':', i) packet_len = int(encoded_payload[i:j]) pkt = encoded_payload[j + 1:j + 1 + packet_len] self.packets.append(packet.Packet(encoded_packet=pkt)) i = j + 1 + packet_len python-engineio-3.11.1/engineio/server.py0000644000076600000240000007056413564316774021522 0ustar mgrinbergstaff00000000000000import gzip import importlib import logging import uuid import zlib import six from six.moves import urllib from . import exceptions from . import packet from . import payload from . import socket default_logger = logging.getLogger('engineio.server') class Server(object): """An Engine.IO server. This class implements a fully compliant Engine.IO web server with support for websocket and long-polling transports. :param async_mode: The asynchronous model to use. See the Deployment section in the documentation for a description of the available options. Valid async modes are "threading", "eventlet", "gevent" and "gevent_uwsgi". If this argument is not given, "eventlet" is tried first, then "gevent_uwsgi", then "gevent", and finally "threading". The first async mode that has all its dependencies installed is the one that is chosen. :param ping_timeout: The time in seconds that the client waits for the server to respond before disconnecting. The default is 60 seconds. :param ping_interval: The interval in seconds at which the client pings the server. The default is 25 seconds. For advanced control, a two element tuple can be given, where the first number is the ping interval and the second is a grace period added by the server. The default grace period is 5 seconds. :param max_http_buffer_size: The maximum size of a message when using the polling transport. The default is 100,000,000 bytes. :param allow_upgrades: Whether to allow transport upgrades or not. The default is ``True``. :param http_compression: Whether to compress packages when using the polling transport. The default is ``True``. :param compression_threshold: Only compress messages when their byte size is greater than this value. The default is 1024 bytes. :param cookie: Name of the HTTP cookie that contains the client session id. If set to ``None``, a cookie is not sent to the client. The default is ``'io'``. :param cors_allowed_origins: Origin or list of origins that are allowed to connect to this server. Only the same origin is allowed by default. Set this argument to ``'*'`` to allow all origins, or to ``[]`` to disable CORS handling. :param cors_credentials: Whether credentials (cookies, authentication) are allowed in requests to this server. The default is ``True``. :param logger: To enable logging set to ``True`` or pass a logger object to use. To disable logging set to ``False``. The default is ``False``. :param json: An alternative json module to use for encoding and decoding packets. Custom json modules must have ``dumps`` and ``loads`` functions that are compatible with the standard library versions. :param async_handlers: If set to ``True``, run message event handlers in non-blocking threads. To run handlers synchronously, set to ``False``. The default is ``True``. :param monitor_clients: If set to ``True``, a background task will ensure inactive clients are closed. Set to ``False`` to disable the monitoring task (not recommended). The default is ``True``. :param kwargs: Reserved for future extensions, any additional parameters given as keyword arguments will be silently ignored. """ compression_methods = ['gzip', 'deflate'] event_names = ['connect', 'disconnect', 'message'] _default_monitor_clients = True def __init__(self, async_mode=None, ping_timeout=60, ping_interval=25, max_http_buffer_size=100000000, allow_upgrades=True, http_compression=True, compression_threshold=1024, cookie='io', cors_allowed_origins=None, cors_credentials=True, logger=False, json=None, async_handlers=True, monitor_clients=None, **kwargs): self.ping_timeout = ping_timeout if isinstance(ping_interval, tuple): self.ping_interval = ping_interval[0] self.ping_interval_grace_period = ping_interval[1] else: self.ping_interval = ping_interval self.ping_interval_grace_period = 5 self.max_http_buffer_size = max_http_buffer_size self.allow_upgrades = allow_upgrades self.http_compression = http_compression self.compression_threshold = compression_threshold self.cookie = cookie self.cors_allowed_origins = cors_allowed_origins self.cors_credentials = cors_credentials self.async_handlers = async_handlers self.sockets = {} self.handlers = {} self.start_service_task = monitor_clients \ if monitor_clients is not None else self._default_monitor_clients if json is not None: packet.Packet.json = json if not isinstance(logger, bool): self.logger = logger else: self.logger = default_logger if not logging.root.handlers and \ self.logger.level == logging.NOTSET: if logger: self.logger.setLevel(logging.INFO) else: self.logger.setLevel(logging.ERROR) self.logger.addHandler(logging.StreamHandler()) modes = self.async_modes() if async_mode is not None: modes = [async_mode] if async_mode in modes else [] self._async = None self.async_mode = None for mode in modes: try: self._async = importlib.import_module( 'engineio.async_drivers.' + mode)._async asyncio_based = self._async['asyncio'] \ if 'asyncio' in self._async else False if asyncio_based != self.is_asyncio_based(): continue # pragma: no cover self.async_mode = mode break except ImportError: pass if self.async_mode is None: raise ValueError('Invalid async_mode specified') if self.is_asyncio_based() and \ ('asyncio' not in self._async or not self._async['asyncio']): # pragma: no cover raise ValueError('The selected async_mode is not asyncio ' 'compatible') if not self.is_asyncio_based() and 'asyncio' in self._async and \ self._async['asyncio']: # pragma: no cover raise ValueError('The selected async_mode requires asyncio and ' 'must use the AsyncServer class') self.logger.info('Server initialized for %s.', self.async_mode) def is_asyncio_based(self): return False def async_modes(self): return ['eventlet', 'gevent_uwsgi', 'gevent', 'threading'] def on(self, event, handler=None): """Register an event handler. :param event: The event name. Can be ``'connect'``, ``'message'`` or ``'disconnect'``. :param handler: The function that should be invoked to handle the event. When this parameter is not given, the method acts as a decorator for the handler function. Example usage:: # as a decorator: @eio.on('connect') def connect_handler(sid, environ): print('Connection request') if environ['REMOTE_ADDR'] in blacklisted: return False # reject # as a method: def message_handler(sid, msg): print('Received message: ', msg) eio.send(sid, 'response') eio.on('message', message_handler) The handler function receives the ``sid`` (session ID) for the client as first argument. The ``'connect'`` event handler receives the WSGI environment as a second argument, and can return ``False`` to reject the connection. The ``'message'`` handler receives the message payload as a second argument. The ``'disconnect'`` handler does not take a second argument. """ if event not in self.event_names: raise ValueError('Invalid event') def set_handler(handler): self.handlers[event] = handler return handler if handler is None: return set_handler set_handler(handler) def send(self, sid, data, binary=None): """Send a message to a client. :param sid: The session id of the recipient client. :param data: The data to send to the client. Data can be of type ``str``, ``bytes``, ``list`` or ``dict``. If a ``list`` or ``dict``, the data will be serialized as JSON. :param binary: ``True`` to send packet as binary, ``False`` to send as text. If not given, unicode (Python 2) and str (Python 3) are sent as text, and str (Python 2) and bytes (Python 3) are sent as binary. """ try: socket = self._get_socket(sid) except KeyError: # the socket is not available self.logger.warning('Cannot send to sid %s', sid) return socket.send(packet.Packet(packet.MESSAGE, data=data, binary=binary)) def get_session(self, sid): """Return the user session for a client. :param sid: The session id of the client. The return value is a dictionary. Modifications made to this dictionary are not guaranteed to be preserved unless ``save_session()`` is called, or when the ``session`` context manager is used. """ socket = self._get_socket(sid) return socket.session def save_session(self, sid, session): """Store the user session for a client. :param sid: The session id of the client. :param session: The session dictionary. """ socket = self._get_socket(sid) socket.session = session def session(self, sid): """Return the user session for a client with context manager syntax. :param sid: The session id of the client. This is a context manager that returns the user session dictionary for the client. Any changes that are made to this dictionary inside the context manager block are saved back to the session. Example usage:: @eio.on('connect') def on_connect(sid, environ): username = authenticate_user(environ) if not username: return False with eio.session(sid) as session: session['username'] = username @eio.on('message') def on_message(sid, msg): with eio.session(sid) as session: print('received message from ', session['username']) """ class _session_context_manager(object): def __init__(self, server, sid): self.server = server self.sid = sid self.session = None def __enter__(self): self.session = self.server.get_session(sid) return self.session def __exit__(self, *args): self.server.save_session(sid, self.session) return _session_context_manager(self, sid) def disconnect(self, sid=None): """Disconnect a client. :param sid: The session id of the client to close. If this parameter is not given, then all clients are closed. """ if sid is not None: try: socket = self._get_socket(sid) except KeyError: # pragma: no cover # the socket was already closed or gone pass else: socket.close() if sid in self.sockets: # pragma: no cover del self.sockets[sid] else: for client in six.itervalues(self.sockets): client.close() self.sockets = {} def transport(self, sid): """Return the name of the transport used by the client. The two possible values returned by this function are ``'polling'`` and ``'websocket'``. :param sid: The session of the client. """ return 'websocket' if self._get_socket(sid).upgraded else 'polling' def handle_request(self, environ, start_response): """Handle an HTTP request from the client. This is the entry point of the Engine.IO application, using the same interface as a WSGI application. For the typical usage, this function is invoked by the :class:`Middleware` instance, but it can be invoked directly when the middleware is not used. :param environ: The WSGI environment. :param start_response: The WSGI ``start_response`` function. This function returns the HTTP response body to deliver to the client as a byte sequence. """ if self.cors_allowed_origins != []: # Validate the origin header if present # This is important for WebSocket more than for HTTP, since # browsers only apply CORS controls to HTTP. origin = environ.get('HTTP_ORIGIN') if origin: allowed_origins = self._cors_allowed_origins(environ) if allowed_origins is not None and origin not in \ allowed_origins: self.logger.info(origin + ' is not an accepted origin.') r = self._bad_request() start_response(r['status'], r['headers']) return [r['response']] method = environ['REQUEST_METHOD'] query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) sid = query['sid'][0] if 'sid' in query else None b64 = False jsonp = False jsonp_index = None if 'b64' in query: if query['b64'][0] == "1" or query['b64'][0].lower() == "true": b64 = True if 'j' in query: jsonp = True try: jsonp_index = int(query['j'][0]) except (ValueError, KeyError, IndexError): # Invalid JSONP index number pass if jsonp and jsonp_index is None: self.logger.warning('Invalid JSONP index number') r = self._bad_request() elif method == 'GET': if sid is None: transport = query.get('transport', ['polling'])[0] if transport != 'polling' and transport != 'websocket': self.logger.warning('Invalid transport %s', transport) r = self._bad_request() else: r = self._handle_connect(environ, start_response, transport, b64, jsonp_index) else: if sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: packets = socket.handle_get_request( environ, start_response) if isinstance(packets, list): r = self._ok(packets, b64=b64, jsonp_index=jsonp_index) else: r = packets except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) r = self._bad_request() if sid in self.sockets and self.sockets[sid].closed: del self.sockets[sid] elif method == 'POST': if sid is None or sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: socket.handle_post_request(environ) r = self._ok(jsonp_index=jsonp_index) except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) r = self._bad_request() except: # pragma: no cover # for any other unexpected errors, we log the error # and keep going self.logger.exception('post request handler error') r = self._ok(jsonp_index=jsonp_index) elif method == 'OPTIONS': r = self._ok() else: self.logger.warning('Method %s not supported', method) r = self._method_not_found() if not isinstance(r, dict): return r or [] if self.http_compression and \ len(r['response']) >= self.compression_threshold: encodings = [e.split(';')[0].strip() for e in environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] for encoding in encodings: if encoding in self.compression_methods: r['response'] = \ getattr(self, '_' + encoding)(r['response']) r['headers'] += [('Content-Encoding', encoding)] break cors_headers = self._cors_headers(environ) start_response(r['status'], r['headers'] + cors_headers) return [r['response']] def start_background_task(self, target, *args, **kwargs): """Start a background task using the appropriate async model. This is a utility function that applications can use to start a background task using the method that is compatible with the selected async mode. :param target: the target function to execute. :param args: arguments to pass to the function. :param kwargs: keyword arguments to pass to the function. This function returns an object compatible with the `Thread` class in the Python standard library. The `start()` method on this object is already called by this function. """ th = self._async['thread'](target=target, args=args, kwargs=kwargs) th.start() return th # pragma: no cover def sleep(self, seconds=0): """Sleep for the requested amount of time using the appropriate async model. This is a utility function that applications can use to put a task to sleep without having to worry about using the correct call for the selected async mode. """ return self._async['sleep'](seconds) def create_queue(self, *args, **kwargs): """Create a queue object using the appropriate async model. This is a utility function that applications can use to create a queue without having to worry about using the correct call for the selected async mode. """ return self._async['queue'](*args, **kwargs) def get_queue_empty_exception(self): """Return the queue empty exception for the appropriate async model. This is a utility function that applications can use to work with a queue without having to worry about using the correct call for the selected async mode. """ return self._async['queue_empty'] def create_event(self, *args, **kwargs): """Create an event object using the appropriate async model. This is a utility function that applications can use to create an event without having to worry about using the correct call for the selected async mode. """ return self._async['event'](*args, **kwargs) def _generate_id(self): """Generate a unique session id.""" return uuid.uuid4().hex def _handle_connect(self, environ, start_response, transport, b64=False, jsonp_index=None): """Handle a client connection request.""" if self.start_service_task: # start the service task to monitor connected clients self.start_service_task = False self.start_background_task(self._service_task) sid = self._generate_id() s = socket.Socket(self, sid) self.sockets[sid] = s pkt = packet.Packet( packet.OPEN, {'sid': sid, 'upgrades': self._upgrades(sid, transport), 'pingTimeout': int(self.ping_timeout * 1000), 'pingInterval': int(self.ping_interval * 1000)}) s.send(pkt) ret = self._trigger_event('connect', sid, environ, run_async=False) if ret is False: del self.sockets[sid] self.logger.warning('Application rejected connection') return self._unauthorized() if transport == 'websocket': ret = s.handle_get_request(environ, start_response) if s.closed: # websocket connection ended, so we are done del self.sockets[sid] return ret else: s.connected = True headers = None if self.cookie: headers = [('Set-Cookie', self.cookie + '=' + sid)] try: return self._ok(s.poll(), headers=headers, b64=b64, jsonp_index=jsonp_index) except exceptions.QueueEmpty: return self._bad_request() def _upgrades(self, sid, transport): """Return the list of possible upgrades for a client connection.""" if not self.allow_upgrades or self._get_socket(sid).upgraded or \ self._async['websocket'] is None or transport == 'websocket': return [] return ['websocket'] def _trigger_event(self, event, *args, **kwargs): """Invoke an event handler.""" run_async = kwargs.pop('run_async', False) if event in self.handlers: if run_async: return self.start_background_task(self.handlers[event], *args) else: try: return self.handlers[event](*args) except: self.logger.exception(event + ' handler error') if event == 'connect': # if connect handler raised error we reject the # connection return False def _get_socket(self, sid): """Return the socket object for a given session.""" try: s = self.sockets[sid] except KeyError: raise KeyError('Session not found') if s.closed: del self.sockets[sid] raise KeyError('Session is disconnected') return s def _ok(self, packets=None, headers=None, b64=False, jsonp_index=None): """Generate a successful HTTP response.""" if packets is not None: if headers is None: headers = [] if b64: headers += [('Content-Type', 'text/plain; charset=UTF-8')] else: headers += [('Content-Type', 'application/octet-stream')] return {'status': '200 OK', 'headers': headers, 'response': payload.Payload(packets=packets).encode( b64=b64, jsonp_index=jsonp_index)} else: return {'status': '200 OK', 'headers': [('Content-Type', 'text/plain')], 'response': b'OK'} def _bad_request(self): """Generate a bad request HTTP error response.""" return {'status': '400 BAD REQUEST', 'headers': [('Content-Type', 'text/plain')], 'response': b'Bad Request'} def _method_not_found(self): """Generate a method not found HTTP error response.""" return {'status': '405 METHOD NOT FOUND', 'headers': [('Content-Type', 'text/plain')], 'response': b'Method Not Found'} def _unauthorized(self): """Generate a unauthorized HTTP error response.""" return {'status': '401 UNAUTHORIZED', 'headers': [('Content-Type', 'text/plain')], 'response': b'Unauthorized'} def _cors_allowed_origins(self, environ): default_origins = [] if 'wsgi.url_scheme' in environ and 'HTTP_HOST' in environ: default_origins.append('{scheme}://{host}'.format( scheme=environ['wsgi.url_scheme'], host=environ['HTTP_HOST'])) if 'HTTP_X_FORWARDED_HOST' in environ: scheme = environ.get( 'HTTP_X_FORWARDED_PROTO', environ['wsgi.url_scheme']).split(',')[0].strip() default_origins.append('{scheme}://{host}'.format( scheme=scheme, host=environ['HTTP_X_FORWARDED_HOST'].split( ',')[0].strip())) if self.cors_allowed_origins is None: allowed_origins = default_origins elif self.cors_allowed_origins == '*': allowed_origins = None elif isinstance(self.cors_allowed_origins, six.string_types): allowed_origins = [self.cors_allowed_origins] else: allowed_origins = self.cors_allowed_origins return allowed_origins def _cors_headers(self, environ): """Return the cross-origin-resource-sharing headers.""" if self.cors_allowed_origins == []: # special case, CORS handling is completely disabled return [] headers = [] allowed_origins = self._cors_allowed_origins(environ) if 'HTTP_ORIGIN' in environ and \ (allowed_origins is None or environ['HTTP_ORIGIN'] in allowed_origins): headers = [('Access-Control-Allow-Origin', environ['HTTP_ORIGIN'])] if environ['REQUEST_METHOD'] == 'OPTIONS': headers += [('Access-Control-Allow-Methods', 'OPTIONS, GET, POST')] if 'HTTP_ACCESS_CONTROL_REQUEST_HEADERS' in environ: headers += [('Access-Control-Allow-Headers', environ['HTTP_ACCESS_CONTROL_REQUEST_HEADERS'])] if self.cors_credentials: headers += [('Access-Control-Allow-Credentials', 'true')] return headers def _gzip(self, response): """Apply gzip compression to a response.""" bytesio = six.BytesIO() with gzip.GzipFile(fileobj=bytesio, mode='w') as gz: gz.write(response) return bytesio.getvalue() def _deflate(self, response): """Apply deflate compression to a response.""" return zlib.compress(response) def _service_task(self): # pragma: no cover """Monitor connected clients and clean up those that time out.""" while True: if len(self.sockets) == 0: # nothing to do self.sleep(self.ping_timeout) continue # go through the entire client list in a ping interval cycle sleep_interval = float(self.ping_timeout) / len(self.sockets) try: # iterate over the current clients for s in self.sockets.copy().values(): if not s.closing and not s.closed: s.check_ping_timeout() self.sleep(sleep_interval) except (SystemExit, KeyboardInterrupt): self.logger.info('service task canceled') break except: # an unexpected exception has occurred, log it and continue self.logger.exception('service task exception') python-engineio-3.11.1/engineio/socket.py0000644000076600000240000002311713564316347021470 0ustar mgrinbergstaff00000000000000import six import sys import time from . import exceptions from . import packet from . import payload class Socket(object): """An Engine.IO socket.""" upgrade_protocols = ['websocket'] def __init__(self, server, sid): self.server = server self.sid = sid self.queue = self.server.create_queue() self.last_ping = time.time() self.connected = False self.upgrading = False self.upgraded = False self.packet_backlog = [] self.closing = False self.closed = False self.session = {} def poll(self): """Wait for packets to send to the client.""" queue_empty = self.server.get_queue_empty_exception() try: packets = [self.queue.get(timeout=self.server.ping_timeout)] self.queue.task_done() except queue_empty: raise exceptions.QueueEmpty() if packets == [None]: return [] while True: try: packets.append(self.queue.get(block=False)) self.queue.task_done() except queue_empty: break return packets def receive(self, pkt): """Receive packet from the client.""" packet_name = packet.packet_names[pkt.packet_type] \ if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN' self.server.logger.info('%s: Received packet %s data %s', self.sid, packet_name, pkt.data if not isinstance(pkt.data, bytes) else '') if pkt.packet_type == packet.PING: self.last_ping = time.time() self.send(packet.Packet(packet.PONG, pkt.data)) elif pkt.packet_type == packet.MESSAGE: self.server._trigger_event('message', self.sid, pkt.data, run_async=self.server.async_handlers) elif pkt.packet_type == packet.UPGRADE: self.send(packet.Packet(packet.NOOP)) elif pkt.packet_type == packet.CLOSE: self.close(wait=False, abort=True) else: raise exceptions.UnknownPacketError() def check_ping_timeout(self): """Make sure the client is still sending pings. This helps detect disconnections for long-polling clients. """ if self.closed: raise exceptions.SocketIsClosedError() if time.time() - self.last_ping > self.server.ping_interval + \ self.server.ping_interval_grace_period: self.server.logger.info('%s: Client is gone, closing socket', self.sid) # Passing abort=False here will cause close() to write a # CLOSE packet. This has the effect of updating half-open sockets # to their correct state of disconnected self.close(wait=False, abort=False) return False return True def send(self, pkt): """Send a packet to the client.""" if not self.check_ping_timeout(): return if self.upgrading: self.packet_backlog.append(pkt) else: self.queue.put(pkt) self.server.logger.info('%s: Sending packet %s data %s', self.sid, packet.packet_names[pkt.packet_type], pkt.data if not isinstance(pkt.data, bytes) else '') def handle_get_request(self, environ, start_response): """Handle a long-polling GET request from the client.""" connections = [ s.strip() for s in environ.get('HTTP_CONNECTION', '').lower().split(',')] transport = environ.get('HTTP_UPGRADE', '').lower() if 'upgrade' in connections and transport in self.upgrade_protocols: self.server.logger.info('%s: Received request to upgrade to %s', self.sid, transport) return getattr(self, '_upgrade_' + transport)(environ, start_response) try: packets = self.poll() except exceptions.QueueEmpty: exc = sys.exc_info() self.close(wait=False) six.reraise(*exc) return packets def handle_post_request(self, environ): """Handle a long-polling POST request from the client.""" length = int(environ.get('CONTENT_LENGTH', '0')) if length > self.server.max_http_buffer_size: raise exceptions.ContentTooLongError() else: body = environ['wsgi.input'].read(length) p = payload.Payload(encoded_payload=body) for pkt in p.packets: self.receive(pkt) def close(self, wait=True, abort=False): """Close the socket connection.""" if not self.closed and not self.closing: self.closing = True self.server._trigger_event('disconnect', self.sid, run_async=False) if not abort: self.send(packet.Packet(packet.CLOSE)) self.closed = True self.queue.put(None) if wait: self.queue.join() def _upgrade_websocket(self, environ, start_response): """Upgrade the connection from polling to websocket.""" if self.upgraded: raise IOError('Socket has been upgraded already') if self.server._async['websocket'] is None: # the selected async mode does not support websocket return self.server._bad_request() ws = self.server._async['websocket'](self._websocket_handler) return ws(environ, start_response) def _websocket_handler(self, ws): """Engine.IO handler for websocket transport.""" # try to set a socket timeout matching the configured ping interval for attr in ['_sock', 'socket']: # pragma: no cover if hasattr(ws, attr) and hasattr(getattr(ws, attr), 'settimeout'): getattr(ws, attr).settimeout(self.server.ping_timeout) if self.connected: # the socket was already connected, so this is an upgrade self.upgrading = True # hold packet sends during the upgrade pkt = ws.wait() decoded_pkt = packet.Packet(encoded_packet=pkt) if decoded_pkt.packet_type != packet.PING or \ decoded_pkt.data != 'probe': self.server.logger.info( '%s: Failed websocket upgrade, no PING packet', self.sid) return [] ws.send(packet.Packet( packet.PONG, data=six.text_type('probe')).encode(always_bytes=False)) self.queue.put(packet.Packet(packet.NOOP)) # end poll pkt = ws.wait() decoded_pkt = packet.Packet(encoded_packet=pkt) if decoded_pkt.packet_type != packet.UPGRADE: self.upgraded = False self.server.logger.info( ('%s: Failed websocket upgrade, expected UPGRADE packet, ' 'received %s instead.'), self.sid, pkt) return [] self.upgraded = True # flush any packets that were sent during the upgrade for pkt in self.packet_backlog: self.queue.put(pkt) self.packet_backlog = [] self.upgrading = False else: self.connected = True self.upgraded = True # start separate writer thread def writer(): while True: packets = None try: packets = self.poll() except exceptions.QueueEmpty: break if not packets: # empty packet list returned -> connection closed break try: for pkt in packets: ws.send(pkt.encode(always_bytes=False)) except: break writer_task = self.server.start_background_task(writer) self.server.logger.info( '%s: Upgrade to websocket successful', self.sid) while True: p = None try: p = ws.wait() except Exception as e: # if the socket is already closed, we can assume this is a # downstream error of that if not self.closed: # pragma: no cover self.server.logger.info( '%s: Unexpected error "%s", closing connection', self.sid, str(e)) break if p is None: # connection closed by client break if isinstance(p, six.text_type): # pragma: no cover p = p.encode('utf-8') pkt = packet.Packet(encoded_packet=p) try: self.receive(pkt) except exceptions.UnknownPacketError: # pragma: no cover pass except exceptions.SocketIsClosedError: # pragma: no cover self.server.logger.info('Receive error -- socket is closed') break except: # pragma: no cover # if we get an unexpected exception we log the error and exit # the connection properly self.server.logger.exception('Unknown receive error') break self.queue.put(None) # unlock the writer task so that it can exit writer_task.join() self.close(wait=False, abort=True) return [] python-engineio-3.11.1/engineio/static_files.py0000644000076600000240000000347313564316347022654 0ustar mgrinbergstaff00000000000000content_types = { 'css': 'text/css', 'gif': 'image/gif', 'html': 'text/html', 'jpg': 'image/jpeg', 'js': 'application/javascript', 'json': 'application/json', 'png': 'image/png', 'txt': 'text/plain', } def get_static_file(path, static_files): """Return the local filename and content type for the requested static file URL. :param path: the path portion of the requested URL. :param static_files: a static file configuration dictionary. This function returns a dictionary with two keys, "filename" and "content_type". If the requested URL does not match any static file, the return value is None. """ if path in static_files: f = static_files[path] else: f = None rest = '' while path != '': path, last = path.rsplit('/', 1) rest = '/' + last + rest if path in static_files: f = static_files[path] + rest break elif path + '/' in static_files: f = static_files[path + '/'] + rest[1:] break if f: if isinstance(f, str): f = {'filename': f} if f['filename'].endswith('/'): if '' in static_files: if isinstance(static_files[''], str): f['filename'] += static_files[''] else: f['filename'] += static_files['']['filename'] if 'content_type' in static_files['']: f['content_type'] = static_files['']['content_type'] else: f['filename'] += 'index.html' if 'content_type' not in f: ext = f['filename'].rsplit('.')[-1] f['content_type'] = content_types.get( ext, 'application/octet-stream') return f python-engineio-3.11.1/python_engineio.egg-info/0000755000076600000240000000000013574014331022702 5ustar mgrinbergstaff00000000000000python-engineio-3.11.1/python_engineio.egg-info/PKG-INFO0000644000076600000240000000336613574014331024007 0ustar mgrinbergstaff00000000000000Metadata-Version: 2.1 Name: python-engineio Version: 3.11.1 Summary: Engine.IO server Home-page: http://github.com/miguelgrinberg/python-engineio/ Author: Miguel Grinberg Author-email: miguelgrinberg50@gmail.com License: MIT Description: python-engineio =============== .. image:: https://travis-ci.org/miguelgrinberg/python-engineio.svg?branch=master :target: https://travis-ci.org/miguelgrinberg/python-engineio Python implementation of the `Engine.IO`_ realtime client and server. Resources --------- - `Documentation`_ - `PyPI`_ - `Change Log`_ - Questions? See the `questions`_ others have asked on Stack Overflow, or `ask`_ your own question. .. _Engine.IO: https://github.com/socketio/engine.io .. _Documentation: https://python-engineio.readthedocs.io/en/latest/ .. _PyPI: https://pypi.python.org/pypi/python-engineio .. _Change Log: https://github.com/miguelgrinberg/python-engineio/blob/master/CHANGES.md .. _questions: https://stackoverflow.com/questions/tagged/python-socketio .. _ask: https://stackoverflow.com/questions/ask?tags=python+python-socketio Platform: any Classifier: Environment :: Web Environment Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content Classifier: Topic :: Software Development :: Libraries :: Python Modules Provides-Extra: asyncio_client Provides-Extra: client python-engineio-3.11.1/python_engineio.egg-info/SOURCES.txt0000644000076600000240000000152113574014331024565 0ustar mgrinbergstaff00000000000000LICENSE MANIFEST.in README.rst setup.py engineio/__init__.py engineio/asyncio_client.py engineio/asyncio_server.py engineio/asyncio_socket.py engineio/client.py engineio/exceptions.py engineio/middleware.py engineio/packet.py engineio/payload.py engineio/server.py engineio/socket.py engineio/static_files.py engineio/async_drivers/__init__.py engineio/async_drivers/aiohttp.py engineio/async_drivers/asgi.py engineio/async_drivers/eventlet.py engineio/async_drivers/gevent.py engineio/async_drivers/gevent_uwsgi.py engineio/async_drivers/sanic.py engineio/async_drivers/threading.py engineio/async_drivers/tornado.py python_engineio.egg-info/PKG-INFO python_engineio.egg-info/SOURCES.txt python_engineio.egg-info/dependency_links.txt python_engineio.egg-info/not-zip-safe python_engineio.egg-info/requires.txt python_engineio.egg-info/top_level.txtpython-engineio-3.11.1/python_engineio.egg-info/dependency_links.txt0000644000076600000240000000000113574014331026750 0ustar mgrinbergstaff00000000000000 python-engineio-3.11.1/python_engineio.egg-info/not-zip-safe0000644000076600000240000000000113564316405025136 0ustar mgrinbergstaff00000000000000 python-engineio-3.11.1/python_engineio.egg-info/requires.txt0000644000076600000240000000013613574014331025302 0ustar mgrinbergstaff00000000000000six>=1.9.0 [asyncio_client] aiohttp>=3.4 [client] requests>=2.21.0 websocket-client>=0.54.0 python-engineio-3.11.1/python_engineio.egg-info/top_level.txt0000644000076600000240000000001113574014331025424 0ustar mgrinbergstaff00000000000000engineio python-engineio-3.11.1/setup.cfg0000644000076600000240000000004613574014331017633 0ustar mgrinbergstaff00000000000000[egg_info] tag_build = tag_date = 0 python-engineio-3.11.1/setup.py0000755000076600000240000000312613566443426017545 0ustar mgrinbergstaff00000000000000""" python-engineio --------------- Engine.IO server. """ import re import sys from setuptools import setup with open('engineio/__init__.py', 'r') as f: version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE).group(1) with open('README.rst', 'r') as f: long_description = f.read() setup( name='python-engineio', version=version, url='http://github.com/miguelgrinberg/python-engineio/', license='MIT', author='Miguel Grinberg', author_email='miguelgrinberg50@gmail.com', description='Engine.IO server', long_description=long_description, packages=['engineio', 'engineio.async_drivers'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'six>=1.9.0', ], extras_require={ 'client': [ 'requests>=2.21.0', 'websocket-client>=0.54.0', ], 'asyncio_client': [ 'aiohttp>=3.4', ] }, tests_require=[ 'mock', 'eventlet', ], test_suite='tests' if sys.version_info >= (3, 0) else 'tests.common', classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )