How do I use asyncio.sslproto.SSLProtocol? - async-await

I am trying to talk to an ElectrumX server using JSON-RPC over TLS, but through Tor (SOCKS proxy on localhost).
When running the following code, drain() in asyncio.streams is calling _drain_helper in SSLProtocol, which I don't know how to implement.
If I just make it a no-op, it doesn't seem to work.
I know that the JSON-RPC server is working because I have tested with
echo -ne '{"id":0,"args":["3.0.2","1.1"],"method":"server.version"}\n' | socat stdio openssl-connect:songbird.bauerj.eu:50002,verify=0
My attempt at using TLS through SOCKS in Python with asyncio:
from asyncio.sslproto import SSLProtocol
import aiosocks
import asyncio

loop = None

class MySSLProtocol(SSLProtocol):
    def __init__(otherself):
        super().__init__(loop, None, False, None)
        # app_proto context waiter

async def l(fut):
    try:
        socks4addr = aiosocks.Socks4Addr("127.0.0.1", 9050)
        transport, protocol = await aiosocks.create_connection(
            MySSLProtocol, proxy=socks4addr, proxy_auth=None,
            dst=("songbird.bauerj.eu", 50002))
        reader = asyncio.StreamReader()
        reader.set_transport(transport)
        writer = asyncio.StreamWriter(transport, protocol, reader, loop)
        writer.write(b'{"id":0,"method":"server.version","args":["3.0.2", "1.1"]}\n')
        await writer.drain()
        print(await reader.readuntil(b"\n"))
        fut.set_result("finished")
    except BaseException as e:
        fut.set_exception(e)

def f():
    global loop
    loop = asyncio.get_event_loop()
    fut = asyncio.Future()
    asyncio.ensure_future(l(fut))
    loop.run_until_complete(fut)
    print(fut.result())
    loop.close()

f()
I am using aiosocks from master (commit 932374c).

asyncio.sslproto and SSLProtocol are part of asyncio's private API.
You should never use these classes directly or derive from them.
For working with SSL, pass a normal protocol (derived from asyncio.Protocol) and an ssl.SSLContext as the ssl parameter to loop.create_connection() / loop.create_server().
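For illustration, a minimal sketch of that supported approach (my own code, not from the question: it connects directly rather than through the SOCKS proxy, and it disables certificate verification only because the test server's certificate does not verify):

import asyncio
import ssl

class JSONRPCClient(asyncio.Protocol):
    # Send one JSON-RPC request, then collect the reply line.
    def __init__(self, done):
        self.done = done
        self.buf = b""

    def connection_made(self, transport):
        transport.write(b'{"id":0,"method":"server.version","args":["3.0.2","1.1"]}\n')

    def data_received(self, data):
        self.buf += data
        if b"\n" in self.buf and not self.done.done():
            self.done.set_result(self.buf)

async def main():
    loop = asyncio.get_running_loop()
    ctx = ssl.create_default_context()
    ctx.check_hostname = False  # test server cert won't verify
    ctx.verify_mode = ssl.CERT_NONE
    done = loop.create_future()
    # The event loop drives TLS internally; no SSLProtocol subclassing needed.
    await loop.create_connection(lambda: JSONRPCClient(done),
                                 "songbird.bauerj.eu", 50002, ssl=ctx)
    print(await done)

asyncio.run(main())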

I'm using it like so:
class HTTP(asyncio.Protocol):
    def __init__(self, config: Config):
        self.config = config

    def data_received(self, data) -> None:
        print(data)

class HTTPS:
    def __new__(cls, config: Config):
        ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_context.load_cert_chain(config.rsa_cert, config.rsa_key)
        return asyncio.sslproto.SSLProtocol(
            loop=asyncio.get_running_loop(),
            app_protocol=HTTP(config),
            sslcontext=ssl_context,
            waiter=None,
            server_side=True,
        )
See mitm for a working example. It is not the way I would have chosen to use it, but it is the only way I could figure out. To my knowledge there is no way to upgrade an existing asyncio.Protocol to use SSL; you must create a new one.
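For context, a hedged sketch of how such a factory might be wired into a server (an assumption on my part, not code from mitm): since the returned SSLProtocol performs TLS itself, create_server() is called without an ssl= argument.

# Hypothetical wiring: HTTPS(config) builds an SSLProtocol per connection,
# so the server is created without ssl=... (the protocol handles TLS).
async def serve(config: Config):
    loop = asyncio.get_running_loop()
    server = await loop.create_server(lambda: HTTPS(config), "127.0.0.1", 8443)
    async with server:
        await server.serve_forever()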

Related

DJANGO-STORAGES, PARAMIKO: connection failure for global connection

I have a strange problem using the SFTP API from django-storages (https://github.com/jschneier/django-storages). I am trying to use it to fetch media files which are stored on a different server, and thus needed to create a proxy for SFTP downloads, since plain Django just sends GET requests to the MEDIA_ROOT. I figured that middleware provides a good hook:
import mimetypes

from django.http import HttpResponse
from storages.backends.sftpstorage import SFTPStorage

class SFTPMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Code to be executed for each request before
        # the view (and later middleware) are called.
        response = self.get_response(request)
        try:
            path = request.get_full_path()
            SFTP = SFTPStorage()  # <- this is where the magic happens
            if SFTP.exists(path):
                file = SFTP._read(path)
                type, encoding = mimetypes.guess_type(path)
                response = HttpResponse(file, content_type=type)
                response['Content-Disposition'] = u'attachment; filename="{filename}"'.format(filename=path)
        except PermissionError:
            pass
        return response
which works fine, but it obviously opens a new connection every time a website call is issued, which I don't want (it also crashes after about 3 reloads; I think it has too many parallel connections by then). So I tried opening just one connection to the server via SFTP by moving the SFTP = SFTPStorage() initialization into the __init__() method, which is only called once:
import mimetypes

from django.http import HttpResponse
from storages.backends.sftpstorage import SFTPStorage

class SFTPMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
        self.SFTP = SFTPStorage()  # <- this is where the magic happens

    def __call__(self, request):
        # Code to be executed for each request before
        # the view (and later middleware) are called.
        response = self.get_response(request)
        try:
            path = request.get_full_path()
            if self.SFTP.exists(path):
                file = self.SFTP._read(path)
                type, encoding = mimetypes.guess_type(path)
                response = HttpResponse(file, content_type=type)
                response['Content-Disposition'] = u'attachment; filename="{filename}"'.format(filename=path)
        except PermissionError:
            pass
        return response
But this implementation doesn't seem to work; the program gets stuck either before the SFTP.exists() call or after the SFTP._read() call.
Can anybody tell me how to fix this problem? Or does anybody even have a better idea as to how to tackle this problem?
Thanks in advance,
Kingrimursel

Autobahn websocket client in Quart (async Flask) application

Good evening everyone. I'm not quite new to this place but finally decided to register and ask for help. I am developing a web application using the Quart framework (an asynchronous Flask). Now that the application has become bigger and more complex, I decided to separate different procedures onto different server instances, mostly because I want to keep the web server clean, more abstract, and free of computational load.
So I plan to use one web server with a few (if needed) identical procedure servers. All servers are based on the Quart framework, for now just for simplicity of development. I decided to use the Crossbar.io router and autobahn to connect all the servers together.
And here is where the problem occurred.
I followed these posts:
Running several ApplicationSessions non-blockingly using autbahn.asyncio.wamp
How can I implement an interactive websocket client with autobahn asyncio?
How I can integrate crossbar client (python3,asyncio) with tkinter
How to send Autobahn/Twisted WAMP message from outside of protocol?
It seems like I have tried every possible approach to implement an autobahn websocket client in my Quart application, but I cannot get both parts working at once: either the Quart app works but the autobahn WS client does not, or vice versa.
Simplified, my quart app looks like this:
from quart import Quart, request, current_app
from config import Config

# Autobahn
import asyncio
from autobahn import wamp
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
import concurrent.futures

class Component(ApplicationSession):
    """
    An application component registering RPC endpoints using decorators.
    """
    async def onJoin(self, details):
        # register all methods on this object decorated with "@wamp.register"
        # as an RPC endpoint
        ##
        results = await self.register(self)
        for res in results:
            if isinstance(res, wamp.protocol.Registration):
                # res is a Registration instance
                print("Ok, registered procedure with registration ID {}".format(res.id))
            else:
                # res is a Failure instance
                print("Failed to register procedure: {}".format(res))

    @wamp.register(u'com.mathservice.add2')
    def add2(self, x, y):
        return x + y

def create_app(config_class=Config):
    app = Quart(__name__)
    app.config.from_object(config_class)

    # Blueprint registration
    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    print("before autobahn start")
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        runner = ApplicationRunner('ws://127.0.0.1:8080/ws', 'realm1')
        future = executor.submit(runner.run(Component))  # note: runner.run() executes here and blocks
    print("after autobahn started")

    return app

from app import models
In this case the application gets stuck in the runner loop and the whole application does not work (it cannot serve requests); it only proceeds if I interrupt the runner's (autobahn) loop with Ctrl-C.
CMD after start:
(quart-app) user#car:~/quart-app$ hypercorn --debug --error-log - --access-log - -b 0.0.0.0:8001 tengine:app
Running on 0.0.0.0:8001 over http (CTRL + C to quit)
before autobahn start
Ok, registered procedure with registration ID 4605315769796303
after pressing ctrl-C:
...
^Cafter autobahn started
2019-03-29T01:06:52 <Server sockets=[<socket.socket fd=11, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=0, laddr=('0.0.0.0', 8001)>]> is serving
How can I make the Quart application and the autobahn client work together in a non-blocking fashion, so that autobahn opens and keeps a websocket connection to the Crossbar router and silently listens in the background?
Well, after many sleepless nights I finally found a good approach to solve this conundrum.
Thanks to this post: C-Python asyncio: running discord.py in a thread
So, I rewrote my code like this and was able to run my Quart app with the autobahn client inside, both actively working in a non-blocking fashion.
The whole __init__.py looks like:
from quart import Quart, request, current_app
from config import Config

def create_app(config_class=Config):
    app = Quart(__name__)
    app.config.from_object(config_class)

    # Blueprint registration
    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    return app

# Autobahn
import asyncio
from autobahn import wamp
from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner
import threading

class Component(ApplicationSession):
    """
    An application component registering RPC endpoints using decorators.
    """
    async def onJoin(self, details):
        # register all methods on this object decorated with "@wamp.register"
        # as an RPC endpoint
        ##
        results = await self.register(self)
        for res in results:
            if isinstance(res, wamp.protocol.Registration):
                # res is a Registration instance
                print("Ok, registered procedure with registration ID {}".format(res.id))
            else:
                # res is a Failure instance
                print("Failed to register procedure: {}".format(res))

    def onDisconnect(self):
        print('Autobahn disconnected')

    @wamp.register(u'com.mathservice.add2')
    def add2(self, x, y):
        return x + y

async def start():
    runner = ApplicationRunner('ws://127.0.0.1:8080/ws', 'realm1')
    await runner.run(Component)  # use client.start instead of client.run

def run_it_forever(loop):
    loop.run_forever()

asyncio.get_child_watcher()  # I still don't know if I need this method. It works without it.
loop = asyncio.get_event_loop()
loop.create_task(start())

print('Starting thread for Autobahn...')
thread = threading.Thread(target=run_it_forever, args=(loop,))
thread.start()
print("Thread for Autobahn has been started...")

from app import models
In this scenario we create a task from autobahn's runner.run, attach it to the current loop, and then run that loop forever in a new thread.
I was quite satisfied with this solution... but then I found out that it has some drawbacks that were crucial for me, for example reconnecting when the connection drops (i.e. the crossbar router becomes unavailable): with this approach, if the connection fails to initialize or drops after a while, it does not try to reconnect. Additionally, it wasn't obvious to me how to use the ApplicationSession API, i.e. how to register/call RPCs from the code in my quart app.
Luckily I spotted the newer component API that autobahn uses in its documentation:
https://autobahn.readthedocs.io/en/latest/wamp/programming.html#registering-procedures
https://github.com/crossbario/autobahn-python/blob/master/examples/asyncio/wamp/component/backend.py
It has an auto-reconnect feature, and it's easy to register functions for RPC using decorators like @component.register('com.something.do'); you just need to import the component first (see the registration sketch after the code below).
So here is the final view of the __init__.py solution:
from quart import Quart, request, current_app
from config import Config

def create_app(config_class=Config):
    ...
    return app

from autobahn.asyncio.component import Component, run
from autobahn.wamp.types import RegisterOptions
import asyncio
import ssl
import threading

component = Component(
    transports=[
        {
            "type": "websocket",
            "url": u"ws://localhost:8080/ws",
            "endpoint": {
                "type": "tcp",
                "host": "localhost",
                "port": 8080,
            },
            "options": {
                "open_handshake_timeout": 100,
            }
        },
    ],
    realm=u"realm1",
)

@component.on_join
def join(session, details):
    print("joined {}".format(details))

async def start():
    await component.start()  # used component.start() instead of run([component]) as it's an async function

def run_it_forever(loop):
    loop.run_forever()

loop = asyncio.get_event_loop()
# asyncio.get_child_watcher()  # I still don't know if I need this method. It works without it.
asyncio.get_child_watcher().attach_loop(loop)
loop.create_task(start())

print('Starting thread for Autobahn...')
thread = threading.Thread(target=run_it_forever, args=(loop,))
thread.start()
print("Thread for Autobahn has been started...")

from app import models
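For completeness, a minimal registration sketch (my own, based on the component API docs linked above, not part of the original answer); the URI is borrowed from the earlier ApplicationSession example:

# Sketch: register an RPC endpoint on the component above with a decorator;
# autobahn registers it whenever the component (re)joins the realm.
@component.register('com.mathservice.add2')
def add2(x, y):
    return x + y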
I hope it will help somebody. Cheers!

How to reuse aiohttp ClientSession pool?

The docs say to reuse the ClientSession:
Don’t create a session per request. Most likely you need a session per
application which performs all requests altogether.
A session contains a connection pool inside, connection reusage and
keep-alives (both are on by default) may speed up total performance.
But there doesn't seem to be any explanation in the docs about how to do this. There is one example that's maybe relevant, but it does not show how to reuse the pool elsewhere: http://aiohttp.readthedocs.io/en/stable/client.html#keep-alive-connection-pooling-and-cookie-sharing
Would something like this be the correct way to do it?
@app.listener('before_server_start')
async def before_server_start(app, loop):
    app.pg_pool = await asyncpg.create_pool(**DB_CONFIG, loop=loop, max_size=100)
    app.http_session_pool = aiohttp.ClientSession()

@app.listener('after_server_stop')
async def after_server_stop(app, loop):
    app.http_session_pool.close()
    app.pg_pool.close()

@app.post("/api/register")
async def register(request):
    # json validation
    async with app.pg_pool.acquire() as pg:
        await pg.execute()  # create unactivated user in db
    async with app.http_session_pool as session:
        # TODO send activation email using SES API
        async with session.post('http://httpbin.org/post', data=b'data') as resp:
            print(resp.status)
            print(await resp.text())
    return HTTPResponse(status=204)
There are a few things I think can be improved:
1)
An instance of ClientSession is one session object. This one session contains a pool of connections, but it is not a "session pool" itself. I would suggest renaming http_session_pool to http_session or maybe client_session.
2)
A session's close() method is a coroutine. You should await it:
await app.client_session.close()
Or even better (IMHO), instead of thinking about how to properly open/close the session, use a standard async context manager, awaiting __aenter__ / __aexit__:
@app.listener('before_server_start')
async def before_server_start(app, loop):
    # ...
    app.client_session = await aiohttp.ClientSession().__aenter__()

@app.listener('after_server_stop')
async def after_server_stop(app, loop):
    await app.client_session.__aexit__(None, None, None)
    # ...
3)
Pay attention to this info:
However, if the event loop is stopped before the underlying connection
is closed, a ResourceWarning: unclosed transport warning is emitted
(when warnings are enabled).
To avoid this situation, a small delay must be added before closing
the event loop to allow any open underlying connections to close.
I'm not sure it's mandatory in your case, but there's nothing bad about adding await asyncio.sleep(0) inside after_server_stop, as the documentation advises:
@app.listener('after_server_stop')
async def after_server_stop(app, loop):
    # ...
    await asyncio.sleep(0)  # http://aiohttp.readthedocs.io/en/stable/client.html#graceful-shutdown
Upd:
A class that implements __aenter__ / __aexit__ can be used as an async context manager (it can be used in an async with statement). Such a manager lets you run some actions before the inner block executes and after it. This is very similar to regular context managers, but asyncio-related. And just like a regular context manager, an async one can be driven directly (without async with) by manually awaiting __aenter__ / __aexit__.
Why do I think it's better to create/free the session using __aenter__ / __aexit__ manually instead of using close()? Because we shouldn't worry about what actually happens inside __aenter__ / __aexit__. Imagine that in a future version of aiohttp the creation of a session changes so that you need to await open(), for example. If you use __aenter__ / __aexit__ you won't need to change your code.
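To make the manual-awaiting idea concrete, a small illustrative sketch (mine, not from the original answer):

# These two spellings do the same work; the manual form is handy when
# "enter" and "exit" must happen in two different callbacks, as above.
async with aiohttp.ClientSession() as session:
    ...  # use session

session = await aiohttp.ClientSession().__aenter__()  # enter manually
...                                                   # use session
await session.__aexit__(None, None, None)             # exit manually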
It seems there is no session pool in aiohttp; I'll just post some official docs.
persistent session
Here is the persistent-session usage demo from the official site:
https://docs.aiohttp.org/en/latest/client_advanced.html#persistent-session
app.cleanup_ctx.append(persistent_session)

async def persistent_session(app):
    app['PERSISTENT_SESSION'] = session = aiohttp.ClientSession()
    yield
    await session.close()

async def my_request_handler(request):
    session = request.app['PERSISTENT_SESSION']
    async with session.get("http://python.org") as resp:
        print(resp.status)
(TODO: a full runnable demo; a hedged sketch follows.)
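To fill in that TODO, here is a minimal runnable sketch of the same pattern (my own assembly from the docs snippet above; the route and handler name are arbitrary):

import aiohttp
from aiohttp import web

async def persistent_session(app):
    # Open one shared ClientSession at startup, close it on cleanup.
    app['PERSISTENT_SESSION'] = session = aiohttp.ClientSession()
    yield
    await session.close()

async def my_request_handler(request):
    # Reuse the shared session (and its connection pool) on every request.
    session = request.app['PERSISTENT_SESSION']
    async with session.get("http://python.org") as resp:
        return web.Response(text="upstream status: {}".format(resp.status))

app = web.Application()
app.cleanup_ctx.append(persistent_session)
app.add_routes([web.get('/', my_request_handler)])

if __name__ == '__main__':
    web.run_app(app, port=8080)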
connection pool
And it has a connection pool:
https://docs.aiohttp.org/en/latest/client_advanced.html#connectors
conn = aiohttp.TCPConnector()
# conn = aiohttp.TCPConnector(limit=30)
# conn = aiohttp.TCPConnector(limit=0)            # no limit; default is 100
# conn = aiohttp.TCPConnector(limit_per_host=30)  # default is 0
session = aiohttp.ClientSession(connector=conn)
I found this question after searching on Google for how to reuse an aiohttp ClientSession instance after my code triggered this warning message: UserWarning: Creating a client session outside of coroutine is a very dangerous idea
This code may not solve the above problem, though it is related. I am new to asyncio and aiohttp, so this may not be best practice. It's the best I could come up with after reading a lot of seemingly conflicting information.
I created a class ResourceManager, taken from the Python docs, that opens a context.
The ResourceManager instance handles the opening and closing of the aiohttp ClientSession instance via the magic methods __enter__ and __exit__, with BaseScraper.set_session and BaseScraper.close_session as wrapper methods.
I was able to reuse a ClientSession instance with the following code.
The BaseScraper class also has methods for authentication. It depends on the lxml third-party package.
import asyncio
from time import time
from contextlib import contextmanager, AbstractContextManager, ExitStack

import aiohttp
import lxml.html


class ResourceManager(AbstractContextManager):
    # Code taken from Python docs: 29.6.2.4. of https://docs.python.org/3.6/library/contextlib.html
    def __init__(self, scraper, check_resource_ok=None):
        self.acquire_resource = scraper.acquire_resource
        self.release_resource = scraper.release_resource
        if check_resource_ok is None:
            def check_resource_ok(resource):
                return True
        self.check_resource_ok = check_resource_ok

    @contextmanager
    def _cleanup_on_error(self):
        with ExitStack() as stack:
            stack.push(self)
            yield
            # The validation check passed and didn't raise an exception
            # Accordingly, we want to keep the resource, and pass it
            # back to our caller
            stack.pop_all()

    def __enter__(self):
        resource = self.acquire_resource()
        with self._cleanup_on_error():
            if not self.check_resource_ok(resource):
                msg = "Failed validation for {!r}"
                raise RuntimeError(msg.format(resource))
        return resource

    def __exit__(self, *exc_details):
        # We don't need to duplicate any of our resource release logic
        self.release_resource()


class BaseScraper:
    login_url = ""
    login_data = dict()  # dict of key, value pairs to fill the login form
    loop = asyncio.get_event_loop()

    def __init__(self, urls):
        self.urls = urls
        self.acquire_resource = self.set_session
        self.release_resource = self.close_session

    async def _set_session(self):
        self.session = await aiohttp.ClientSession().__aenter__()

    def set_session(self):
        set_session_attr = self.loop.create_task(self._set_session())
        self.loop.run_until_complete(set_session_attr)
        return self  # variable after "as" becomes instance of BaseScraper

    async def _close_session(self):
        await self.session.__aexit__(None, None, None)

    def close_session(self):
        close_session = self.loop.create_task(self._close_session())
        self.loop.run_until_complete(close_session)

    def __call__(self):
        fetch_urls = self.loop.create_task(self._fetch())
        return self.loop.run_until_complete(fetch_urls)

    async def _get(self, url):
        async with self.session.get(url) as response:
            result = await response.read()
        return url, result

    async def _fetch(self):
        tasks = (self.loop.create_task(self._get(url)) for url in self.urls)
        start = time()
        results = await asyncio.gather(*tasks)
        print(
            "time elapsed: {} seconds \nurls count: {}".format(
                time() - start, len(self.urls)
            )
        )
        return results

    @property
    def form(self):
        """Create and return form for authentication."""
        form = aiohttp.FormData(self.login_data)
        get_login_page = self.loop.create_task(self._get(self.login_url))
        url, login_page = self.loop.run_until_complete(get_login_page)
        login_html = lxml.html.fromstring(login_page)
        hidden_inputs = login_html.xpath(r'//form//input[@type="hidden"]')
        login_form = {x.attrib["name"]: x.attrib["value"] for x in hidden_inputs}
        for key, value in login_form.items():
            form.add_field(key, value)
        return form

    async def _login(self, form):
        async with self.session.post(self.login_url, data=form) as response:
            if response.status != 200:
                response.raise_for_status()
            print("logged into {}".format(self.login_url))
            await response.release()

    def login(self):
        post_login_form = self.loop.create_task(self._login(self.form))
        self.loop.run_until_complete(post_login_form)


if __name__ == "__main__":
    urls = ("http://example.com",) * 10
    base_scraper = BaseScraper(urls)
    with ResourceManager(base_scraper) as scraper:
        for url, html in scraper():
            print(url, len(html))

how to run a remote command with telnetlib3 on python asyncio

I'm trying to write a simple telnet client that just runs a single command on a remote box using telnet. This needs to run over asyncio, as other tasks are monitored at the same time under that framework.
I got it almost working with the code below, which I tweaked from the telnet-client in the telnetlib3 library, except that it does not return. I've had a hard time trying to figure out what this protocol.waiter_closed is all about.
In any case, how do I need to tweak this code so that it returns once the command has been dealt with on the remote end?
Thanks
#!/usr/bin/env python3

import logging
import asyncio
import telnetlib3

# just to check that connection is thrown away
class MyClient(telnetlib3.TelnetClient):
    def connection_lost(self, *args):
        print("connection lost on client {} - args={}".format(self, args))

@asyncio.coroutine
def register_telnet_command(loop, Client, host, port, command):
    transport, protocol = yield from loop.create_connection(Client, host, port)
    print("{} async connection OK for command {}".format(host, command))

    def send_command():
        EOF = chr(4)
        EOL = '\n'
        # adding newline and end-of-file for this simple example
        command_line = command + EOL + EOF
        protocol.stream.write(protocol.shell.encode(command_line))

    # one shot invocation of the command
    loop.call_soon(send_command)
    # what does this do exactly ?
    yield from protocol.waiter_closed

port = 23
hostname = "fit01"

def main():
    def ClientFactory():
        return MyClient(encoding='utf-8', shell=telnetlib3.TerminalShell)

    # create as many clients as we have hosts
    loop = asyncio.get_event_loop()
    loop.run_until_complete(
        register_telnet_command(loop, ClientFactory,
                                host=hostname, port=port,
                                command="id"))
    return 0

main()
Sorry, my mistake: redefining connection_lost without calling the code from telnetlib3's connection_lost is a bad idea, since that is the code that populates waiter_closed.
I should have done:
class MyClient(telnetlib3.TelnetClient):
    def connection_lost(self, *args):
        print("connection lost on client {} - args={}".format(self, args))
        super().connection_lost(*args)

WebSockets Proxy for IPython Notebook using Flask and WebSocket-for-Python (ws4py)

Inspired by ipython-notebook-proxy and based on ipydra, I am extending the latter to support more complex user authentication as well as a proxy, because in my use case only port 80 can be exposed.
I am using flask-sockets for the gunicorn worker, but I am having trouble proxying WebSockets. IPython uses three different WebSocket connections, /shell, /stdin, and /iopub, but I am only able to get the 101 Switching Protocols response for the first two. And /stdin receives a Connection Close frame as soon as it is created.
This is the code excerpt in question:
# Flask imports...
from werkzeug import LocalProxy
from ws4py.client.geventclient import WebSocketClient

# I use my own LocalProxy because flask-sockets does not support Werkzeug Rules
websocket = LocalProxy(lambda: request.environ.get('wsgi.websocket', None))
websockets = {}

PROXY_DOMAIN = "127.0.0.1:8888"  # IPython host and port

methods = ["GET", "POST", "PUT", "DELETE", "HEAD", "OPTIONS", "PATCH",
           "CONNECT"]

@app.route('/', defaults={'url': ''}, methods=methods)
@app.route('/<path:url>', methods=methods)
def proxy(url):
    with app.test_request_context():
        if websocket:
            while True:
                data = websocket.receive()
                websocket_url = 'ws://{}/{}'.format(PROXY_DOMAIN, url)
                if websocket_url not in websockets:
                    client = WebSocketClient(websocket_url,
                                             protocols=['http-only', 'chat'])
                    websockets[websocket_url] = client
                else:
                    client = websockets[websocket_url]
                client.connect()
                if data:
                    client.send(data)
                client_data = client.receive()
                if client_data:
                    websocket.send(client_data)
        return Response()
I also tried to create my own WebSocket proxy class, but it doesn't work either.
class WebSocketProxy(WebSocketClient):
    def __init__(self, to, *args, **kwargs):
        self.to = to
        print(("Proxy to", self.to))
        super(WebSocketProxy, self).__init__(*args, **kwargs)

    def opened(self):
        m = self.to.receive()
        print("<= %d %s" % (len(m), str(m)))
        self.send(m)

    def closed(self, code, reason):
        print(("Closed down", code, reason))

    def received_message(self, m):
        print("=> %d %s" % (len(m), str(m)))
        self.to.send(m)
The regular request-response cycle works like a charm, so I removed that code. If you are interested, the complete code is hosted in hidra.
I run the server with
$ gunicorn -k flask_sockets.worker hidra:app
Here is my solution(ish). It is crude, but should serve as a starting point for building a websocket proxy. The full code is available in the unreleased project pyramid_notebook.
- This uses ws4py and uWSGI instead of gunicorn.
- We use uWSGI's internal mechanism to run the downstream websocket message loop. There is nothing like WSGI for websockets in the Python world (yet?), but it looks like every web server implements its own mechanism.
- A custom ws4py ProxyConnection is created which combines the ws4py event loop with the uWSGI event loop.
- The thing is started, and messages start to fly around.
- This uses a Pyramid request (based on WebOb), but that really shouldn't matter; the code should be fine for any Python WSGI app with little modification.
- As you can see, this does not really take advantage of asynchronicity; it just sleep()s if there is nothing coming in from the socket.
Code goes here:
"""UWSGI websocket proxy."""
from urllib.parse import urlparse, urlunparse
import logging
import time

import uwsgi
from ws4py import WS_VERSION
from ws4py.client import WebSocketBaseClient

#: HTTP headers we need to proxy to the upstream websocket server when the Connection: upgrade is performed
CAPTURE_CONNECT_HEADERS = ["sec-websocket-extensions", "sec-websocket-key", "origin"]

logger = logging.getLogger(__name__)


class ProxyClient(WebSocketBaseClient):
    """Proxy between upstream WebSocket server and downstream UWSGI."""

    @property
    def handshake_headers(self):
        """
        List of headers appropriate for the upgrade
        handshake.
        """
        headers = [
            ('Host', self.host),
            ('Connection', 'Upgrade'),
            ('Upgrade', 'websocket'),
            ('Sec-WebSocket-Key', self.key.decode('utf-8')),
            # Origin is proxied from the downstream server, don't set it twice
            # ('Origin', self.url),
            ('Sec-WebSocket-Version', str(max(WS_VERSION)))
        ]
        if self.protocols:
            headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols)))
        if self.extra_headers:
            headers.extend(self.extra_headers)
        logger.info("Handshake headers: %s", headers)
        return headers

    def received_message(self, m):
        """Push upstream messages to downstream."""
        # TODO: No support for binary messages
        m = str(m)
        logger.debug("Incoming upstream WS: %s", m)
        uwsgi.websocket_send(m)
        logger.debug("Send ok")

    def handshake_ok(self):
        """
        Called when the upgrade handshake has completed
        successfully.
        Starts the client's thread.
        """
        self.run()

    def terminate(self):
        raise RuntimeError("NO!")
        # NOTE: unreachable while the RuntimeError above is in place
        super(ProxyClient, self).terminate()

    def run(self):
        """Combine the async uwsgi message loop with the ws4py message loop.

        TODO: This could do some serious optimizations and behave asynchronously correct instead of just sleep().
        """
        self.sock.setblocking(False)
        try:
            while not self.terminated:
                logger.debug("Doing nothing")
                time.sleep(0.050)

                logger.debug("Asking for downstream msg")
                msg = uwsgi.websocket_recv_nb()
                if msg:
                    logger.debug("Incoming downstream WS: %s", msg)
                    self.send(msg)

                s = self.stream

                self.opened()

                logger.debug("Asking for upstream msg")
                try:
                    bytes = self.sock.recv(self.reading_buffer_size)
                    if bytes:
                        self.process(bytes)
                except BlockingIOError:
                    pass

        except Exception as e:
            logger.exception(e)
        finally:
            logger.info("Terminating WS proxy loop")
            self.terminate()


def serve_websocket(request, port):
    """Start UWSGI websocket loop and proxy."""
    env = request.environ

    # Send HTTP response 101 Switch Protocol downstream
    uwsgi.websocket_handshake(env['HTTP_SEC_WEBSOCKET_KEY'], env.get('HTTP_ORIGIN', ''))

    # Map the websocket URL to the upstream localhost:4000x Notebook instance
    parts = urlparse(request.url)
    parts = parts._replace(scheme="ws", netloc="localhost:{}".format(port))
    url = urlunparse(parts)

    # Proxy initial connection headers
    headers = [(header, value) for header, value in request.headers.items() if header.lower() in CAPTURE_CONNECT_HEADERS]

    logger.info("Connecting to upstream websockets: %s, headers: %s", url, headers)

    ws = ProxyClient(url, headers=headers)
    ws.connect()

    # Happens only if exceptions fly around
    return ""
