How can I run asyncio library code on top of Twisted's asyncioreactor? - interop

I've managed to import/install Twisted's asyncioreactor and execute a trivial asynchronous function:
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred

async def sleepy(reactor):
    print("SLEEPING")
    await task.deferLater(reactor, 3.0, lambda: None)
    print("done sleep")
    return 42

@task.react
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
I'd like to mix asyncio library code into this, for instance asyncio.sleep. I've tried the following:
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred
import asyncio

async def sleepy(reactor):
    print("SLEEPING")
    await asyncio.sleep(3)
    print("done sleep")
    return 42

@task.react
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
which produces the following error:
$ python test.py
SLEEPING
main function encountered error
Traceback (most recent call last):
File "test.py", line 16, in <module>
@task.react
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/task.py", line 908, in react
finished = main(_reactor, *argv)
File "test.py", line 18, in main
d = ensureDeferred(sleepy(reactor))
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/defer.py", line 823, in ensureDeferred
return _inlineCallbacks(None, coro, Deferred())
--- <exception caught here> ---
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/defer.py", line 1301, in _inlineCallbacks
result = g.send(result)
File "test.py", line 11, in sleepy
await asyncio.sleep(3)
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/asyncio/tasks.py", line 476, in sleep
return (yield from future)
builtins.AssertionError: yield from wasn't used with future
Fair enough, thought I, so I tried swapping await asyncio.sleep(3) with await ensureDeferred(asyncio.sleep(3)) and await asyncio.ensure_future(asyncio.sleep(3)), but I get exactly the same error.
How can I schedule an aio coroutine (and/or Future) to run on the same event loop as is used by asyncioreactor?

so I tried swapping await asyncio.sleep(3) with await ensureDeferred(asyncio.sleep(3)) and await asyncio.ensure_future(asyncio.sleep(3))
You were almost there; you should combine the two, and use Deferred.fromFuture instead of ensureDeferred:
await Deferred.fromFuture(asyncio.ensure_future(asyncio.sleep(3)))
The rule is, async def functions running in Twisted context (with ensureDeferred) can await only on Deferred, and async def functions running in asyncio context (with ensure_future) only on asyncio Future (one can always await on other coroutine objects (results from async def function calls), but the chain will ultimately lead to Deferred/Future). To convert from asyncio Future to Deferred use Deferred.fromFuture and to convert to asyncio Future use Deferred.asFuture.
One can switch from one context to the other and back. In this (contrived) example we start with sleepy_twisted running in Twisted context, which does a Twisted sleep, then it switches to asyncio context to run sleepy_asyncio, which does an asyncio sleep, and then it switches back to Twisted context for another Twisted sleep:
from twisted.internet import asyncioreactor, task
from twisted.internet.defer import inlineCallbacks, ensureDeferred, Deferred
import asyncio

asyncioreactor.install()

async def sleepy_asyncio(reactor):
    print("Sleep 2")
    await asyncio.sleep(1)
    print("Sleep 3")
    await Deferred.asFuture(task.deferLater(reactor, 1, lambda: None), loop=asyncio.get_running_loop())

async def sleepy_twisted(reactor):
    print("Sleep 1")
    await task.deferLater(reactor, 1, lambda: None)
    await Deferred.fromFuture(asyncio.ensure_future(sleepy_asyncio(reactor)))
    print("done")

@task.react
def main(reactor):
    return ensureDeferred(sleepy_twisted(reactor))

Wow, you've come across an interesting corner case! By using asyncio.sleep() you have triggered some unusual behavior: I think you might have uncovered a bug in Twisted's integration of the Python 3 asyncioreactor with async/await. You may wish to follow up with the Twisted developers on the Twisted mailing list.
I'm not 100% sure, but here are my thoughts.
The implementation of asyncio.sleep() is tightly coupled to the Python 3 asyncio implementation: it uses asyncio.Future (which is similar to Twisted's Deferred), and it uses get_event_loop() (which is similar to Twisted's reactor).
asyncio.sleep is implemented like this:
@coroutine
def sleep(delay, result=None, *, loop=None):
    """Coroutine that completes after a given time (in seconds)."""
    if delay == 0:
        yield
        return result
    if loop is None:
        loop = events.get_event_loop()
    future = loop.create_future()
    h = future._loop.call_later(delay,
                                futures._set_result_unless_cancelled,
                                future, result)
    try:
        return (yield from future)
    finally:
        h.cancel()
I changed your code example slightly to pass Twisted's
asyncioreactor event loop into asyncio.sleep():
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import reactor
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred
import asyncio

async def sleepy(reactor):
    print("SLEEPING")
    await asyncio.sleep(3, loop=reactor._asyncioEventloop)
    print("done sleep")
    return 42

@task.react
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
I still got the same error as you: builtins.AssertionError: yield from wasn't used with future
The stack trace looks like:
main function encountered error
Traceback (most recent call last):
File "b.py", line 16, in <module>
@task.react
File "/Users/crodrigues/twisted8/src/twisted/internet/task.py", line 908, in react
finished = main(_reactor, *argv)
File "b.py", line 19, in main
d = ensureDeferred(sleepy(reactor))
File "/Users/crodrigues/twisted8/src/twisted/internet/defer.py", line 823, in ensureDeferred
return _inlineCallbacks(None, coro, Deferred())
--- <exception caught here> ---
File "/Users/crodrigues/twisted8/src/twisted/internet/defer.py", line 1301, in _inlineCallbacks
result = g.send(result)
File "b.py", line 12, in sleepy
await asyncio.sleep(3, loop=reactor._asyncioEventloop)
File "/usr/local/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/tasks.py", line 478, in sleep
return (yield from future)
builtins.AssertionError: yield from wasn't used with future
I think asyncio.sleep() is a coroutine that is supposed to run to completion on the asyncio loop, but that is not happening here, hence the assertion.
I think the problem is being introduced by result = g.send(result).
I'm not sure you can send() to a coroutine like this and expect it to work.
I advise you to ask on the Twisted mailing list to get more detailed feedback.

Related

Simple syntax to asynchronously get access to MODBUS register

I am trying to run three simple tasks in parallel using asyncio and sharing global variables.
Two of them are working perfectly. One reads websockets (async with websockets.connect("ws://192.168.1.137:9000") as websocket:), and another one accesses IO via a dedicated library.
I have not found any solution or the right syntax for getting AsyncModbusTCPClient running within the third task (sync MODBUS is easy to implement but would not fit within an async task).
The following would just block everything:
async def get_var_modbus(loop):
    client = await AsyncModbusTCPClient(schedulers.ASYNC_IO, host="192.168.1.200", loop=loop, port=502, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)
Full code below
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient
import json
import time
from pypx800v5 import *
import aiohttp
import asyncio
import requests_async as requests
import numpy as np
import logging
from datetime import datetime
import websockets
import contextvars
import warnings

warnings.filterwarnings("ignore", category=DeprecationWarning)

# SDM230 via MODBUS
SDM230A=["Voltage","Current","Active Power","Apparent Power","Reactive Power","Power Factor","Phase Angle","Frequency","Import Active Energy","Export Active Energy","Import Reactive Energy","Export Reactive Energy"]
SDM230B=["Total system power demand","Maximum total system power demand","Current system positive power demand","Maximum system positive power demand","Current system reverse power demand","Maximum system reverse power demand"]
SDM230C=["Current demand","Maximum current Demand"]
SDM230D=["Total Active Energy","Total Reactive Energy"]
SDM230Labels=SDM230A+SDM230B+SDM230C+SDM230D
SDM230Var=["Voltage","Current","ActivePower","ApparentPower","ReactivePower","PowerFactor","PhaseAngle","Frequency","ImportActiveEnergy","ExportActiveEnergy","ImportReactiveEnergy","ExportReactiveEnergy","TotalSysPowerDemand","MaxTotalSysPowerDemand","CurrentSysPositivePowerDemand","MaxSysPositivePowerDemand","CurrentSysReversePowerDemand","MaxSysReversePowerDemand","CurrentDemand","MaximumCurrentDemand","TotalActiveEnergy","TotalReactiveEnergy"]
VoltageAdd=262199
CurrentAdd=262200
ActivePowerAdd=262201
ImportActiveEnergyAdd=262202

# inversor via Websockets
TempChaudiereAdd=262198
PuissMaxChauffeauAdd=262193
WREDAdd=262194
PacBat6TLAdd=262195
totPVAdd=262196
SOC6TLAdd=262197

# shared variables
WRED= 0
PacBat6TL=0
PacPV6TL=0
Pac6TLM=0
SOC6TL=0
PAC6TL=0
totPV=0

# --------------------------------------------------------------------------- #
# configure the client logging
# --------------------------------------------------------------------------- #
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)

async def get_var_modbus(loop):
    client = await AsyncModbusTCPClient(schedulers.ASYNC_IO, host="192.168.1.200", port=502, loop=loop, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)

async def get_var_socket():
    global WRED
    global PacBat6TL
    global PacPV6TL
    global Pac6TLM
    global SOC6TL
    global PAC6TL
    global totPV
    print("")
    i=0
    dict={}
    async with websockets.connect("ws://192.168.1.137:9000") as websocket:
        while True:
            i=i+1
            data=(await websocket.recv())
            try:
                message=json.loads(data)
            except:
                break
            if "product" in message:
                if message["product"]=="ems":
                    print(message)
            if "WRED" in message:
                WRED=message["WRED"]
            if "PacBat6TL" in message:
                PacBat6TL=message["PacBat6TL"]
            if "PacPV6TL" in message:
                PacPV6TL=message["PacPV6TL"]
                totPV=PacPV6TL
            if "Pac6TLM" in message:
                Pac6TLM=message["Pac6TLM"]
                totPV=totPV+Pac6TLM
            if "SOC6TL" in message:
                SOC6TL=message["SOC6TL"]
            if "PAC6TL" in message:
                PAC6TL=message["PAC6TL"]

async def get_ipx_update():
    print("")
    i=0
    dict={}
    async with IPX800(host='192.168.1.139', api_key='API') as ipx:
        await ipx.init_config()
        while True:
            try:
                await ipx.update_ana(WREDAdd,WRED)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(PacBat6TLAdd,PacBat6TL)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(totPVAdd,totPV)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(SOC6TLAdd,SOC6TL)
            except:
                print("ERROR")
            await asyncio.sleep(1)

def main():
    loop = asyncio.get_event_loop()
    loop.create_task(get_var_socket())
    loop.create_task(get_ipx_update())
    loop.create_task(get_var_modbus(loop))
    loop.run_forever()

if __name__ == '__main__':
    try:
        main()
    except Exception as f:
        print('main error: ', f)
        time.sleep(3)
Using the async_modbus library (built on top of umodbus, https://pypi.org/project/async-modbus/) it works very well.
I have used this library with success.
Please find the syntax below:
from async_modbus import AsyncTCPClient  # assumed import for the async_modbus client
import asyncio

async def get_var_modbus(loop):
    reader, writer = await asyncio.open_connection('192.168.1.200', 502)
    client = AsyncTCPClient((reader, writer))
    while True:
        print("Reading holding registers ADAM3066")
        reply = await client.read_holding_registers(slave_id=3, starting_address=0, quantity=8)
        print("reply:", reply)
        await asyncio.sleep(1)
OUTPUT:
Reading holding registers ADAM3066
reply: [65535 65535 65535 65535 289 65535 65535 65535]
The ADAM 3066 is an RS-485 MODBUS RTU 1-wire interface connected to a MODBUS TCP gateway at 192.168.1.200. I have one sensor connected on input 5 of the ADAM 3066, which returns a temperature of 28.9 degrees C.

How to call async method from greenlet (playwright)

My framework (Locust, https://github.com/locustio/locust) is based on gevent and greenlets. But I would like to leverage Playwright (https://playwright.dev/python/), which is built on asyncio.
Naively using Playwright's sync API doesn't work and gives an exception:
playwright._impl._api_types.Error: It looks like you are using Playwright Sync API inside the asyncio loop.
Please use the Async API instead.
I'm looking for some kind of best practice on how to use async in combination with gevent.
I've tried a couple of different approaches but I don't know if I'm close or if what I'm trying to do is even possible (I have some experience with gevent, but haven't really used asyncio before).
Edit: I kind of have something working now (I've removed Locust and just directly spawned some greenlets to make it easier to understand). Is this as good as it gets, or is there a better solution?
import asyncio
import threading
from playwright.async_api import async_playwright
import gevent

def thr(i):
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.run_until_complete(do_stuff(i))
    loop.close()

async def do_stuff(i):
    playwright = await async_playwright().start()
    browser = await playwright.chromium.launch(headless=False)
    page = await browser.new_page()
    await page.wait_for_timeout(5000)
    await page.goto(f"https://google.com")
    await page.close()
    print(i)

def green(i):
    t = threading.Thread(target=thr, args=(i,))
    t.start()
    # t.join() # joining doesnt work, but I couldnt be bothered right now :)

g1 = gevent.spawn(green, 1)
g2 = gevent.spawn(green, 2)
g1.join()
g2.join()
Inspired by @user4815162342's comment, I went with something like this:
from playwright.async_api import async_playwright  # need to import this first
from gevent import monkey, spawn
import asyncio
import gevent

monkey.patch_all()

loop = asyncio.new_event_loop()

async def f():
    print("start")
    playwright = await async_playwright().start()
    browser = await playwright.chromium.launch(headless=True)
    context = await browser.new_context()
    page = await context.new_page()
    await page.goto(f"https://www.google.com")
    print("done")

def greeny():
    while True:  # and not other_exit_condition
        future = asyncio.run_coroutine_threadsafe(f(), loop)
        while not future.done():
            gevent.sleep(1)

greenlet1 = spawn(greeny)
greenlet2 = spawn(greeny)

loop.run_forever()
The actual implementation will end up in Locust some day, probably after some optimization (reusing the browser instance, etc.).
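As a rough, hedged sketch of that browser-reuse optimization (get_browser and the module-level globals are made-up names, not Playwright or Locust API), one browser could be launched lazily on the asyncio loop and shared, with each coroutine getting its own isolated context; the greeny()/run_coroutine_threadsafe scaffolding above would stay the same:
from playwright.async_api import async_playwright
import asyncio

_browser = None
_browser_lock = None

async def get_browser():
    # lazily launch a single shared browser on the asyncio loop
    global _browser, _browser_lock
    if _browser_lock is None:
        _browser_lock = asyncio.Lock()
    async with _browser_lock:
        if _browser is None:
            playwright = await async_playwright().start()
            _browser = await playwright.chromium.launch(headless=True)
    return _browser

async def f():
    browser = await get_browser()
    context = await browser.new_context()  # isolated context per task
    page = await context.new_page()
    await page.goto("https://www.google.com")
    await context.close()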
Here's a simple way to integrate asyncio and gevent:
Run an asyncio loop in a dedicated thread
Use asyncio.run_coroutine_threadsafe() to run a coroutine
Use gevent.event.Event to wait until the coroutine resolves
import asyncio
import threading
import gevent
import gevent.event

loop = asyncio.new_event_loop()
loop_thread = threading.Thread(target=loop.run_forever, daemon=True)
loop_thread.start()

async def your_coro():
    ...  # your async code here

def wait_until_complete(coro):
    future = asyncio.run_coroutine_threadsafe(coro, loop)
    event = gevent.event.Event()
    future.add_done_callback(lambda _: event.set())
    event.wait()
    return future.result()

result = wait_until_complete(your_coro())

How can I build a list of async tasks with argument for AsyncHTMLSession().run?

From the documentation I have this example, which I've tested and which works:
from requests_html import AsyncHTMLSession

asession = AsyncHTMLSession()

async def get_pythonorg():
    r = await asession.get('https://python.org/')

async def get_reddit():
    r = await asession.get('https://reddit.com/')

async def get_google():
    r = await asession.get('https://google.com/')

result = asession.run(get_pythonorg, get_reddit, get_google)
But what if my URLs are variable? I'd like to do this:
from requests_html import AsyncHTMLSession

urls = ('https://python.org/', 'https://reddit.com/', 'https://google.com/')

asession = AsyncHTMLSession()

async def get_url(url):
    r = await asession.get(url)

tasks = []
for url in urls:
    tasks.append(get_url(url=url))

result = asession.run(*tasks)
but I get:
Traceback (most recent call last):
  File "./test.py", line 17, in <module>
    result = asession.run(*tasks)
  File "/home/deanresin/.local/lib/python3.7/site-packages/requests_html.py", line 772, in run
    asyncio.ensure_future(coro()) for coro in coros
  File "/home/deanresin/.local/lib/python3.7/site-packages/requests_html.py", line 772, in <listcomp>
    asyncio.ensure_future(coro()) for coro in coros
TypeError: 'coroutine' object is not callable
sys:1: RuntimeWarning: coroutine 'get_url' was never awaited
TL;DR:
It is because you are passing coroutine objects and not coroutine functions.
You can do:
from requests_html import AsyncHTMLSession

urls = ('https://python.org/', 'https://reddit.com/', 'https://google.com/')

asession = AsyncHTMLSession()

async def get_url(url):
    r = await asession.get(url)
    # if you want the async JavaScript-rendered page:
    await r.html.arender()
    return r

all_responses = asession.run(*[lambda url=url: get_url(url) for url in urls])
Explanations:
The error is coming from result = asession.run(*tasks), so let's look at the source code of AsyncHTMLSession.run():
def run(self, *coros):
    """ Pass in all the coroutines you want to run, it will wrap each one
        in a task, run it and wait for the result. Return a list with all
        results, this is returned in the same order coros are passed in. """
    tasks = [
        asyncio.ensure_future(coro()) for coro in coros
    ]
    done, _ = self.loop.run_until_complete(asyncio.wait(tasks))
    return [t.result() for t in done]
So in the following list comprehension, each coro is expected to be a callable coroutine function, not a coroutine object:
tasks = [
    asyncio.ensure_future(coro()) for coro in coros
]
But in your error you have TypeError: 'coroutine' object is not callable.
So you are passing a list of coroutine objects and not coroutine functions.
Indeed when you are doing this:
tasks = []
for url in urls:
    tasks.append(get_url(url=url))
You are making a list of coroutine objects by calling your coroutine function.
So in order to make a list of coroutine functions, you can use lambda functions like this:
[lambda url=url: get_url(url) for url in urls]
Note the url=url in order to make the url parameter accessed when the lambda is defined.
More information about this here.
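As a small standalone illustration of that late-binding behavior (this sketch is not part of the original answer):
urls = ('https://python.org/', 'https://reddit.com/', 'https://google.com/')

# Without a default argument, each lambda looks up `url` when it is *called*,
# so every one of them sees the last value of the loop variable.
late = [lambda: url for url in urls]
print([f() for f in late])   # prints the last URL three times

# With `url=url`, the current value is captured when the lambda is *defined*.
early = [lambda url=url: url for url in urls]
print([f() for f in early])  # prints each URL in order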

Python async input

I'm trying to perform some actions while getting input from a user. I found this question but both answers do not work for me.
My code:
In [1]: import asyncio

In [2]: import aioconsole

In [3]: async def test():
   ...:     await asyncio.sleep(5)
   ...:     await aioconsole.ainput('Is this your line? ')
   ...:     await asyncio.sleep(5)

In [4]: asyncio.run(test())  # sleeping, input, sleeping (synchronously)
I'm expecting that the input will be accessible during the sleeping (or simple counting, for example), but that's not happening.
What am I doing wrong?
What am I doing wrong?
You used await, which (as the name implies) means "wait". If you want things to happen at the same time, you need to tell them to run in the background, e.g. using asyncio.create_task() or concurrently, e.g. using asyncio.gather(). For example:
async def say_hi(message):
    await asyncio.sleep(1)
    print(message)

async def test():
    _, response, _ = await asyncio.gather(
        say_hi("hello"),
        aioconsole.ainput('Is this your line? '),
        say_hi("world"),
    )
    print("response was", response)

asyncio.run(test())
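For completeness, a hedged sketch of the asyncio.create_task() variant mentioned above (it reuses the say_hi helper from the example and assumes the same imports): the background task keeps running while ainput() waits for the user.
async def test_with_task():
    # start say_hi() in the background, then read input while it runs
    background = asyncio.create_task(say_hi("hello"))
    response = await aioconsole.ainput('Is this your line? ')
    await background
    print("response was", response)

asyncio.run(test_with_task())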

Asyncio script performs slowly, similar to sync script

I'm writing an asyncio script to retrieve stock bars data from Interactive Brokers via the ib_insync library.
While I have the script working, the performance is similar to a serial script. I was hoping to see a drastic improvement in speed. This code will be used in production.
I am new to asyncio and feel like I'm missing an important element. Below is the full script. I would very much appreciate assistance in speeding this up. Thanks.
import asyncio
import ib_insync as ibi
import nest_asyncio
import pandas as pd

nest_asyncio.apply()

class App:
    async def run(self, symbols):
        print(f"1 start run: {symbols}")
        self.ib = ibi.IB()
        with await self.ib.connectAsync("127.0.0.1", "****", clientId="****"):
            contracts = [ibi.Stock(symbol, "SMART", "USD") for symbol in symbols]
            bars_dict = dict()
            print(f"2 start loop: {symbols}")
            for contract in contracts:
                bars = await self.ib.reqHistoricalDataAsync(
                    contract,
                    endDateTime="",
                    durationStr="1 M",
                    barSizeSetting="1 day",
                    whatToShow="ADJUSTED_LAST",
                    useRTH=True,
                )
                # Convert to dataframes.
                bars_dict[contract.symbol] = ibi.util.df(bars)
            print(f"3 End bars: {symbols}")
            return bars_dict

    async def main(self):
        res = await asyncio.gather(self.run(self.sp500(0, 100)))
        return res

    def stop(self):
        self.ib.disconnect()

    def sp500(self, start=None, end=10):
        payload = pd.read_html(
            "https://en.wikipedia.org/wiki/List_of_S%26P_500_companies"
        )
        first_table = payload[0]
        sp500 = first_table["Symbol"].sort_values().to_list()
        return sp500[start:end]

if __name__ == "__main__":
    import time

    start = time.time()
    app = App()
    try:
        print(f"START CALL")
        res = asyncio.run(app.main())
        print(f"END CALL")
    except (KeyboardInterrupt, SystemExit):
        app.stop()
    for ticker, bars in res[0].items():
        print(f"{ticker}\n{bars}")
    print(f"Total time: {(time.time() - start)}")
Your script is running in sequence. The call to asyncio.gather() in main is useless because it is invoked with just one coroutine. You're supposed to call it with multiple coroutines to have them run in parallel.
For example, you could remove the asyncio.gather() from main (just await self.run(self.sp500(0, 100)) there) and instead use it to parallelize calls to reqHistoricalDataAsync:
class App:
    async def run(self, symbols):
        print(f"1 start run: {symbols}")
        self.ib = ibi.IB()
        with await self.ib.connectAsync("127.0.0.1", "****", clientId="****"):
            contracts = [ibi.Stock(symbol, "SMART", "USD") for symbol in symbols]
            print(f"2 start loop: {symbols}")
            all_bars = await asyncio.gather(*[
                self.ib.reqHistoricalDataAsync(
                    contract,
                    endDateTime="",
                    durationStr="1 M",
                    barSizeSetting="1 day",
                    whatToShow="ADJUSTED_LAST",
                    useRTH=True,
                )
                for contract in contracts
            ])
            bars_dict = {}
            for contract, bars in zip(contracts, all_bars):
                # Convert to dataframes.
                bars_dict[contract.symbol] = ibi.util.df(bars)
            print(f"3 End bars: {symbols}")
            return bars_dict
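If the data source limits how many requests may be in flight at once, a hedged variation (not part of the original answer; fetch_bars and fetch_all are made-up helper names) caps concurrency with an asyncio.Semaphore while keeping the gather() structure:
import asyncio

async def fetch_bars(ib, contract, sem):
    # hypothetical helper: cap concurrent historical-data requests
    async with sem:
        return await ib.reqHistoricalDataAsync(
            contract,
            endDateTime="",
            durationStr="1 M",
            barSizeSetting="1 day",
            whatToShow="ADJUSTED_LAST",
            useRTH=True,
        )

async def fetch_all(ib, contracts, limit=10):
    sem = asyncio.Semaphore(limit)  # tune the limit to the provider's pacing rules
    return await asyncio.gather(*[fetch_bars(ib, c, sem) for c in contracts])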
