What happens when await is called on synchronous methods? - python-asyncio

I have synchronous methods for uploading and downloading files (represented by the sync_wait method). I want to do that asynchronously and endlessly, so that I have parallel executions of uploaders and downloaders running at the same time, forever. I achieved this as follows:
Running this code:
import time, asyncio
from functools import wraps, partial


# https://stackoverflow.com/a/50450553/3026886
def to_async(func):
    """Decorator: run the wrapped blocking callable in the default executor
    so that calling it yields an awaitable that doesn't block the loop."""
    @wraps(func)  # restored: the scrape mangled "@wraps" into "#wraps"
    async def run(*args, **kwargs):
        # partial() binds the original arguments; run_in_executor off-loads
        # the blocking call to a worker thread.
        return await asyncio.get_event_loop().run_in_executor(
            None, partial(func, *args, **kwargs))
    return run


@to_async  # restored: the scrape mangled "@to_async" into "#to_async"
def sync_wait(msg):
    """Stand-in for a blocking upload/download: sleeps *msg* seconds."""
    time.sleep(msg)


async def producer(n, queue):
    """Endlessly 'upload' (sleep), then hand the item to the consumers."""
    while True:
        msg = .2
        await sync_wait(msg)
        print(f'{n}p')
        await queue.put(msg)


async def consumer(n, queue):
    """Endlessly take an item from the queue and 'download' it (sleep)."""
    while True:
        msg = await queue.get()
        print(f'{n}c')
        await sync_wait(msg)


async def main():
    # Bounded queue gives back-pressure between producers and consumers.
    # (Fixed the original's duplicated "queue = queue =" assignment.)
    queue = asyncio.Queue(10)
    producers = [producer(n, queue) for n in range(2)]
    consumers = [consumer(n, queue) for n in range(4)]
    await asyncio.gather(*(producers + consumers), return_exceptions=True)


if __name__ == "__main__":
    asyncio.run(main())
Printed this output:
1p
0p
0c
1c
1p
2c
0p
3c
1p
2c
0p
1c
1p
0c
0p
3c
1p
2c
0p
3c
1p
0c
0p
3c
...
which makes sense since I have 2 producers and 4 consumers interacting with my queue. My boss told me I didn't need the to_async decorator. But after removing only the decorator from the sync_wait definition, I got no prints at all. How can I explain this new behavior?

When you await something that cannot be awaited, things crash:
# python3 -m asyncio
asyncio REPL 3.9.9 (main, Jan 10 2022, 11:05:09)
[Clang 10.0.1 (clang-1001.0.46.4)] on darwin
Use "await" directly instead of "asyncio.run()".
Type "help", "copyright", "credits" or "license" for more information.
>>> import asyncio
>>> await time.sleep(.5) # sleeps .5 secs before failing to await
Traceback (most recent call last):
...
TypeError: object NoneType can't be used in 'await' expression
However, if you put failing things into a Task or gather them, they just fail silently until you actually await them to retrieve their result.
>>> async def fail():
... print("about to fail...")
... print(1/0)
...
>>> t = asyncio.create_task(fail())
about to fail...
>>> await t # only fails noticeably when retrieving result
Traceback (most recent call last):
...
ZeroDivisionError: division by zero
Consequently, if you put failing tasks into a gather(..., return_exceptions=True) that has an infinitely running, non-failing task, failures are never reported.

Related

Make multiprocessing.Queue accessible from asyncio [duplicate]

This question already has answers here:
FastAPI runs api-calls in serial instead of parallel fashion
(2 answers)
Is there a way to use asyncio.Queue in multiple threads?
(4 answers)
Closed 19 days ago.
The community is reviewing whether to reopen this question as of 18 days ago.
Given a multiprocessing.Queue that is filled from different Python threads, created via ThreadPoolExecutor.submit(...).
How can that Queue be accessed safely and reliably with asyncio / Trio / AnyIO (context: FastAPI)?
I am aware of Janus library, but prefer a custom solution here.
Asked (hopefully) more concisely:
How to implement the
await <something_is_in_my_multiprocessing_queue>
to have it accessible with async/await and to prevent blocking the event loop?
What synchronization mechanism in general would you suggest?
(Attention here: multiprocessing.Queue not asyncio.Queue)
Actually, I figured it out.
Given a method, that reads the mp.Queue:
def read_queue_blocking():
    """Blocking read of the next item from the multiprocessing queue.

    NOTE(review): `queue` is assumed to be a module-level mp.Queue defined
    elsewhere — confirm against the caller. Because get() blocks, call this
    only via run_in_executor / anyio.to_thread, never directly on the loop.
    """
    return queue.get()
Comment: And this is the main issue: A call to get is blocking.
We can now either
use `asyncio.loop.run_in_executor` in the asyncio event loop
( see https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.loop.run_in_executor ) or
use anyio with await anyio.to_thread.run_sync(...) to execute the blocking retrieval of data from the queue in a separate thread.
For FastAPI
@app.websocket("/ws/{client_id}")  # restored: the scrape mangled "@" into "#"
async def websocket_endpoint(websocket: WebSocket, client_id: str):
    """Relay items from the blocking mp queue to this websocket client."""
    # Hoisted out of the loop: re-importing on every iteration was wasteful.
    import anyio

    await websocket.accept()
    while True:
        # Run the blocking queue.get() in a worker thread so the event
        # loop (and every other connection) keeps running.
        queue_result = await anyio.to_thread.run_sync(read_queue_blocking)
        await websocket.send_text(f"Message text was: {queue_result}")
I reworked the answer to showcase the case where the main thread running the asyncio loop is fed with data from child processes (ProcessPoolExecutor):
from concurrent.futures import ProcessPoolExecutor
import asyncio
from random import randint
from functools import partial
def some_heavy_task(iterations: int = 10 ** 8) -> int:
    """CPU-bound stand-in: burn cycles, then return a random digit in 1..9.

    The loop length is parameterized (the default preserves the original
    workload) so cheap test runs are possible.
    """
    sum(i * i for i in range(iterations))  # result discarded on purpose: pure busy-work
    return randint(1, 9)
def callback(fut: asyncio.Future, q: asyncio.Queue) -> None:
    """callback is used instead of mp.Queue to get feed from child processes.

    Forwards a finished future's result onto the asyncio queue via the event
    loop, skipping cancelled or failed futures. `fut.name` is a custom
    attribute attached with setattr() by the producer.
    """
    loop = asyncio.get_event_loop()
    # Bug fix: check cancelled() BEFORE exception() — Future.exception()
    # raises CancelledError when called on a cancelled future.
    if not fut.cancelled() and not fut.exception():
        # call_soon keeps the queue interaction on the loop's own thread.
        loop.call_soon(q.put_nowait, f"name-{fut.name}: {fut.result()}")
async def result_picker(q: asyncio.Queue) -> None:
    """Returns results to some outer world."""
    # Runs forever; it is stopped implicitly when the program exits.
    while True:
        res = await q.get()
        # imagine it is websocket
        print(f"Result from heavy_work_producer: {res}")
        q.task_done()  # mark task as done here
async def heavy_work_producer(q: asyncio.Queue) -> None:
    """Wrapper around all multiprocessing work.

    Fans 12 CPU-bound jobs out to a 4-worker process pool; each finished
    future pushes its result onto *q* via `callback`.
    """
    loop = asyncio.get_event_loop()
    with ProcessPoolExecutor(max_workers=4) as pool:
        heavy_tasks = [loop.run_in_executor(pool, some_heavy_task) for _ in range(12)]
        # Plain loop instead of side-effect list comprehensions; also set
        # the custom `name` attribute BEFORE registering the callback so
        # the callback can never see a future without it.
        for i, fut in enumerate(heavy_tasks):
            setattr(fut, "name", i)
            fut.add_done_callback(partial(callback, q=q))
        await asyncio.gather(*heavy_tasks)
async def amain():
    """Main entrypoint of async app."""
    q = asyncio.Queue()
    # Keep a reference to the task: per the asyncio docs, tasks that are
    # only created (never stored) may be garbage-collected mid-flight.
    picker = asyncio.create_task(result_picker(q))
    await heavy_work_producer(q)
    # do not let result_picker finish when heavy_work_producer is done
    # wait all results to show
    await q.join()
    # The picker loops forever; stop it cleanly once every result is consumed.
    picker.cancel()
    print("All done.")


if __name__ == '__main__':
    asyncio.run(amain())

Simple syntax to asynchronously get access to MODBUS register

I am trying to run three simple tasks in parallel using asyncio and sharing global variables.
Two of them are working perfectly. One reads websockets (async with websockets.connect("ws://192.168.1.137:9000") as websocket:). Another one accesses IO via a dedicated library.
I did not find any solution and the good syntax for getting AsyncModbusTCPClient running within the third task (sync MODBUS is easy to implement but would not fit within async task)
The following would just block everything:
async def get_var_modbus(loop):
    # NOTE(review): this is the questioner's non-working attempt — the
    # post states this blocks everything; presumably the awaited
    # constructor does not return the protocol client this code expects.
    client = await AsyncModbusTCPClient( schedulers.ASYNC_IO,host="192.168.1.200", loop=loop, port=502, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        # Poll one input register from unit 3 once per second.
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)
Full code below
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient
import json
import time
from pypx800v5 import *
import aiohttp
import asyncio
import requests_async as requests
import numpy as np
import logging
from datetime import datetime
import websockets
import contextvars
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
# SDM230 via MODBUS
# Human-readable labels for the SDM230 energy-meter register groups.
SDM230A=["Voltage","Current","Active Power","Apparent Power","Reactive Power","Power Factor","Phase Angle","Frequency","Import Active Energy","Export Active Energy","Import Reactive Energy","Export Reactive Energy"]
SDM230B=["Total system power demand","Maximum total system power demand","Current system positive power demand","Maximum system positive power demand","Current system reverse power demand","Maximum system reverse power demand"]
SDM230C=["Current demand","Maximum current Demand"]
SDM230D=["Total Active Energy","Total Reactive Energy"]
SDM230Labels=SDM230A+SDM230B+SDM230C+SDM230D
# Identifier-safe variable names matching SDM230Labels, in the same order.
SDM230Var=["Voltage","Current","ActivePower","ApparentPower","ReactivePower","PowerFactor","PhaseAngle","Frequency","ImportActiveEnergy","ExportActiveEnergy","ImportReactiveEnergy","ExportReactiveEnergy","TotalSysPowerDemand","MaxTotalSysPowerDemand","CurrentSysPositivePowerDemand","MaxSysPositivePowerDemand","CurrentSysReversePowerDemand","MaxSysReversePowerDemand","CurrentDemand","MaximumCurrentDemand","TotalActiveEnergy","TotalReactiveEnergy"]
# IPX800 analog-channel addresses (presumably device register IDs — confirm).
VoltageAdd=262199
CurrentAdd=262200
ActivePowerAdd=262201
ImportActiveEnergyAdd=262202
# inversor via Websockets
TempChaudiereAdd=262198
PuissMaxChauffeauAdd=262193
WREDAdd=262194
PacBat6TLAdd=262195
totPVAdd=262196
SOC6TLAdd=262197
# shared variables
# Written by get_var_socket(), read by get_ipx_update(); no locking — all
# access happens on the single asyncio event-loop thread.
WRED= 0
PacBat6TL=0
PacPV6TL=0
Pac6TLM=0
SOC6TL=0
PAC6TL=0
totPV=0
# --------------------------------------------------------------------------- #
# configure the client logging
# --------------------------------------------------------------------------- #
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)
async def get_var_modbus(loop):
    # NOTE(review): per the post, this task blocks the whole program —
    # the awaited constructor presumably does not yield a usable client.
    client = await AsyncModbusTCPClient( schedulers.ASYNC_IO,host="192.168.1.200", port=502, loop=loop, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        # Poll one input register from MODBUS unit 3, once per second.
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)
async def get_var_socket():
    """Consume the inverter's websocket feed and update the shared globals.

    Each JSON message may carry any subset of the known keys; matching
    globals are updated, and totPV aggregates PacPV6TL + Pac6TLM.
    """
    global WRED
    global PacBat6TL
    global PacPV6TL
    global Pac6TLM
    global SOC6TL
    global PAC6TL
    global totPV
    print("")
    # Removed unused locals `i` and `dict` (the latter shadowed the builtin).
    async with websockets.connect("ws://192.168.1.137:9000") as websocket:
        while True:
            data = (await websocket.recv())
            try:
                message = json.loads(data)
            except json.JSONDecodeError:
                # Narrowed from a bare `except:`; only malformed JSON ends the loop.
                break
            if "product" in message:
                if message["product"] == "ems":
                    print(message)
            if "WRED" in message:
                WRED = message["WRED"]
            if "PacBat6TL" in message:
                PacBat6TL = message["PacBat6TL"]
            if "PacPV6TL" in message:
                PacPV6TL = message["PacPV6TL"]
                totPV = PacPV6TL
            if "Pac6TLM" in message:
                Pac6TLM = message["Pac6TLM"]
                totPV = totPV + Pac6TLM
            if "SOC6TL" in message:
                SOC6TL = message["SOC6TL"]
            if "PAC6TL" in message:
                PAC6TL = message["PAC6TL"]
async def get_ipx_update():
    # Push the shared global readings to the IPX800 once per second.
    # Each update is deliberately best-effort: a failed write prints
    # ERROR and is skipped so one bad call never kills the task.
    print("")
    i=0      # NOTE(review): unused
    dict={}  # NOTE(review): unused, shadows the builtin
    async with IPX800(host='192.168.1.139', api_key='API') as ipx:
        await ipx.init_config()
        while True:
            try:
                await ipx.update_ana(WREDAdd,WRED)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(PacBat6TLAdd,PacBat6TL)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(totPVAdd,totPV)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(SOC6TLAdd,SOC6TL)
            except:
                print("ERROR")
            await asyncio.sleep(1)
def main():
    """Schedule the three long-running tasks and run the loop forever."""
    loop = asyncio.get_event_loop()
    loop.create_task(get_var_socket())
    loop.create_task(get_ipx_update())
    loop.create_task(get_var_modbus(loop))
    loop.run_forever()


if __name__ == '__main__':
    try:
        main()
    except Exception as f:
        print('main error: ', f)
        # Bug fix: the file has `import time`, not `from time import sleep`,
        # so the original bare `sleep(3)` raised a NameError.
        time.sleep(3)
Using the async_modbus library (built on the top of umodbus https://pypi.org/project/async-modbus/) it works very well.
I have used this library with success.
Please find below the syntax,
async def get_var_modbus(loop):
    """Poll 8 holding registers from the ADAM-3066 gateway once per second."""
    reader, writer = await asyncio.open_connection('192.168.1.200', 502)
    # Bug fix: the original line carried leftover text after the call —
    # "AsyncTCPClient((reader, writer))loop=loop, port=502, ..." — which
    # is a syntax error; the constructor only takes the stream pair.
    client = AsyncTCPClient((reader, writer))
    while True:
        print("Reading holding registers ADAM3066")
        reply = await client.read_holding_registers(slave_id=3, starting_address=0, quantity=8)
        print("reply:", reply)
        await asyncio.sleep(1)
OUTPUT:
Reading holding registers ADAM3066
reply: [65535 65535 65535 65535 289 65535 65535 65535]
The ADAM 3066 is a RS-485 MODBUS RTU 1-WIRE interface connected to a MODBUS TCP gateway at 192.168.1.200, I have one sensor connected on the input 5 of ADAM 3066 which return a temperature of 28.9 degrees C

Proper way to retrieve the result of tasks in asyncio

I am trying to examine the wait method of the asyncio module. I have this primitive test app to serve as a playground:
import asyncio


async def foo():
    return 1  # datetime.datetime.now()


async def entry_point():
    """Print the result(s) of whichever task(s) finish first."""
    # Three DISTINCT tasks: the original `[foo()]*3` repeated one coroutine
    # object, and a coroutine cannot be awaited more than once.
    tasks = [asyncio.create_task(foo()) for _ in range(3)]
    done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
    for obj in done:
        # Task.result() is the public accessor (re-raises on failure);
        # never reach into the private `_result` attribute.
        print(obj.result())


asyncio.run(entry_point())
Basically, my goal is to get the result of the first completed task. And here's I am a bit confused in terminology. The docs says that asyncio.wait
Returns two sets of Tasks/Futures: (done, pending).
How do I know whether the object is a task or a future?
And the main question is, how do I extract the result of the successfully ended task ?
One way is to access the protected attribute _result (as is shown in my code snippet). But I am feeling like there's a cleaner way to do that. What is the proper pattern to achieve that?
https://docs.python.org/3/library/asyncio-task.html#waiting-primitives
The doc for asyncio.wait has the following note:
Deprecated since version 3.8, will be removed in version 3.11: Passing
coroutine objects to wait() directly is deprecated.
Therefore you should use asyncio.Task that also has the asyncio.Task.result method:
test.py:
import asyncio
import random


async def task(i):
    """Sleep a random 1-5 s, then return its own index *i*."""
    t = random.uniform(1, 5)
    print(f"START: {i} ({t:.3f}s)")
    await asyncio.sleep(t)
    print(f"END: {i}")
    return i


async def main():
    # Real Task objects (not bare coroutines) so wait() gets what the
    # deprecation notice asks for and .result() is available.
    tasks = []
    for i in range(5):
        tasks.append(asyncio.create_task(task(i)))
    # FIRST_COMPLETED: resume as soon as any one task finishes.
    done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
    for t in done:
        print(t.result())  # public accessor for a finished Task's value


if __name__ == "__main__":
    asyncio.run(main())
Test:
$ python test.py
START: 0 (2.743s)
START: 1 (2.490s)
START: 2 (4.785s)
START: 3 (3.746s)
START: 4 (1.010s)
END: 4
4
If you want to retrieve all results and get the earliest next result first, use asyncio.as_completed instead:
...
for t in asyncio.as_completed(tasks):
print(await t)
...
Test 2:
$ python test.py
START: 0 (2.155s)
START: 1 (1.309s)
START: 2 (3.380s)
START: 3 (3.451s)
START: 4 (1.587s)
END: 1
1
END: 4
4
END: 0
0
END: 2
2
END: 3
3

Python async input

I'm trying to perform some actions during getting input from a user. I found this question but both answers not working for me.
My code:
In [1]: import asyncio
In [2]: import aioconsole
In [3]:
In [3]:
In [3]: async def test():
...: await asyncio.sleep(5)
...: await aioconsole.ainput('Is this your line? ')
...: await asyncio.sleep(5)
In [4]: asyncio.run(test()) # sleeping, input, sleeping (synchronously)
I'm expecting that input will be accessible during sleeping (or simple counting for example), but it's not happening.
What I do wrong?
What I do wrong?
You used await, which (as the name implies) means "wait". If you want things to happen at the same time, you need to tell them to run in the background, e.g. using asyncio.create_task() or concurrently, e.g. using asyncio.gather(). For example:
async def say_hi(message):
    """Background work that should overlap the pending console input."""
    await asyncio.sleep(1)
    print(message)


async def test():
    # gather() drives all three concurrently, so the sleeps run while
    # ainput() is waiting for the user; only the input's value is kept.
    _, response, _ = await asyncio.gather(
        say_hi("hello"),
        aioconsole.ainput('Is this your line? '),
        say_hi("world"),
    )
    print("response was", response)


asyncio.run(test())

How can I run asyncio library code on top of Twisted's asyncioreactor?

I've managed to import/install Twisted's asyncioreactor and execute a trivial asynchronous function:
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred


async def sleepy(reactor):
    """Sleep 3 s via Twisted's deferLater, then return 42."""
    print("SLEEPING")
    await task.deferLater(reactor, 3.0, lambda: None)
    print("done sleep")
    return 42


@task.react  # restored: the scrape mangled the decorator into "#task.react"
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
I'd like to intermix an asyncio library in said code, for instance asyncio.sleep. I've tried the following:
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred
import asyncio


async def sleepy(reactor):
    print("SLEEPING")
    # NOTE(review): this is the failing variant from the question —
    # awaiting an asyncio awaitable from Twisted's coroutine driver raises
    # "yield from wasn't used with future"; wrap with Deferred.fromFuture.
    await asyncio.sleep(3)
    print("done sleep")
    return 42


@task.react  # restored: the scrape mangled the decorator into "#task.react"
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
which produces the following error:
$ python test.py
SLEEPING
main function encountered error
Traceback (most recent call last):
File "test.py", line 16, in <module>
#task.react
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/task.py", line 908, in react
finished = main(_reactor, *argv)
File "test.py", line 18, in main
d = ensureDeferred(sleepy(reactor))
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/defer.py", line 823, in ensureDeferred
return _inlineCallbacks(None, coro, Deferred())
--- <exception caught here> ---
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/site-packages/twisted/internet/defer.py", line 1301, in _inlineCallbacks
result = g.send(result)
File "test.py", line 11, in sleepy
await asyncio.sleep(3)
File "/Users/blz/.pyenv/versions/3.6.0/lib/python3.6/asyncio/tasks.py", line 476, in sleep
return (yield from future)
builtins.AssertionError: yield from wasn't used with future
Fair enough, thought I, so I tried swapping await asyncio.sleep(3) with await ensureDeferred(asyncio.sleep(3)) and await asyncio.ensure_future(asyncio.sleep(3)), but I get exactly the same error.
How can I schedule an aio coroutine (and/or Future) to run on the same event loop as is used by asyncioreactor?
so I tried swapping await asyncio.sleep(3) with await
ensureDeferred(asyncio.sleep(3)) and await
asyncio.ensure_future(asyncio.sleep(3))
You were almost there, you should combine the two, and use Deferred.fromFuture instead of ensureDeferred:
await Deferred.fromFuture(asyncio.ensure_future(asyncio.sleep(3)))
The rule is, async def functions running in Twisted context (with ensureDeferred) can await only on Deferred, and async def functions running in asyncio context (with ensure_future) only on asyncio Future (one can always await on other coroutine objects (results from async def function calls), but the chain will ultimately lead to Deferred/Future). To convert from asyncio Future to Deferred use Deferred.fromFuture and to convert to asyncio Future use Deferred.asFuture.
One can switch contexts from one to another and back. In this (contrived) example we start with sleepy_twisted run in
Twisted context, which does Twisted sleep, then it switches to asyncio context to run sleepy_asyncio which does asyncio sleep, but then switches again to Twisted context for Twisted sleep:
from twisted.internet import asyncioreactor, task
from twisted.internet.defer import inlineCallbacks, ensureDeferred, Deferred
import asyncio

asyncioreactor.install()


async def sleepy_asyncio(reactor):
    """Runs in asyncio context: may await asyncio awaitables directly."""
    print("Sleep 2")
    await asyncio.sleep(1)
    print("Sleep 3")
    # Crossing back to Twisted: a Deferred must be converted to a Future.
    await Deferred.asFuture(task.deferLater(reactor, 1, lambda: None), loop=asyncio.get_running_loop())


async def sleepy_twisted(reactor):
    """Runs in Twisted context: may await Deferreds directly."""
    print("Sleep 1")
    await task.deferLater(reactor, 1, lambda: None)
    # Crossing into asyncio: the Future must be wrapped into a Deferred.
    await Deferred.fromFuture(asyncio.ensure_future(sleepy_asyncio(reactor)))
    print("done")


@task.react  # restored: the scrape mangled the decorator into "#task.react"
def main(reactor):
    return ensureDeferred(sleepy_twisted(reactor))
Wow, you've come across an interesting corner case!
By using asyncio.sleep() you have triggered some interesting behavior.
I think you might have uncovered a bug in
Twisted's integration with the Python 3 asyncioreactor and async/await.
You may wish to follow up with the Twisted developers on the Twisted mailing list.
I'm not 100% sure, but here are my thoughts.
The implementation of asyncio.sleep() is tightly coupled to the Python 3
asyncio implementation. It uses the asyncio.Future (which is similar to Twisted's deferred), and it uses get_event_loop() (which is similar to Twisted's reactor).
asyncio.sleep is implemented like this:
@coroutine  # restored: the scrape mangled "@coroutine" into "#coroutine"
def sleep(delay, result=None, *, loop=None):
    """Coroutine that completes after a given time (in seconds)."""
    if delay == 0:
        yield
        return result
    if loop is None:
        loop = events.get_event_loop()
    future = loop.create_future()
    # Resolve the future after *delay* on the chosen loop, then wait on it;
    # the timer handle is cancelled even if we are cancelled while waiting.
    h = future._loop.call_later(delay,
                                futures._set_result_unless_cancelled,
                                future, result)
    try:
        return (yield from future)
    finally:
        h.cancel()
I changed your code example slightly to pass Twisted's
asyncioreactor event loop into asyncio.sleep():
from twisted.internet import asyncioreactor
asyncioreactor.install()
from twisted.internet import reactor
from twisted.internet import task
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import ensureDeferred
import asyncio


async def sleepy(reactor):
    print("SLEEPING")
    # Explicitly hand asyncio.sleep the reactor's underlying asyncio loop;
    # per the answer this still fails the same way, which is the point.
    await asyncio.sleep(3, loop=reactor._asyncioEventloop)
    print("done sleep")
    return 42


@task.react  # restored: the scrape mangled the decorator into "#task.react"
def main(reactor):
    d = ensureDeferred(sleepy(reactor))
    d.addCallback(print)
    return d
I still got the same error as you: builtins.AssertionError: yield from wasn't used with future
The stack trace looks like:
main function encountered error
Traceback (most recent call last):
File "b.py", line 16, in <module>
#task.react
File "/Users/crodrigues/twisted8/src/twisted/internet/task.py", line 908, in react
finished = main(_reactor, *argv)
File "b.py", line 19, in main
d = ensureDeferred(sleepy(reactor))
File "/Users/crodrigues/twisted8/src/twisted/internet/defer.py", line 823, in ensureDeferred
return _inlineCallbacks(None, coro, Deferred())
--- <exception caught here> ---
File "/Users/crodrigues/twisted8/src/twisted/internet/defer.py", line 1301, in _inlineCallbacks
result = g.send(result)
File "b.py", line 12, in sleepy
await asyncio.sleep(3, loop=reactor._asyncioEventloop)
File "/usr/local/Cellar/python3/3.6.0/Frameworks/Python.framework/Versions/3.6/lib/python3.6/asyncio/tasks.py", line 478, in sleep
return (yield from future)
builtins.AssertionError: yield from wasn't used with future
I think the asyncio.sleep() is a coroutine that is supposed to run to completion
on the asyncio loop, but this is not happening here, hence the assertion.
I think the problem is being introduced by result = g.send(result).
I'm not sure you can send() to a coroutine like this and expect it to work.
I advise you to ask on the Twisted mailing list to get more detailed feedback.

Resources