Simple syntax to asynchronously access a MODBUS register - python-asyncio

I am trying to run three simple tasks in parallel using asyncio and shared global variables.
Two of them work perfectly. One reads websockets (async with websockets.connect("ws://192.168.1.137:9000") as websocket:), another accesses IO via a dedicated library.
I have not found a solution, or the right syntax, for getting AsyncModbusTCPClient running within the third task (sync MODBUS is easy to implement but would not fit within an async task).
The following just blocks everything:
async def get_var_modbus(loop):
    client = await AsyncModbusTCPClient(schedulers.ASYNC_IO, host="192.168.1.200", loop=loop, port=502, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)
Full code below:
from pymodbus.client.asynchronous import schedulers
from pymodbus.client.asynchronous.tcp import AsyncModbusTCPClient
import json
import time
from pypx800v5 import *
import aiohttp
import asyncio
import requests_async as requests
import numpy as np
import logging
from datetime import datetime
import websockets
import contextvars
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
# SDM230 via MODBUS
SDM230A=["Voltage","Current","Active Power","Apparent Power","Reactive Power","Power Factor","Phase Angle","Frequency","Import Active Energy","Export Active Energy","Import Reactive Energy","Export Reactive Energy"]
SDM230B=["Total system power demand","Maximum total system power demand","Current system positive power demand","Maximum system positive power demand","Current system reverse power demand","Maximum system reverse power demand"]
SDM230C=["Current demand","Maximum current Demand"]
SDM230D=["Total Active Energy","Total Reactive Energy"]
SDM230Labels=SDM230A+SDM230B+SDM230C+SDM230D
SDM230Var=["Voltage","Current","ActivePower","ApparentPower","ReactivePower","PowerFactor","PhaseAngle","Frequency","ImportActiveEnergy","ExportActiveEnergy","ImportReactiveEnergy","ExportReactiveEnergy","TotalSysPowerDemand","MaxTotalSysPowerDemand","CurrentSysPositivePowerDemand","MaxSysPositivePowerDemand","CurrentSysReversePowerDemand","MaxSysReversePowerDemand","CurrentDemand","MaximumCurrentDemand","TotalActiveEnergy","TotalReactiveEnergy"]
VoltageAdd=262199
CurrentAdd=262200
ActivePowerAdd=262201
ImportActiveEnergyAdd=262202
# inversor via Websockets
TempChaudiereAdd=262198
PuissMaxChauffeauAdd=262193
WREDAdd=262194
PacBat6TLAdd=262195
totPVAdd=262196
SOC6TLAdd=262197
# shared variables
WRED= 0
PacBat6TL=0
PacPV6TL=0
Pac6TLM=0
SOC6TL=0
PAC6TL=0
totPV=0
# --------------------------------------------------------------------------- #
# configure the client logging
# --------------------------------------------------------------------------- #
logging.basicConfig()
log = logging.getLogger()
log.setLevel(logging.DEBUG)
async def get_var_modbus(loop):
    client = await AsyncModbusTCPClient(schedulers.ASYNC_IO, host="192.168.1.200", port=502, loop=loop, timeout=20, unit=3)
    while True:
        print("INIT")
        print("Reading coils")
        rr = await client.read_input_registers(0, 1, unit=0x03)
        print(rr.registers)
        await asyncio.sleep(1)
async def get_var_socket():
    global WRED
    global PacBat6TL
    global PacPV6TL
    global Pac6TLM
    global SOC6TL
    global PAC6TL
    global totPV
    print("")
    i = 0
    dict = {}
    async with websockets.connect("ws://192.168.1.137:9000") as websocket:
        while True:
            i = i + 1
            data = (await websocket.recv())
            try:
                message = json.loads(data)
            except:
                break
            if "product" in message:
                if message["product"] == "ems":
                    print(message)
            if "WRED" in message:
                WRED = message["WRED"]
            if "PacBat6TL" in message:
                PacBat6TL = message["PacBat6TL"]
            if "PacPV6TL" in message:
                PacPV6TL = message["PacPV6TL"]
                totPV = PacPV6TL
            if "Pac6TLM" in message:
                Pac6TLM = message["Pac6TLM"]
                totPV = totPV + Pac6TLM
            if "SOC6TL" in message:
                SOC6TL = message["SOC6TL"]
            if "PAC6TL" in message:
                PAC6TL = message["PAC6TL"]
async def get_ipx_update():
    print("")
    i = 0
    dict = {}
    async with IPX800(host='192.168.1.139', api_key='API') as ipx:
        await ipx.init_config()
        while True:
            try:
                await ipx.update_ana(WREDAdd, WRED)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(PacBat6TLAdd, PacBat6TL)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(totPVAdd, totPV)
            except:
                print("ERROR")
            try:
                await ipx.update_ana(SOC6TLAdd, SOC6TL)
            except:
                print("ERROR")
            await asyncio.sleep(1)
def main():
    loop = asyncio.get_event_loop()
    loop.create_task(get_var_socket())
    loop.create_task(get_ipx_update())
    loop.create_task(get_var_modbus(loop))
    loop.run_forever()

if __name__ == '__main__':
    try:
        main()
    except Exception as f:
        print('main error: ', f)
        time.sleep(3)

Using the async_modbus library (https://pypi.org/project/async-modbus/, built on top of umodbus), it works very well.
I have used this library with success.
Please find the syntax below:
from async_modbus import AsyncTCPClient  # import for the client used below

async def get_var_modbus(loop):
    reader, writer = await asyncio.open_connection('192.168.1.200', 502)
    client = AsyncTCPClient((reader, writer))
    while True:
        print("Reading holding registers ADAM3066")
        reply = await client.read_holding_registers(slave_id=3, starting_address=0, quantity=8)
        print("reply:", reply)
        await asyncio.sleep(1)
OUTPUT:
Reading holding registers ADAM3066
reply: [65535 65535 65535 65535 289 65535 65535 65535]
The ADAM-3066 is an RS-485 MODBUS RTU 1-wire interface connected to a MODBUS TCP gateway at 192.168.1.200. I have one sensor connected to input 5 of the ADAM-3066, which returns a temperature of 28.9 degrees C.
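As a side note, here is a minimal sketch of decoding that output, assuming the ADAM-3066 reports temperature in tenths of a degree and 65535 (0xFFFF) for inputs with no sensor attached (both inferred from the sample output above, not from the ADAM documentation):

def decode_temperatures(registers):
    # Channel indices here are zero-based, so input 5 is index 4.
    temps = {}
    for channel, raw in enumerate(registers):
        if raw == 0xFFFF:  # assumed marker for "no sensor connected"
            continue
        temps[channel] = raw / 10.0  # 289 -> 28.9 degrees C
    return temps

print(decode_temperatures([65535, 65535, 65535, 65535, 289, 65535, 65535, 65535]))
# -> {4: 28.9}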

Related

Make multiprocessing.Queue accessible from asyncio [duplicate]

Given a multiprocessing.Queue that is filled from different Python threads, created via ThreadPoolExecutor.submit(...):
How can that queue be accessed with asyncio / Trio / AnyIO in a safe and reliable manner (context: FastAPI)?
I am aware of the Janus library, but I prefer a custom solution here.
Asked (hopefully) more concisely: how to implement
await <something_is_in_my_multiprocessing_queue>
so that it is accessible with async/await without blocking the event loop?
What synchronization mechanism in general would you suggest?
(Attention here: multiprocessing.Queue, not asyncio.Queue)
Actually, I figured it out.
Given a method that reads the mp.Queue:
def read_queue_blocking():
    return queue.get()
And this is the main issue: the call to get() is blocking.
We can now either
use asyncio.loop.run_in_executor in the asyncio event loop
(see https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.loop.run_in_executor), or
use anyio with await anyio.to_thread.run_sync(...) to execute the blocking retrieval of data from the queue in a separate thread.
For FastAPI:
import anyio
from fastapi import FastAPI, WebSocket

app = FastAPI()  # assumed; the app object is not shown in the original snippet

@app.websocket("/ws/{client_id}")
async def websocket_endpoint(websocket: WebSocket, client_id: str):
    await websocket.accept()
    while True:
        queue_result = await anyio.to_thread.run_sync(read_queue_blocking)
        await websocket.send_text(f"Message text was: {queue_result}")
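For completeness, a minimal sketch of the first option (asyncio.loop.run_in_executor), under the same assumptions as above (a module-level multiprocessing queue read by the blocking read_queue_blocking):

import asyncio
import multiprocessing

queue = multiprocessing.Queue()  # filled elsewhere, e.g. by executor threads

def read_queue_blocking():
    return queue.get()  # blocking; must not run on the event loop thread

async def consume_queue():
    loop = asyncio.get_running_loop()
    while True:
        # None selects the loop's default ThreadPoolExecutor; the blocking
        # get() runs in a worker thread while the event loop stays responsive.
        item = await loop.run_in_executor(None, read_queue_blocking)
        print("received:", item)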
I remastered the answer to showcase the case where the main thread running the asyncio loop is fed with data from child processes (ProcessPoolExecutor):
from concurrent.futures import ProcessPoolExecutor
import asyncio
from random import randint
from functools import partial

def some_heavy_task() -> int:
    sum(i * i for i in range(10 ** 8))
    return randint(1, 9)

def callback(fut: asyncio.Future, q: asyncio.Queue) -> None:
    """callback is used instead of mp.Queue to get feed from child processes."""
    loop = asyncio.get_event_loop()
    if not fut.exception() and not fut.cancelled():
        loop.call_soon(q.put_nowait, f"name-{fut.name}: {fut.result()}")

async def result_picker(q: asyncio.Queue) -> None:
    """Returns results to some outer world."""
    while True:
        res = await q.get()
        # imagine it is a websocket
        print(f"Result from heavy_work_producer: {res}")
        q.task_done()  # mark task as done here

async def heavy_work_producer(q: asyncio.Queue) -> None:
    """Wrapper around all multiprocessing work."""
    loop = asyncio.get_event_loop()
    with ProcessPoolExecutor(max_workers=4) as pool:
        heavy_tasks = [loop.run_in_executor(pool, some_heavy_task) for _ in range(12)]
        [i.add_done_callback(partial(callback, q=q)) for i in heavy_tasks]
        [setattr(t, "name", i) for i, t in enumerate(heavy_tasks)]  # just name them
        await asyncio.gather(*heavy_tasks)

async def amain():
    """Main entrypoint of async app."""
    q = asyncio.Queue()
    asyncio.create_task(result_picker(q))
    await heavy_work_producer(q)
    # do not let result_picker finish when heavy_work_producer is done;
    # wait for all results to show
    await q.join()
    print("All done.")

if __name__ == '__main__':
    asyncio.run(amain())

How to call async method from greenlet (playwright)

My framework (Locust, https://github.com/locustio/locust) is based on gevent and greenlets, but I would like to leverage Playwright (https://playwright.dev/python/), which is built on asyncio.
Naively using Playwright's sync API doesn't work and gives an exception:
playwright._impl._api_types.Error: It looks like you are using Playwright Sync API inside the asyncio loop.
Please use the Async API instead.
I'm looking for some kind of best practice on how to use asyncio in combination with gevent.
I've tried a couple of different approaches, but I don't know if I'm close or if what I'm trying to do is even possible (I have some experience with gevent, but haven't really used asyncio before).
Edit: I kind of have something working now (I've removed Locust and just directly spawned some greenlets to make it easier to understand). Is this as good as it gets, or is there a better solution?
import asyncio
import threading
from playwright.async_api import async_playwright
import gevent

def thr(i):
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.run_until_complete(do_stuff(i))
    loop.close()

async def do_stuff(i):
    playwright = await async_playwright().start()
    browser = await playwright.chromium.launch(headless=False)
    page = await browser.new_page()
    await page.wait_for_timeout(5000)
    await page.goto("https://google.com")
    await page.close()
    print(i)

def green(i):
    t = threading.Thread(target=thr, args=(i,))
    t.start()
    # t.join()  # joining doesn't work, but I couldn't be bothered right now :)

g1 = gevent.spawn(green, 1)
g2 = gevent.spawn(green, 2)
g1.join()
g2.join()
Inspired by @user4815162342's comment, I went with something like this:
from playwright.async_api import async_playwright  # need to import this first
from gevent import monkey, spawn
import asyncio
import gevent

monkey.patch_all()
loop = asyncio.new_event_loop()

async def f():
    print("start")
    playwright = await async_playwright().start()
    browser = await playwright.chromium.launch(headless=True)
    context = await browser.new_context()
    page = await context.new_page()
    await page.goto("https://www.google.com")
    print("done")

def greeny():
    while True:  # and not other_exit_condition
        future = asyncio.run_coroutine_threadsafe(f(), loop)
        while not future.done():
            gevent.sleep(1)

greenlet1 = spawn(greeny)
greenlet2 = spawn(greeny)
loop.run_forever()
The actual implementation will end up in Locust some day, probably after some optimization (reusing the browser instance, etc.).
Here's a simple way to integrate asyncio and gevent:
Run an asyncio loop in a dedicated thread
Use asyncio.run_coroutine_threadsafe() to run a coroutine
Use gevent.event.Event to wait until the coroutine resolves
import asyncio
import threading
import gevent.event

loop = asyncio.new_event_loop()
loop_thread = threading.Thread(target=loop.run_forever, daemon=True)
loop_thread.start()

async def your_coro():
    ...  # your coroutine body here

def wait_until_complete(coro):
    future = asyncio.run_coroutine_threadsafe(coro, loop)
    event = gevent.event.Event()
    future.add_done_callback(lambda _: event.set())
    event.wait()
    return future.result()

result = wait_until_complete(your_coro())

Python async input

I'm trying to perform some actions while getting input from a user. I found this question, but neither answer works for me.
My code:
In [1]: import asyncio

In [2]: import aioconsole

In [3]: async def test():
   ...:     await asyncio.sleep(5)
   ...:     await aioconsole.ainput('Is this your line? ')
   ...:     await asyncio.sleep(5)

In [4]: asyncio.run(test())  # sleeping, input, sleeping (synchronously)
I'm expecting the input to be accessible during the sleeps (or during simple counting, for example), but that's not happening.
What am I doing wrong?
What am I doing wrong?
You used await, which (as the name implies) means "wait". If you want things to happen at the same time, you need to tell them to run in the background, e.g. using asyncio.create_task() or concurrently, e.g. using asyncio.gather(). For example:
async def say_hi(message):
    await asyncio.sleep(1)
    print(message)

async def test():
    _, response, _ = await asyncio.gather(
        say_hi("hello"),
        aioconsole.ainput('Is this your line? '),
        say_hi("world"),
    )
    print("response was", response)

asyncio.run(test())
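And for the asyncio.create_task() route mentioned above, a minimal sketch where a background ticker keeps running while ainput() waits for the user (the ticker is just an illustration, not from the original answer):

import asyncio
import aioconsole

async def tick():
    n = 0
    while True:
        n += 1
        print("tick", n)
        await asyncio.sleep(1)

async def test():
    task = asyncio.create_task(tick())  # runs in the background
    response = await aioconsole.ainput('Is this your line? ')
    task.cancel()  # stop the ticker once the input has arrived
    print("response was", response)

asyncio.run(test())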

Asyncio script performs slowly, similar to sync script

I'm writing an asyncio script to retrieve stock bars data from Interactive Brokers via the ib_insync library.
While I have the script working, the performance is similar to a serial script. I was hoping to see a drastic improvement in speed. This code will be used in production.
I am new to asyncio and feel like I'm missing an important element. Below is the full script. I would very much appreciate assistance in speeding this up. Thanks.
import asyncio
import ib_insync as ibi
import nest_asyncio
import pandas as pd

nest_asyncio.apply()

class App:
    async def run(self, symbols):
        print(f"1 start run: {symbols}")
        self.ib = ibi.IB()
        with await self.ib.connectAsync("127.0.0.1", "****", clientId="****"):
            contracts = [ibi.Stock(symbol, "SMART", "USD") for symbol in symbols]
            bars_dict = dict()
            print(f"2 start loop: {symbols}")
            for contract in contracts:
                bars = await self.ib.reqHistoricalDataAsync(
                    contract,
                    endDateTime="",
                    durationStr="1 M",
                    barSizeSetting="1 day",
                    whatToShow="ADJUSTED_LAST",
                    useRTH=True,
                )
                # Convert to dataframes.
                bars_dict[contract.symbol] = ibi.util.df(bars)
            print(f"3 End bars: {symbols}")
            return bars_dict

    async def main(self):
        res = await asyncio.gather(self.run(self.sp500(0, 100)))
        return res

    def stop(self):
        self.ib.disconnect()

    def sp500(self, start=None, end=10):
        payload = pd.read_html(
            "https://en.wikipedia.org/wiki/List_of_S%26P_500_companies"
        )
        first_table = payload[0]
        sp500 = first_table["Symbol"].sort_values().to_list()
        return sp500[start:end]

if __name__ == "__main__":
    import time

    start = time.time()
    app = App()
    try:
        print("START CALL")
        res = asyncio.run(app.main())
        print("END CALL")
    except (KeyboardInterrupt, SystemExit):
        app.stop()
    for ticker, bars in res[0].items():
        print(f"{ticker}\n{bars}")
    print(f"Total time: {(time.time() - start)}")
Your script is running in sequence. The call to asyncio.gather() in main is useless because it is invoked with just one coroutine; you're supposed to call it with multiple coroutines to have them run in parallel.
For example, you could remove the asyncio.gather() from main (just await self.run(self.sp500(0, 100)) there) and instead use it to parallelize the calls to reqHistoricalDataAsync:
class App:
    async def run(self, symbols):
        print(f"1 start run: {symbols}")
        self.ib = ibi.IB()
        with await self.ib.connectAsync("127.0.0.1", "****", clientId="****"):
            contracts = [ibi.Stock(symbol, "SMART", "USD") for symbol in symbols]
            print(f"2 start loop: {symbols}")
            all_bars = await asyncio.gather(*[
                self.ib.reqHistoricalDataAsync(
                    contract,
                    endDateTime="",
                    durationStr="1 M",
                    barSizeSetting="1 day",
                    whatToShow="ADJUSTED_LAST",
                    useRTH=True,
                )
                for contract in contracts
            ])
            bars_dict = {}
            for contract, bars in zip(contracts, all_bars):
                # Convert to dataframes.
                bars_dict[contract.symbol] = ibi.util.df(bars)
            print(f"3 End bars: {symbols}")
            return bars_dict
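If a hundred simultaneous requests turns out to be too aggressive, one possible refinement (not part of the original answer, and subject to IB's own pacing rules) is to bound the concurrency with an asyncio.Semaphore:

# inside App.run(), replacing the plain gather above
sem = asyncio.Semaphore(10)  # at most 10 requests in flight; tune as needed

async def fetch_bars(contract):
    async with sem:  # the semaphore limits how many requests run at once
        return await self.ib.reqHistoricalDataAsync(
            contract,
            endDateTime="",
            durationStr="1 M",
            barSizeSetting="1 day",
            whatToShow="ADJUSTED_LAST",
            useRTH=True,
        )

all_bars = await asyncio.gather(*[fetch_bars(c) for c in contracts])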

Discord.py Rewrite Giphy Cog Error: Unclosed Connector

I'm trying to build a Giphy cog for my discord.py bot, and it's throwing the following error:
Unclosed client session
client_session: <aiohttp.client.ClientSession object at 0x000001CA509CDA60>
Unclosed connector
connections: ['[(<aiohttp.client_proto.ResponseHandler object at 0x000001CA51897BE0>, 77055.671)]']
connector: <aiohttp.connector.TCPConnector object at 0x000001CA509CDA30>
Does anyone know what is causing this and how I would fix it?
Here is my code:
import os
import aiohttp
import random
import discord
from discord.ext import commands
from dotenv import load_dotenv

load_dotenv()

prefix = os.getenv("CLIENT_PREFIX")
giphy_api = os.getenv("GIPHY_API_KEY")
command_attrs = {'hidden': False}

class FunCog(commands.Cog, name='Fun Commands', command_attrs=command_attrs):
    def __init__(self, client):
        self.client = client

    @commands.command(name='gif')
    async def _gif(self, ctx, *, search, json=None):
        embed = discord.Embed(colour=discord.Colour.blue())
        session = aiohttp.ClientSession()
        if search == '':
            response = await session.get('https://api.giphy.com/v1/gifs/random?api_key=' + giphy_api)
            data = json.loads(await response.text())
            embed.set_image(url=data['data']['images']['original']['url'])
        else:
            search.replace(' ', '+')
            response = await session.get(
                'http://api.giphy.com/v1/gifs/search?q=' + search + '&api_key=' + giphy_api + '&limit=10')
            data = json.loads(await response.text())
            gif_choice = random.randint(0, 9)
            embed.set_image(url=data['data'][gif_choice]['images']['original']['url'])
            await session.close()
        await ctx.send(embed=embed)

def setup(client):
    client.add_cog(FunCog(client))
I omitted my Giphy API Key for security reasons.
I'm using discord.py rewrite and Python 3.8.6, if it helps.
Basically I want to be able to search a gif with it by tag and it will respond with a random gif from giphy for the specified tag.
--EDIT--
Moved the error logging to my Events.py file.
Moved the API keys to my .env file.
Removed the tenor_api env since it won't be used and has nothing to do with this question.
Updated the code a bit to resolve a few missing parameters.
---EDIT---
Thanks to Fixator10 for the response that fixed the issue I was having in this post.
Here is my working code if anyone wants to use it:
import os
import aiohttp
import random
import discord
import json
from discord.ext import commands
from dotenv import load_dotenv

load_dotenv()

prefix = os.getenv("CLIENT_PREFIX")
giphy_api = os.getenv("GIPHY_API_KEY")
command_attrs = {'hidden': False}

class FunCog(commands.Cog, name='Fun Commands', command_attrs=command_attrs):
    def __init__(self, client):
        self.client = client
        self.session = aiohttp.ClientSession()

    def cog_unload(self):
        self.client.loop.create_task(self.session.close())

    @commands.command(name='gif')
    async def _gif(self, ctx, *, search):
        session = self.session
        embed = discord.Embed(colour=discord.Color.dark_gold())
        if search == '':
            response = await session.get('https://api.giphy.com/v1/gifs/random?api_key=' + giphy_api)
            data = json.loads(await response.text())
            embed.set_image(url=data['data']['images']['original']['url'])
        else:
            search.replace(' ', '+')
            response = await session.get(
                'http://api.giphy.com/v1/gifs/search?q=' + search + '&api_key=' + giphy_api + '&limit=10')
            data = json.loads(await response.text())
            gif_choice = random.randint(0, 9)
            embed.set_image(url=data['data'][gif_choice]['images']['original']['url'])
        await ctx.send(embed=embed)

def setup(client):
    client.add_cog(FunCog(client))
It's probably not the best since it only uses a single tag but I'll probably improve that at another time xD
This is caused by your session behavior: you don't close the session if search is empty. In two words: you should close the session after usage.
The simplest solution is to use a context manager:
# https://docs.aiohttp.org/en/stable/#client-example
async with aiohttp.ClientSession() as session:
    async with session.get('http://python.org') as response:
        html = await response.text()
Otherwise, you can create a session for your Cog and close it on unload:
class MyCog(commands.Cog):
    def __init__(self, client):
        self.client = client
        self.session = aiohttp.ClientSession()

    # https://discordpy.readthedocs.io/en/stable/ext/commands/api.html#discord.ext.commands.Cog.cog_unload
    def cog_unload(self):
        # since close is a coroutine,
        # and we need to call it in a sync func,
        # let's create an asyncio task
        self.client.loop.create_task(self.session.close())
        # or, we can use detach:
        # self.session.detach()

    @commands.command()
    async def mycmd(self, ctx):
        async with self.session.get("https://example.com") as response:
            await ctx.send(response.status)
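For context, a minimal sketch of wiring such a cog into a bot (discord.py 1.x style; the module path cogs.fun and the DISCORD_TOKEN variable are placeholders, not from the original post):

import os
from discord.ext import commands
from dotenv import load_dotenv

load_dotenv()
bot = commands.Bot(command_prefix=os.getenv("CLIENT_PREFIX"))
bot.load_extension("cogs.fun")  # the module that defines FunCog and setup()
bot.run(os.getenv("DISCORD_TOKEN"))  # assumed env var holding the bot token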
