Python asyncio class to create functions dynamically and execute each in parallel with its own interval - python-asyncio

I am trying to write a class that creates methods dynamically, each of which should run in parallel with its own duration, using asyncio. But I am really new to Python's asyncio and am now at a point where I am stuck and have no idea how to continue.
I collect servers with IP, port and command duration from a config file, try to create the methods in a loop, and then gather these methods with asyncio. Here is my code:
import asyncio
from datetime import datetime
# from common.config import ConfigConstructor

class RCONserver:
    def __init__(self, game: str, server_name=None):
        self.game = game
        self.server_name = server_name
        # self.game_config = ConfigConstructor('cfg/rcon_server.yml')

    async def send_rcon_command(self, ip: str, port: str, period: int, cnt: int):
        await asyncio.sleep(int(period))
        print(str(datetime.now()) + ": " + ip + " " + port)

    def get_servers(self):
        servers = []
        for server in ['game1', 'game2']:
            print(server)
            if server[:4] == "game":
                # s = self.game_config
                # s.fetch_section(server)
                # print(s)
                servers.append(
                    self.send_rcon_command('192.168.178.1',
                                           '30000',
                                           300,
                                           3))
        return servers

async def main():
    obj = RCONserver('game')
    await asyncio.gather(*obj.get_servers())

asyncio.run(main())
The code runs, but only once for each server in the YAML file.
What do I have to do to run it periodically, according to the given watch-period parameter?

I think this should do the trick: with a loop and gather I can create the functions dynamically and run each in parallel with its own interval:
import asyncio
from datetime import datetime
import random

class RCONServer:
    def __init__(self):
        self.rcon_loop = asyncio.get_event_loop()

    def dt(self):
        return datetime.now().strftime("%Y/%m/%d %H:%M:%S")

    def build_rcon_functions(self):
        rcon_servers = []
        for server in ['game1', 'game2']:
            rcon_servers.append(
                self.rcon_command(server,
                                  "192.168.0.1",
                                  "30000",
                                  "some_password",
                                  random.randint(5, 10)
                                  )
            )
        return rcon_servers

    async def rcon_command(self, server: str, ip: str, port: str, passwd: str, interval: int):
        while True:
            await asyncio.sleep(int(interval))
            print(self.dt(), ">", server)

    async def run_loop(self):
        rcon_tasks = self.build_rcon_functions()
        try:
            print(self.dt(), "> Start")
            await asyncio.gather(*rcon_tasks)
            self.rcon_loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            print(self.dt(), "> End")
            self.rcon_loop.close()

obj = RCONServer()
asyncio.run(obj.run_loop())
Any suggestions for optimizing it, or hints on how it could be solved better?
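One suggestion, offered as a hedged sketch rather than a definitive rewrite: asyncio.run() already creates and closes its own event loop, so the manually created self.rcon_loop, and the run_forever()/close() calls that are never reached anyway because gather() on the endless tasks never returns, can probably be dropped. Something like:

import asyncio
from datetime import datetime
import random

class RCONServer:
    def dt(self):
        return datetime.now().strftime("%Y/%m/%d %H:%M:%S")

    async def rcon_command(self, server: str, ip: str, port: str, passwd: str, interval: int):
        # One endless task per server, each sleeping on its own interval.
        while True:
            await asyncio.sleep(interval)
            print(self.dt(), ">", server)

    async def run(self):
        tasks = [
            self.rcon_command(server, "192.168.0.1", "30000", "some_password",
                              random.randint(5, 10))
            for server in ['game1', 'game2']
        ]
        print(self.dt(), "> Start")
        await asyncio.gather(*tasks)

if __name__ == "__main__":
    try:
        asyncio.run(RCONServer().run())
    except KeyboardInterrupt:
        print("End")

Ctrl-C then simply falls out of asyncio.run(), which also takes care of closing the loop.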

Related

Urwid and Multiprocessing

I am trying to sequence some actions in urwid.
I made a timer which runs in the background and communicates with the main process,
like this:
from multiprocessing import Process, Pipe
import time
import urwid

def show_or_exit(key):
    if key in ('q', 'Q'):
        raise urwid.ExitMainLoop()

class midiloop(urwid.Frame):
    def __init__(self):
        self.message = urwid.Text('Press Space', align='center')
        self.filler = urwid.Filler(self.message, "middle")
        super().__init__(urwid.Frame(self.filler))

    def keypress(self, size, key):
        if key == " ":
            self.seq()
        else:
            return key

    def timer(self, conn):
        x = 0
        while True:
            if (conn.poll() == False):
                pass
            else:
                z = conn.recv()
                if (z == "kill"):
                    return()
            conn.send(x)
            x += 1
            time.sleep(0.05)

    def seq(self):
        self.parent_conn, self.child_conn = Pipe()
        self.p = Process(target=self.timer, args=(self.child_conn,))
        self.p.start()
        while True:
            if (self.parent_conn.poll(None)):
                self.y = self.parent_conn.recv()
                self.message.set_text(str(self.y))
                loop.draw_screen()
            if (self.y > 100):
                self.parent_conn.send("kill")
                self.message.set_text("Press Space")
                return()

if __name__ == '__main__':
    midiloop = midiloop()
    loop = urwid.MainLoop(midiloop, unhandled_input=show_or_exit, handle_mouse=True)
    loop.run()
The problem is that I'm blocking urwid's main loop with while True:.
So can anyone give me a solution to listen for the key Q to quit the program before it reaches the end of the loop, for example, and more generally to interact with urwid while communicating with the subprocess?
It seems to be rather complicated to combine multiprocessing and urwid.
Since you're using a timer and your class is called midiloop, I'm going to guess that maybe you want to implement a mini sequencer.
One possible way of implementing that is using an asyncio loop instead of urwid's MainLoop, and schedule events with the loop.call_later() function. I've implemented a simple drum machine with that approach in the past, using urwid for drawing the sequencer, asyncio for scheduling the play events and simpleaudio to play. You can see the code for that here: https://github.com/eliasdorneles/kickit
If you still want to implement communication with multiprocessing, I think your best bet is to use urwid.AsyncioEventLoop and the aiopipe helper for duplex communication.
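For the urwid side of that, hooking urwid up to asyncio is mostly a matter of passing urwid.AsyncioEventLoop to MainLoop; the loop's call_later() can then schedule plain callbacks. A minimal sketch of the scheduling idea, with a made-up repeating "tick" callback standing in for real sequencer events:

import asyncio
import itertools
import urwid

def quit_on_q(key):
    if key in ('q', 'Q'):
        raise urwid.ExitMainLoop()

aio_loop = asyncio.get_event_loop()
text = urwid.Text("tick 0", align='center')
main_loop = urwid.MainLoop(urwid.Filler(text, 'middle'),
                           unhandled_input=quit_on_q,
                           event_loop=urwid.AsyncioEventLoop(loop=aio_loop))
counter = itertools.count(1)

def tick():
    # Runs on the shared asyncio loop, so urwid keeps handling keys in between.
    text.set_text("tick %d" % next(counter))
    main_loop.draw_screen()
    # Re-schedule ourselves to keep the sequence going.
    aio_loop.call_later(0.5, tick)

aio_loop.call_later(0.5, tick)
main_loop.run()

Because everything runs on one event loop, pressing Q is handled immediately even while ticks keep firing.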
It's not very minimal, I'm afraid. However, I did spend a day writing this Urwid frontend that starts, stops, and communicates with a subprocess.
import os
import sys
from multiprocessing import Process, Pipe, Event
from collections import deque
import urwid

class suppress_stdout_stderr(object):
    """
    Suppresses stdout and stderr by piping them to dev null...
    The same place I send bad faith replies to my tweets
    """
    def __enter__(self):
        self.outnull_file = open(os.devnull, 'w')
        self.errnull_file = open(os.devnull, 'w')
        self.old_stdout_fileno_undup = sys.stdout.fileno()
        self.old_stderr_fileno_undup = sys.stderr.fileno()
        self.old_stdout_fileno = os.dup(sys.stdout.fileno())
        self.old_stderr_fileno = os.dup(sys.stderr.fileno())
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr
        os.dup2(self.outnull_file.fileno(), self.old_stdout_fileno_undup)
        os.dup2(self.errnull_file.fileno(), self.old_stderr_fileno_undup)
        sys.stdout = self.outnull_file
        sys.stderr = self.errnull_file
        return self

    def __exit__(self, *_):
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr
        os.dup2(self.old_stdout_fileno, self.old_stdout_fileno_undup)
        os.dup2(self.old_stderr_fileno, self.old_stderr_fileno_undup)
        os.close(self.old_stdout_fileno)
        os.close(self.old_stderr_fileno)
        self.outnull_file.close()
        self.errnull_file.close()
def subprocess_main(transmit, stop_process):
    with suppress_stdout_stderr():
        import time
        yup = ['yuuuup', 'yuuuuup', 'yeaup', 'yeoop']
        nope = ['noooooooe', 'noooope', 'nope', 'nope']
        mesg = 0
        i = 0
        while True:
            i = i % len(yup)
            if transmit.poll():
                mesg = transmit.recv()
            if mesg == 'Yup':
                transmit.send(yup[i])
            if mesg == 'Nope':
                transmit.send(nope[i])
            if stop_process.wait(0):
                break
            i += 1
            time.sleep(2)
class SubProcess:
    def __init__(self, main):
        """
        Handles forking, stopping and communication with a subprocess
        :param main: subprocess method to run, method signature is
            def main(transmit, stop_process):
                transmit: is a multiprocess Pipe to send data to parent process
                stop_process: is multiprocess Event to set when you want the process to exit
        """
        self.main = main
        self.recv, self.transmit = None, None
        self.stop_process = None
        self.proc = None

    def fork(self):
        """
        Forks and starts the subprocess
        """
        self.recv, self.transmit = Pipe(duplex=True)
        self.stop_process = Event()
        self.proc = Process(target=self.main, args=(self.transmit, self.stop_process))
        self.proc.start()

    def write_pipe(self, item):
        self.recv.send(item)

    def read_pipe(self):
        """
        Reads data sent by the process into a list and returns it
        :return:
        """
        item = []
        if self.recv is not None:
            try:
                while self.recv.poll():
                    item += [self.recv.recv()]
            except:
                pass
        return item

    def stop(self):
        """
        Sets the event to tell the process to exit.
        note: this is co-operative multi-tasking, the process must respect the flag or this won't work!
        """
        self.stop_process.set()
        self.proc.join()
class UrwidFrontend:
    def __init__(self, subprocess_main):
        """
        Urwid frontend to control the subprocess and display its output
        """
        self.title = 'Urwid Frontend Demo'
        self.choices = 'Start Subprocess|Quit'.split('|')
        self.response = None
        self.item = deque(maxlen=10)
        self.event_loop = urwid.SelectEventLoop()
        # start the heartbeat
        self.event_loop.alarm(0, self.heartbeat)
        self.main = urwid.Padding(self.main_menu(), left=2, right=2)
        self.top = urwid.Overlay(self.main, urwid.SolidFill(u'\N{MEDIUM SHADE}'),
                                 align='center', width=('relative', 60),
                                 valign='middle', height=('relative', 60),
                                 min_width=20, min_height=9)
        self.loop = urwid.MainLoop(self.top, palette=[('reversed', 'standout', ''), ], event_loop=self.event_loop)
        self.subprocess = SubProcess(subprocess_main)

    def exit_program(self, button):
        raise urwid.ExitMainLoop()

    def main_menu(self):
        body = [urwid.Text(self.title), urwid.Divider()]
        for c in self.choices:
            button = urwid.Button(c)
            urwid.connect_signal(button, 'click', self.handle_button, c)
            body.append(urwid.AttrMap(button, None, focus_map='reversed'))
        return urwid.ListBox(urwid.SimpleFocusListWalker(body))

    def subproc_menu(self):
        self.response = urwid.Text('Waiting ...')
        body = [self.response, urwid.Divider()]
        choices = ['Yup', 'Nope', 'Stop Subprocess']
        for c in choices:
            button = urwid.Button(c)
            urwid.connect_signal(button, 'click', self.handle_button, c)
            body.append(urwid.AttrMap(button, None, focus_map='reversed'))
        listbox = urwid.ListBox(urwid.SimpleFocusListWalker(body))
        return listbox

    def update_subproc_menu(self, text):
        self.response.set_text(text)

    def handle_button(self, button, choice):
        if choice == 'Start Subprocess':
            self.main.original_widget = self.subproc_menu()
            self.subprocess.fork()
            self.item = deque(maxlen=10)
        if choice == 'Stop Subprocess':
            self.subprocess.stop()
            self.main.original_widget = self.main_menu()
        if choice == 'Quit':
            self.exit_program(button)
        if choice == 'Yup':
            self.subprocess.write_pipe('Yup')
        if choice == 'Nope':
            self.subprocess.write_pipe('Nope')

    def heartbeat(self):
        """
        heartbeat that runs 24 times per second
        """
        # read from the process
        self.item.append(self.subprocess.read_pipe())
        # display it
        if self.response is not None:
            self.update_subproc_menu(['Subprocess started\n', f'{self.item}\n', ])
            self.loop.draw_screen()
        # set the next beat
        self.event_loop.alarm(1 / 24, self.heartbeat)

    def run(self):
        self.loop.run()

if __name__ == "__main__":
    app = UrwidFrontend(subprocess_main)
    app.run()

I am new to Python and I am trying to create a leaderboard

I would like to, as an admin, add points to a specific member's balance, and I would like to know how to create a JSON file with all the "points" someone has.
import discord
from discord.ext import commands
import random
import os
import json

# this line of code gives the bot a command prefix
bot = commands.Bot(command_prefix="/")

# This line of code tells the bot to start up
@bot.event
async def on_ready():
    print("The bot is now online!")

@bot.command(pass_context=True)
async def leaderboard(ctx):
    await ctx.send("This is a work in progress, this will display the leaderboard")

amounts = {}

@bot.command(pass_context=True)
async def balance(ctx):
    id = str(ctx.message.author.id)
    if id in amounts:
        await ctx.send("You have {} ben points".format(amounts[id]))
    else:
        await ctx.send("You do not have an account")

@bot.command(pass_context=True)
async def register(ctx):
    id = str(ctx.message.author.id)
    if id not in amounts:
        amounts[id] = 100
        await ctx.send("You are now registered")
        _save()
    else:
        await ctx.send("You already have an account")

@bot.command(pass_context=True)
async def transfer(ctx, amount: int, other: discord.Member):
    primary_id = str(ctx.message.author.id)
    other_id = str(other.id)
    if primary_id not in amounts:
        await ctx.send("You do not have an account")
    elif other_id not in amounts:
        await ctx.send("The other party does not have an account")
    elif amounts[primary_id] < amount:
        await ctx.send("You cannot afford this transaction")
    else:
        amounts[primary_id] -= amount
        amounts[other_id] += amount
        await ctx.send("Transaction complete")
        _save()

def _save():
    with open('amounts.json', 'w+') as f:
        json.dump(amounts, f)

@bot.command()
async def save(ctx):
    _save()

# This line of code tells the bot to run
bot.run("Token")
I am not sure what I am meant to do from here.
I might be overcomplicating the code; if anyone can make it more efficient, I will be incredibly grateful.
Here's the essential usage and everything you'll need to know for the basics of JSON:
>>> import json

# Creating a dictionary with some values
>>> data = {"foo": "bar", "key": "value"}

# Opening a file and writing to it
>>> with open("db.json", "w+") as fp:
...     json.dump(data, fp, sort_keys=True, indent=4)  # Kwargs for beautification

# Loading in data from a file
>>> with open("db.json", "r") as fp:
...     data = json.load(fp)

# Accessing the values
>>> data["foo"]
'bar'
>>> data["key"]
'value'
This can be adapted to suit your needs, perhaps something like:
# Let's say the JSON has the following structure (JSON keys are always strings):
# {"users": {"112233445566778899": {"points": 0}, "224466881133557799": {"points": 0}}}
# The "users" key is a bit redundant if you're only gonna store users in the file.
# It's down to what you'll be storing in the file and readability etc.

import json
import random
import typing

def add_points(member_id: str, amount: int):
    # Open the file first so we load in all the users' data -
    # making a new dict would just overwrite the current file's contents
    with open("file.json", "r") as fp:
        data = json.load(fp)
    data["users"][member_id]["points"] += amount
    # Write the updated data to the file
    with open("file.json", "w+") as fp:
        json.dump(data, fp, sort_keys=True, indent=4)
    return data["users"][member_id]["points"]

@bot.command()
async def give(ctx, member: typing.Optional[discord.Member] = None, points: typing.Optional[int] = None):
    if not member:
        member = ctx.author
    if not points:
        points = random.randint(10, 50)
    # Have a separate getter for the points - this was just for the sake of concise code
    total_points = add_points(str(member.id), points)
    await ctx.send(f"Successfully gave {member.mention} {points} points! Total: {total_points}")
I'll be happy to clarify anything if need be.
References:
Dictionary exercises - Might be worth taking a look into so then you're comfortable with how they work.
f-Strings - Python 3.6.0+
json.load()
json.dump()
typing.Optional
User.id
commands.Context
Context.author

Message Counter for specific words discord.py

I'm trying to build a message counter for discord.py that counts specific messages and then responds with the number of times the message was said in that day.
I have the base but I don't know how to build the actual counter... Here is my code:
import discord
from discord.ext import commands
import discord.utils

class Message_Counter(commands.Cog):
    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_message(self, ctx, message):
        if "oof" in message.content:
            await ctx.send(str(counter))
        elif "Thot" in message.content:
            await ctx.send(str(counter))

def setup(client):
    client.add_cog(Message_Counter(client))
Any help would be much appreciated. I'm using the rewrite branch of discord.py, if that helps.
Basically, for Thot it would respond with **Thot counter**: <number>,
for oof it would respond with **oof counter**: <number>,
and so on and so forth.
I would also like it to reset the counter on a daily basis, so that roughly every 24 hours the counter starts over.
Using json (quick introduction to JSON here):
We want to create a JSON file named counters.json in the same folder as the file(s) for your bot. Its contents should look like this:
{
    "Thot": 0,
    "oof": 0
}
Loading a json file into a dictionary works with the json library:
(If you have no idea what the "with open" stuff is about, here is a primer on file reading and writing operations)
import json

def load_counters():
    with open('counters.json', 'r') as f:
        counters = json.load(f)
    return counters
Saving the dictionary back to json works in a very similar vein:
def save_counters(counters):
    with open('counters.json', 'w') as f:
        json.dump(counters, f)
Now that we have a way of loading and unloading our counters from json, we can change the bot code to use them:
import discord
from discord.ext import commands
import discord.utils

class Message_Counter(commands.Cog):
    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_message(self, message):
        # A cog's on_message listener only receives the message itself,
        # so we respond via message.channel.
        if "oof" in message.content:
            counters = load_counters()
            counters["oof"] += 1
            await message.channel.send(str(counters["oof"]))
            save_counters(counters)
        elif "Thot" in message.content:
            counters = load_counters()
            counters["Thot"] += 1
            await message.channel.send(str(counters["Thot"]))
            save_counters(counters)

def setup(client):
    client.add_cog(Message_Counter(client))
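The question also asks for the counters to roll over roughly every 24 hours, which the code above doesn't do. A minimal sketch of one way to add that, assuming a discord.py version that ships discord.ext.tasks and reusing the load_counters/save_counters helpers from above; the rest of the cog stays as shown:

from discord.ext import commands, tasks

class Message_Counter(commands.Cog):
    def __init__(self, client):
        self.client = client
        # Kick off the daily reset loop as soon as the cog is loaded.
        self.reset_counters.start()

    @tasks.loop(hours=24)
    async def reset_counters(self):
        # Zero every counter and persist the cleared state once a day.
        counters = load_counters()
        for word in counters:
            counters[word] = 0
        save_counters(counters)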

Python Autobahn Websocket Server Receiving One Connection at a Time

Asyncio (or Twisted), as used by Autobahn, is supposed to handle concurrent connections at the same time.
I followed a good tutorial from the Autobahn read-the-docs and it all worked well, yet the server receives only one connection, processes its request, and only after that accepts a second one.
How can I ensure that the server handles multiple connections concurrently without holding up the other connecting peers?
I have searched across the web the whole day but with no success.
Here is my code (I have cut out a lot of code while debugging):
import asyncio
import json

from autobahn.asyncio.websocket import WebSocketServerProtocol
from autobahn.asyncio.websocket import WebSocketServerFactory

class NMmapperServerProtocol(WebSocketServerProtocol):
    cmd = NMmapperWSCommandParser()  # I have cut this out due to debugging

    def onMessage(self, payload, isBinary):
        """
        # payload: the message
        # isBinary: whether it's a binary message
        """
        try:
            offload_payload = json.loads(payload.decode("utf-8"))
            await asyncio.gather(cmd.processWSCommands(offload_payload, self))
        except Exception as e:
            raise

    def onConnect(self, request):
        """
        When we've got a peer connected to our server
        """
        try:
            # print(self)
            print(request.peer, "Has connected")
        except Exception as e:
            raise

    def onOpen(self):
        """
        We have a full connection
        """
        try:
            # Some database action can be made from here
            print("Connection now opened")
        except Exception as e:
            raise

    def onClose(self, wasClean, code, reason):
        """
        # the client is closing his or her connection
        """
        try:
            print("wasClean ", wasClean)
            print("code ", code)
            print("reason ", reason)
        except Exception as e:
            raise

    # Setters
    def setCsrftoken(self, cookie_string):
        """
        # parse and set
        """
        self.csrftoken = self.parse_csrftoken(cookie_string)

    # Setters
    def setSession(self, cookie_string):
        """
        # parse and set
        """
        self.session = self.parse_session(cookie_string)

if __name__ == "__main__":
    if(IN_PRODUCTION):
        print("RUNNING ")
        factory = NMmapperWSServerFactory(PRODUCTION_HOST, PRODUCTION_PORT)
        factory.run_loop()
    else:
        print("Running on dev")
        factory = WebSocketServerFactory()
        factory.protocol = NMmapperServerProtocol
        loop = asyncio.get_event_loop()
        coro = loop.create_server(factory, '0.0.0.0', 9000)
        server = loop.run_until_complete(coro)
        try:
            loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            server.close()
            loop.close()
Thank you.
I finally got it working as expected. Since this is an asyncio library,
I had to prefix async to each method that performs a long-running task.
The problem was with onMessage: I had to process the messages in parallel
so as not to block the other clients waiting to have their messages processed too.
So to do that I had to:
offload_payload = json.loads(payload.decode("utf-8"))
loop = asyncio.get_event_loop()
# Offload command processing
loop.create_task(self.processWSCommands(offload_payload, self))
This way every message is processed in parallel.
Even then, make sure that the method or function processing the message doesn't itself block; one way to keep blocking work off the event loop is sketched after the full code below.
import asyncio
import json

from autobahn.asyncio.websocket import WebSocketServerProtocol
from autobahn.asyncio.websocket import WebSocketServerFactory

class NMmapperServerProtocol(WebSocketServerProtocol):
    cmd = NMmapperWSCommandParser()  # I have cut this out due to debugging

    async def onMessage(self, payload, isBinary):
        """
        # payload: the message
        # isBinary: whether it's a binary message
        """
        try:
            offload_payload = json.loads(payload.decode("utf-8"))
            loop = asyncio.get_event_loop()
            # loop.create_task(runner(10, self.peer))
            # asyncio.gather(runner(20, self.peer))
            # Offload command processing
            loop.create_task(self.processWSCommands(offload_payload, self))
        except Exception as e:
            raise

    def onConnect(self, request):
        """
        When we've got a peer connected to our server
        """
        try:
            # print(self)
            print(request.peer, "Has connected")
        except Exception as e:
            raise

    def onOpen(self):
        """
        We have a full connection
        """
        try:
            # Some database action can be made from here
            print("Connection now opened")
        except Exception as e:
            raise

    def onClose(self, wasClean, code, reason):
        """
        # the client is closing his or her connection
        """
        try:
            print("wasClean ", wasClean)
            print("code ", code)
            print("reason ", reason)
        except Exception as e:
            raise

    # Setters
    def setCsrftoken(self, cookie_string):
        """
        # parse and set
        """
        self.csrftoken = self.parse_csrftoken(cookie_string)

    # Setters
    def setSession(self, cookie_string):
        """
        # parse and set
        """
        self.session = self.parse_session(cookie_string)

if __name__ == "__main__":
    if(IN_PRODUCTION):
        print("RUNNING ")
        factory = NMmapperWSServerFactory(PRODUCTION_HOST, PRODUCTION_PORT)
        factory.run_loop()
    else:
        print("Running on dev")
        factory = WebSocketServerFactory()
        factory.protocol = NMmapperServerProtocol
        loop = asyncio.get_event_loop()
        coro = loop.create_server(factory, '0.0.0.0', 9000)
        server = loop.run_until_complete(coro)
        try:
            loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            server.close()
            loop.close()
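Following up on the note above about not blocking: if processWSCommands itself ever has to do blocking work (file I/O, a subprocess call, heavy parsing), create_task alone won't help, because the coroutine still runs on the event loop thread. A hedged sketch of pushing such work onto the default thread pool with run_in_executor; blocking_parse and process_ws_commands are made-up placeholders, not the real parser:

import asyncio
import json

def blocking_parse(raw_payload):
    # Placeholder for whatever CPU-bound or blocking work the command needs.
    return json.loads(raw_payload)

async def process_ws_commands(raw_payload, proto):
    loop = asyncio.get_event_loop()
    # The blocking part runs in the default ThreadPoolExecutor, so the event
    # loop stays free to accept and serve other websocket connections.
    result = await loop.run_in_executor(None, blocking_parse, raw_payload)
    proto.sendMessage(json.dumps(result).encode("utf-8"))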

Infinite loop prevents new clients from connecting to websocket server

A client connects to the websocket and calls the tail_log method, and then a new client can't connect.
How can I solve this problem?
def on_message(self, message):
    def tail_log(user, ip, port, cmd, log_path, url):
        cmd = "/usr/bin/ssh -p {port} {user}@{ipaddr} {command} {logpath}" \
            .format(user=user, ipaddr=ip, port=port, command=cmd, logpath=log_path)
        f = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        while True:
            line = f.stdout.readline().strip()
            if line == '':
                self.write_message('failed')
                break
            self.write_message(line)

    tail_log(user=SSH_USER, ip=IP_ADDR, cmd=CMD, port=SSH_PORT, log_path=LOG_PATH, url=SOCKET_URL)
Your infinite loop must yield control back to Tornado's event loop, either by executing a yield, await, or by returning from the tail_log function. Since your infinite loop does not yield control to the event loop, the event loop can never process any more events, including new websocket connections.
Try using Tornado's own process module to read from your subprocess's stdout asynchronously. Something like this:
import tornado.ioloop
import tornado.iostream
import tornado.process
import tornado.web
import tornado.websocket

class TailHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        self.write_message(u"Tailing....")
        self.p = tornado.process.Subprocess(
            "tail -f log.log",
            stdout=tornado.process.Subprocess.STREAM,
            stderr=tornado.process.Subprocess.STREAM,
            shell=True)
        tornado.ioloop.IOLoop.current().add_callback(
            lambda: self.tail(self.p.stdout))
        tornado.ioloop.IOLoop.current().add_callback(
            lambda: self.tail(self.p.stderr))
        self.p.set_exit_callback(self.close)

    async def tail(self, stream):
        try:
            while True:
                line = await stream.read_until(b'\n')
                if line:
                    self.write_message(line.decode('utf-8'))
                else:
                    # "tail" exited.
                    return
        except tornado.iostream.StreamClosedError:
            # Subprocess killed.
            pass
        finally:
            self.close()

    def on_close(self):
        # Client disconnected, kill the subprocess.
        self.p.proc.kill()

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("""<html><head><script>
            var ws = new WebSocket("ws://localhost:8888/tail");
            ws.onmessage = function (evt) {
                document.write('<p>' + evt.data + '</p>');
            };</script></head></html>""")

def make_app():
    return tornado.web.Application([
        (r"/", MainHandler),
        (r"/tail", TailHandler),
    ])

app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
If you're not on Python 3.5 yet, substitute @gen.coroutine for "async def", substitute "yield" for "await", and substitute "break" for "return".
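For example, the tail coroutine above rewritten that way (this assumes from tornado import gen has been added to the imports); it is meant as a drop-in replacement for the async def tail method inside TailHandler:

@gen.coroutine
def tail(self, stream):
    try:
        while True:
            line = yield stream.read_until(b'\n')
            if line:
                self.write_message(line.decode('utf-8'))
            else:
                # "tail" exited; break rather than returning a value.
                break
    except tornado.iostream.StreamClosedError:
        # Subprocess killed.
        pass
    finally:
        self.close()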
