I'm trying to sequence some actions in urwid. I made a timer which runs in the background and communicates with the main process, like this:
from multiprocessing import Process, Pipe
import time
import urwid


def show_or_exit(key):
    if key in ('q', 'Q'):
        raise urwid.ExitMainLoop()


class midiloop(urwid.Frame):
    def __init__(self):
        self.message = urwid.Text('Press Space', align='center')
        self.filler = urwid.Filler(self.message, "middle")
        super().__init__(urwid.Frame(self.filler))

    def keypress(self, size, key):
        if key == " ":
            self.seq()
        else:
            return key

    def timer(self, conn):
        x = 0
        while True:
            if (conn.poll() == False):
                pass
            else:
                z = conn.recv()
                if (z == "kill"):
                    return()
            conn.send(x)
            x += 1
            time.sleep(0.05)

    def seq(self):
        self.parent_conn, self.child_conn = Pipe()
        self.p = Process(target=self.timer, args=(self.child_conn,))
        self.p.start()
        while True:
            if (self.parent_conn.poll(None)):
                self.y = self.parent_conn.recv()
                self.message.set_text(str(self.y))
                loop.draw_screen()
            if (self.y > 100):
                self.parent_conn.send("kill")
                self.message.set_text("Press Space")
                return()


if __name__ == '__main__':
    midiloop = midiloop()
    loop = urwid.MainLoop(midiloop, unhandled_input=show_or_exit, handle_mouse=True)
    loop.run()
The problem is that I'm blocking urwid's main loop with the while True:. Can anyone give me a solution to listen for the Q key (to quit the program before the loop reaches its end, for example) and, more generally, to interact with urwid while communicating with the subprocess?
It seems to be rather complicated to combine multiprocessing and urwid.
Since you're using a timer and your class is called midiloop, I'm going to guess that maybe you want to implement a mini sequencer.
One possible way of implementing that is to use an asyncio loop instead of urwid's MainLoop and schedule events with the loop.call_later() function. I've implemented a simple drum machine with that approach in the past, using urwid for drawing the sequencer, asyncio for scheduling the play events, and simpleaudio to play. You can see the code for that here: https://github.com/eliasdorneles/kickit
If you still want to implement communication with multiprocessing, I think your best bet is to use urwid.AsyncioEventLoop and the aiopipe helper for duplex communication.
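To illustrate the first approach, here is a minimal sketch of the counter from the question driven by loop.call_later() on top of urwid.AsyncioEventLoop, so keypresses (including Q to quit) keep working while the sequence runs. The 0.05 s tick and the 100-step limit come from the question; the rest of the widget setup is illustrative:

import asyncio
import urwid

aloop = asyncio.get_event_loop()
message = urwid.Text('Press Space', align='center')


def step(n=0):
    if n > 100:
        message.set_text('Press Space')  # sequence finished
        return
    message.set_text(str(n))
    aloop.call_later(0.05, step, n + 1)  # schedule the next tick without blocking


def on_input(key):
    if key == ' ':
        aloop.call_soon(step)  # start the sequence
    elif key in ('q', 'Q'):
        raise urwid.ExitMainLoop()  # works even while the sequence is running


urwid.MainLoop(urwid.Filler(message, 'middle'),
               unhandled_input=on_input,
               event_loop=urwid.AsyncioEventLoop(loop=aloop)).run()

Because each tick just schedules the next one and returns, the event loop stays free to process input between ticks, which is exactly what the blocking while True: prevented.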
It's not very minimal, I'm afraid. However, I did spend a day writing this Urwid frontend that starts, stops, and communicates with a subprocess.
import os
import sys
from multiprocessing import Process, Pipe, Event
from collections import deque
import urwid
class suppress_stdout_stderr(object):
    """
    Suppresses stdout and stderr by piping them to /dev/null...
    the same place I send bad-faith replies to my tweets
    """

    def __enter__(self):
        self.outnull_file = open(os.devnull, 'w')
        self.errnull_file = open(os.devnull, 'w')

        self.old_stdout_fileno_undup = sys.stdout.fileno()
        self.old_stderr_fileno_undup = sys.stderr.fileno()

        self.old_stdout_fileno = os.dup(sys.stdout.fileno())
        self.old_stderr_fileno = os.dup(sys.stderr.fileno())

        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

        os.dup2(self.outnull_file.fileno(), self.old_stdout_fileno_undup)
        os.dup2(self.errnull_file.fileno(), self.old_stderr_fileno_undup)

        sys.stdout = self.outnull_file
        sys.stderr = self.errnull_file
        return self

    def __exit__(self, *_):
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr

        os.dup2(self.old_stdout_fileno, self.old_stdout_fileno_undup)
        os.dup2(self.old_stderr_fileno, self.old_stderr_fileno_undup)

        os.close(self.old_stdout_fileno)
        os.close(self.old_stderr_fileno)

        self.outnull_file.close()
        self.errnull_file.close()
def subprocess_main(transmit, stop_process):
    with suppress_stdout_stderr():
        import time
        yup = ['yuuuup', 'yuuuuup', 'yeaup', 'yeoop']
        nope = ['noooooooe', 'noooope', 'nope', 'nope']
        mesg = 0
        i = 0
        while True:
            i = i % len(yup)
            if transmit.poll():
                mesg = transmit.recv()
            if mesg == 'Yup':
                transmit.send(yup[i])
            if mesg == 'Nope':
                transmit.send(nope[i])
            if stop_process.wait(0):
                break
            i += 1
            time.sleep(2)
class SubProcess:
    def __init__(self, main):
        """
        Handles forking, stopping, and communication with a subprocess.

        :param main: method to run in the subprocess; its signature is

            def main(transmit, stop_process):

        where transmit is a multiprocessing Pipe used to send data to the
        parent process, and stop_process is a multiprocessing Event to set
        when you want the process to exit.
        """
        self.main = main
        self.recv, self.transmit = None, None
        self.stop_process = None
        self.proc = None

    def fork(self):
        """
        Forks and starts the subprocess.
        """
        self.recv, self.transmit = Pipe(duplex=True)
        self.stop_process = Event()
        self.proc = Process(target=self.main, args=(self.transmit, self.stop_process))
        self.proc.start()

    def write_pipe(self, item):
        self.recv.send(item)

    def read_pipe(self):
        """
        Reads data sent by the process into a list and returns it.
        """
        item = []
        if self.recv is not None:
            try:
                while self.recv.poll():
                    item += [self.recv.recv()]
            except (EOFError, OSError):
                # the pipe was closed on the other end
                pass
        return item

    def stop(self):
        """
        Sets the event to tell the process to exit.
        Note: this is co-operative multitasking; the process must respect
        the flag or this won't work!
        """
        self.stop_process.set()
        self.proc.join()
class UrwidFrontend:
    def __init__(self, subprocess_main):
        """
        Urwid frontend to control the subprocess and display its output.
        """
        self.title = 'Urwid Frontend Demo'
        self.choices = 'Start Subprocess|Quit'.split('|')
        self.response = None
        self.item = deque(maxlen=10)

        self.event_loop = urwid.SelectEventLoop()
        # start the heartbeat
        self.event_loop.alarm(0, self.heartbeat)

        self.main = urwid.Padding(self.main_menu(), left=2, right=2)
        self.top = urwid.Overlay(self.main, urwid.SolidFill(u'\N{MEDIUM SHADE}'),
                                 align='center', width=('relative', 60),
                                 valign='middle', height=('relative', 60),
                                 min_width=20, min_height=9)
        self.loop = urwid.MainLoop(self.top, palette=[('reversed', 'standout', ''), ],
                                   event_loop=self.event_loop)
        self.subprocess = SubProcess(subprocess_main)

    def exit_program(self, button):
        raise urwid.ExitMainLoop()

    def main_menu(self):
        body = [urwid.Text(self.title), urwid.Divider()]
        for c in self.choices:
            button = urwid.Button(c)
            urwid.connect_signal(button, 'click', self.handle_button, c)
            body.append(urwid.AttrMap(button, None, focus_map='reversed'))
        return urwid.ListBox(urwid.SimpleFocusListWalker(body))

    def subproc_menu(self):
        self.response = urwid.Text('Waiting ...')
        body = [self.response, urwid.Divider()]
        choices = ['Yup', 'Nope', 'Stop Subprocess']
        for c in choices:
            button = urwid.Button(c)
            urwid.connect_signal(button, 'click', self.handle_button, c)
            body.append(urwid.AttrMap(button, None, focus_map='reversed'))
        listbox = urwid.ListBox(urwid.SimpleFocusListWalker(body))
        return listbox

    def update_subproc_menu(self, text):
        self.response.set_text(text)

    def handle_button(self, button, choice):
        if choice == 'Start Subprocess':
            self.main.original_widget = self.subproc_menu()
            self.subprocess.fork()
            self.item = deque(maxlen=10)
        if choice == 'Stop Subprocess':
            self.subprocess.stop()
            self.main.original_widget = self.main_menu()
        if choice == 'Quit':
            self.exit_program(button)
        if choice == 'Yup':
            self.subprocess.write_pipe('Yup')
        if choice == 'Nope':
            self.subprocess.write_pipe('Nope')

    def heartbeat(self):
        """
        Heartbeat that runs 24 times per second.
        """
        # read from the process
        self.item.append(self.subprocess.read_pipe())
        # display it
        if self.response is not None:
            self.update_subproc_menu(['Subprocess started\n', f'{self.item}\n', ])
            self.loop.draw_screen()
        # set the next beat
        self.event_loop.alarm(1 / 24, self.heartbeat)

    def run(self):
        self.loop.run()


if __name__ == "__main__":
    app = UrwidFrontend(subprocess_main)
    app.run()
Most of the time I am using the asyncio API. But how would I create an async generator "from scratch"?
Say I have a classic generator, and I want to make it async. How can I do that?
I naively thought that I could do something like the code below, but it does not run asynchronously (the three for loops run one after the other instead of concurrently). My hope was to make some_loop() asynchronous by calling it from some_async_loop() and releasing the event loop after each iteration with asyncio.sleep(0):
#!/usr/bin/env python3
import asyncio


async def run():
    task = asyncio.ensure_future(run_async_loop())
    asyncio.ensure_future(run_async_loop())
    asyncio.ensure_future(run_async_loop())
    await task


async def run_async_loop():
    async for i in some_async_loop():
        print(i)


async def some_async_loop():
    for i in some_loop():
        yield i
        asyncio.sleep(0)


def some_loop():
    for i in range(10):
        yield i


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(run())
I'm glad that fixing the call to asyncio.sleep() got things running.
I'm partly concerned about your run() function, because you're only await-ing the first task, which means your code could exit before the other tasks are complete. I would suggest this:
async def run():
    tasks = [run_async_loop(), run_async_loop(), run_async_loop()]
    await asyncio.gather(*tasks)
I think you can also simplify your __main__ block:
if __name__ == '__main__':
    asyncio.run(run())
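Putting both changes together, and with the missing await added in front of asyncio.sleep(0) (which is what actually hands control back to the event loop between iterations), the whole script would look like this:

#!/usr/bin/env python3
import asyncio


def some_loop():
    for i in range(10):
        yield i


async def some_async_loop():
    for i in some_loop():
        yield i
        await asyncio.sleep(0)  # release the event loop after each item


async def run_async_loop():
    async for i in some_async_loop():
        print(i)


async def run():
    # gather() waits for all three tasks, so none are abandoned on exit
    await asyncio.gather(run_async_loop(), run_async_loop(), run_async_loop())


if __name__ == '__main__':
    asyncio.run(run())

With the await in place, the three loops interleave their output instead of running one after the other.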
I'm learning about asyncio, and when trying to run this script I get this error:
ValueError: a coroutine was expected, got <async_generator object mygen at 0x7fa2af959a60>
What am I missing here?
import asyncio


async def mygen(u=10):
    """Yield powers of 2."""
    i = 0
    while i < int(u):
        yield 2 ** i
        i += 1
        await asyncio.sleep(0.1)


asyncio.run(mygen(5))
The asyncio.run function expects a coroutine but mygen is an asynchronous generator.
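You can see the difference with the inspect module; here's a quick check using a trivial stand-in for mygen:

import inspect


async def mygen(u=10):  # same kind of function as in the question
    yield u


print(inspect.isasyncgenfunction(mygen))   # True: calling mygen() builds an async generator
print(inspect.iscoroutinefunction(mygen))  # False: asyncio.run() expects a coroutine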
You can try something like this:
test.py:
import asyncio


async def mygen(u=10):
    """Yield powers of 2."""
    i = 0
    while i < int(u):
        yield 2 ** i
        i += 1
        await asyncio.sleep(0.1)


async def main():
    async for i in mygen(5):
        print(i)


if __name__ == "__main__":
    asyncio.run(main())
Test:
$ python test.py
1
2
4
8
16
References:
What does the "yield" keyword do?
PEP 525 -- Asynchronous Generators
This is my Python 3.8 code on macOS, and it throws FileNotFoundError. It works fine on Python 3.6 and 3.7. Very strange!
# coding: utf-8
import time
from multiprocessing import Process, Lock


def task(name, lock):
    lock.acquire()
    print('%s 1' % name)
    time.sleep(1)
    print('%s 2' % name)
    lock.release()


if __name__ == '__main__':
    mutex = Lock()
    for i in range(3):
        p = Process(target=task, args=('Process %s' % i, mutex))
        p.start()
And the result:
......
  File "/Library/Frameworks/Python.framework/Versions/3.8/lib/python3.8/multiprocessing/synchronize.py", line 110, in __setstate__
    self._semlock = _multiprocessing.SemLock._rebuild(*state)
FileNotFoundError: [Errno 2] No such file or directory
You need to add a join in there. The reason this only bites on Python 3.8 is that macOS switched from the fork to the spawn start method by default in 3.8: each child re-creates the Lock from pickled state, and if the parent has already exited by then, the named semaphore backing the Lock has most likely been cleaned up already, hence the FileNotFoundError. Joining the children keeps the parent alive until they finish. Note that there is no guarantee the processes will start in numerical order as you require; you would need to add a delay before each process start to ensure it is added to the execution queue in the order you expect.
# coding: utf-8
import time
from multiprocessing import Process, Lock


def task(name, lock):
    lock.acquire()
    print('%s 1' % name)
    time.sleep(1)
    print('%s 2' % name)
    lock.release()


if __name__ == '__main__':
    processes = []
    mutex = Lock()
    for i in range(3):
        p = Process(target=task, args=('Process %s' % i, mutex))
        processes.append(p)
        p.start()
    for p in processes:
        p.join()
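If you mainly want the pre-3.8 behaviour back, you can also force the old start method explicitly. This is a sketch under that assumption; note that CPython moved macOS to spawn precisely because fork can be unsafe there with threads and some system frameworks:

# coding: utf-8
import time
from multiprocessing import Process, Lock, set_start_method


def task(name, lock):
    with lock:  # acquire/release via context manager
        print('%s 1' % name)
        time.sleep(1)
        print('%s 2' % name)


if __name__ == '__main__':
    set_start_method('fork')  # restore the pre-3.8 default on macOS
    mutex = Lock()
    processes = [Process(target=task, args=('Process %s' % i, mutex)) for i in range(3)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()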
A client connects to the websocket and calls the tail_log method, and then a new client can't connect. How can I solve this problem?
def on_message(self, message):
    def tail_log(user, ip, port, cmd, log_path, url):
        cmd = "/usr/bin/ssh -p {port} {user}@{ipaddr} {command} {logpath}" \
            .format(user=user, ipaddr=ip, port=port, command=cmd, logpath=log_path)
        f = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        while True:
            line = f.stdout.readline().strip()
            if line == '':
                self.write_message('failed')
                break
            self.write_message(line)

    tail_log(user=SSH_USER, ip=IP_ADDR, cmd=CMD, port=SSH_PORT, log_path=LOG_PATH, url=SOCKET_URL)
Your infinite loop must yield control back to Tornado's event loop, either by executing a yield, await, or by returning from the tail_log function. Since your infinite loop does not yield control to the event loop, the event loop can never process any more events, including new websocket connections.
Try using Tornado's own process module to read from your subprocess's stdout asynchronously. Something like this:
import tornado.ioloop
import tornado.iostream
import tornado.process
import tornado.web
import tornado.websocket


class TailHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        self.write_message(u"Tailing....")
        self.p = tornado.process.Subprocess(
            "tail -f log.log",
            stdout=tornado.process.Subprocess.STREAM,
            stderr=tornado.process.Subprocess.STREAM,
            shell=True)
        tornado.ioloop.IOLoop.current().add_callback(
            lambda: self.tail(self.p.stdout))
        tornado.ioloop.IOLoop.current().add_callback(
            lambda: self.tail(self.p.stderr))
        self.p.set_exit_callback(self.close)

    async def tail(self, stream):
        try:
            while True:
                line = await stream.read_until(b'\n')
                if line:
                    self.write_message(line.decode('utf-8'))
                else:
                    # "tail" exited.
                    return
        except tornado.iostream.StreamClosedError:
            # Subprocess killed.
            pass
        finally:
            self.close()

    def on_close(self):
        # Client disconnected, kill the subprocess.
        self.p.proc.kill()


class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("""<html><head><script>
        var ws = new WebSocket("ws://localhost:8888/tail");
        ws.onmessage = function (evt) {
            document.write('<p>' + evt.data + '</p>');
        };</script></head></html>""")


def make_app():
    return tornado.web.Application([
        (r"/", MainHandler),
        (r"/tail", TailHandler),
    ])


app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
If you're not on Python 3.5 yet, substitute @gen.coroutine for "async def", "yield" for "await", and "break" for "return".
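For reference, the tail coroutine rewritten that way would look roughly like this (a sketch applying exactly those substitutions; open() and on_close() stay as above):

import tornado.iostream
import tornado.websocket
from tornado import gen


class TailHandler(tornado.websocket.WebSocketHandler):
    # open() and on_close() are unchanged from the handler above

    @gen.coroutine
    def tail(self, stream):
        try:
            while True:
                line = yield stream.read_until(b'\n')
                if line:
                    self.write_message(line.decode('utf-8'))
                else:
                    # "tail" exited.
                    break
        except tornado.iostream.StreamClosedError:
            # Subprocess killed.
            pass
        finally:
            self.close()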