import asyncio, aiohttp, logging, time, random

pause = 1/10

async def req(i):
    await asyncio.sleep(random.randint(1, 5))

async def run():
    for i in range(100):
        asyncio.ensure_future(req(i))
    t0 = time.time()
    await asyncio.sleep(pause)
    print(time.time() - t0)
    tasks = asyncio.Task.all_tasks()
    if len(tasks) != 1:
        tasks.remove(asyncio.Task.current_task())
        await asyncio.wait(tasks)

loop = asyncio.get_event_loop()
loop.run_until_complete(run())
The output is:
Why did await asyncio.sleep(pause) finish after only 0.093654s?
It's a bug/feature of asyncio on Windows. You can read the discussion here.
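For what it's worth, here is a minimal diagnostic sketch of my own (assuming Python 3.7+ for asyncio.run) that times the same sleep against both the wall clock and the event loop's own clock, which is the clock asyncio actually schedules sleeps against; comparing the two can make the Windows timer granularity visible:
import asyncio
import time

async def measure(pause=0.1, n=10):
    loop = asyncio.get_running_loop()
    for _ in range(n):
        t_wall = time.time()   # wall clock, as used in the question
        t_loop = loop.time()   # monotonic clock asyncio schedules sleeps against
        await asyncio.sleep(pause)
        print(f"wall: {time.time() - t_wall:.6f}  loop: {loop.time() - t_loop:.6f}")

asyncio.run(measure())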
How do I create a Streamlit dashboard that doesn't continuously append the new values from asyncio.gather(firstWorker(), secondWorker())? The code below runs, but it quickly turns into a very long dashboard. I was hoping to have two fixed Streamlit title or metric elements representing workerOne and workerTwo, where only val_one and val_two get updated on the dashboard. Streamlit's metric and text elements all seem to have the same behavior of continuously appending; any tips appreciated.
I am using Python 3.10 on Windows 10, running the dashboard with: $ streamlit run app.py
import streamlit as st
import asyncio
import random as r

val_one = 0
val_two = 0

st.title("Hello World")

async def firstWorker():
    global val_one
    while True:
        await asyncio.sleep(r.randint(1, 3))
        val_one = r.randint(1, 10)
        st.metric("First Worker Executed: ", val_one)

async def secondWorker():
    global val_two
    while True:
        await asyncio.sleep(r.randint(1, 3))
        val_two = r.randint(1, 10)
        st.metric("Second Worker Executed: ", val_two)

async def main():
    await asyncio.gather(
        firstWorker(),
        secondWorker()
    )

if __name__ == '__main__':
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        pass
    finally:
        print("Closing Loop")
        loop.close()
If possible, I'm trying to avoid an infinitely long dashboard, and to have the asyncio processes keep running in the background even when the Streamlit dashboard isn't being viewed in a browser.
Put the code after if __name__ == '__main__': inside an st.empty() container so the previous content is overwritten whenever an update is made.
import streamlit as st
import asyncio
import random as r

val_one = 0
val_two = 0

st.title("Hello World")

async def firstWorker():
    global val_one
    while True:
        await asyncio.sleep(r.randint(1, 3))
        val_one = r.randint(1, 10)
        st.metric("First Worker Executed: ", val_one)

async def secondWorker():
    global val_two
    while True:
        await asyncio.sleep(r.randint(1, 3))
        val_two = r.randint(1, 10)
        st.metric("Second Worker Executed: ", val_two)

async def main():
    await asyncio.gather(
        firstWorker(),
        secondWorker()
    )

if __name__ == '__main__':
    with st.empty():  # Modified to use an empty container
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            asyncio.run(main())
        except KeyboardInterrupt:
            pass
        finally:
            print("Closing Loop")
            loop.close()
Output:
Version 2:
st.title("Hello World")

async def firstWorker():
    await asyncio.sleep(r.randint(1, 3))
    val_one = r.randint(1, 10)
    st.metric("First Worker Executed: ", val_one)

async def secondWorker():
    await asyncio.sleep(r.randint(1, 3))
    val_two = r.randint(1, 10)
    st.metric("Second Worker Executed: ", val_two)

async def main():
    with st.empty():
        while True:
            left_col, right_col = st.columns(2)
            with left_col:
                await asyncio.gather(firstWorker())
            with right_col:
                await asyncio.gather(secondWorker())

if __name__ == '__main__':
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        pass
    finally:
        print("Closing Loop")
        loop.close()
Output:
I don't know why you use global variables; I got rid of them in this version unless you need them for something else.
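An alternative sketch of the same idea (untested, assuming a recent Streamlit where an st.empty() placeholder exposes element methods such as .metric()): give each worker its own placeholder so only that one slot is overwritten on each update.
import asyncio
import random as r
import streamlit as st

st.title("Hello World")

# One placeholder per worker: writing into it replaces the previous element
placeholder_one = st.empty()
placeholder_two = st.empty()

async def firstWorker():
    while True:
        await asyncio.sleep(r.randint(1, 3))
        placeholder_one.metric("First Worker Executed: ", r.randint(1, 10))

async def secondWorker():
    while True:
        await asyncio.sleep(r.randint(1, 3))
        placeholder_two.metric("Second Worker Executed: ", r.randint(1, 10))

async def main():
    await asyncio.gather(firstWorker(), secondWorker())

if __name__ == '__main__':
    asyncio.run(main())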
I'm new to the asyncio world. Going straight to the point: I want to make a request (aiohttp) to a site. If waiting for the answer takes more than N seconds, I want to stop waiting, then do the request again, with a limit on the number of attempts if needed.
import asyncio
import aiohttp
from time import time

async def search_skiping_add(name_search):
    start_time = time()
    async with aiohttp.ClientSession() as session:
        url = f'https://somesitehere.com'
        r = await session.get(url)
    final_time = time()
    result_time = round(final_time - start_time)
    print(result_time)
I know there may be a way to do it synchronously, but it's also an excuse to start using asyncio.
This should give you an idea of how to use async with aiohttp:
from aiohttp import ClientSession, ClientTimeout
from asyncio import gather, create_task, sleep, run
from traceback import format_exc

def returnPartionedList(inputlist: list, x: int = 100) -> list:  # returns inputlist split into chunks of x items, default is 100
    return([inputlist[i:i + x] for i in range(0, len(inputlist), x)])

# You could change validate to an integer and thereby increase the retry count as needed.
async def GetRessource(url: str, session: ClientSession, validate: bool = False) -> dict:
    try:
        async with session.get(url) as response:
            if response.status == 200:
                r: dict = await response.json()  # Set equal to .text() to get results as a string
                return(r)
            else:
                r: str = await response.text()
                if not validate:
                    await sleep(3)  # Sleep for x amount of seconds before retry
                    return(await GetRessource(url, session, True))
                print(f"Error, got response code: {response.status} message: {r}")
    except Exception:
        print(f"General Exception:\n{format_exc()}")
    return({})

async def GetUrls(urls: list) -> list:
    resultsList: list = []
    UrlPartitions: list = returnPartionedList(urls, 20)  # Rate limit to 20 requests per loop
    async with ClientSession(timeout=ClientTimeout(total=15)) as session:  # Time in seconds to wait before terminating a request; the default is 300 seconds (5 minutes).
        for partition in UrlPartitions:
            partitionTasks: list = [create_task(GetRessource(url, session)) for url in partition]
            resultsList.append(await gather(*partitionTasks, return_exceptions=False))
    return(resultsList)  # Or you can do more processing here before returning

async def main():
    urls: list = ["...", "...", "..."]  # list of urls to get from
    results: list = await GetUrls(urls)
    print(results)

if __name__ == "__main__":
    run(main())
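If you only need the two things from the question (stop waiting after N seconds, retry a limited number of times), a smaller sketch along these lines should also work; the URL is a placeholder and asyncio.wait_for is just one of several ways to bound the wait:
import asyncio
import aiohttp

async def _get_text(session: aiohttp.ClientSession, url: str) -> str:
    async with session.get(url) as response:
        return await response.text()

async def fetch_with_retries(url: str, n_seconds: float = 5, attempts: int = 3):
    async with aiohttp.ClientSession() as session:
        for attempt in range(1, attempts + 1):
            try:
                # Cancel this attempt if the request/read takes longer than n_seconds
                return await asyncio.wait_for(_get_text(session, url), timeout=n_seconds)
            except asyncio.TimeoutError:
                print(f"Attempt {attempt}/{attempts} timed out after {n_seconds}s")
    return None  # every attempt timed out

if __name__ == "__main__":
    print(asyncio.run(fetch_with_retries("https://somesitehere.com")))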
I am trying to build a level system and I got it working by following a tutorial. But when I restart my bot, it makes me start leveling again: it creates new data in the JSON instead of updating the old entries, and after 2 hours of different attempts I don't know how to solve it.
EDIT: I decided to update the code by removing the functions to see how it went. Although I notice an improvement, it sometimes still resets the data and I don't understand why.
Code:
# with functions
import discord
import random
from discord import client
from discord.ext import commands
import json
from discord.utils import get
from random import choice

client = commands.Bot(command_prefix='°')
users = {}

@client.event
async def on_ready():
    print('Bot online')
    global users
    try:
        with open('ranking.json') as f:
            users = json.load(f)
    except FileNotFoundError:
        print("Unable to load ranking.json")
        users = {}

@client.event
async def on_message(message):
    if message.author == client.user:
        return
    xp = random.randrange(5, 10)
    await update_data(users, message.author)
    await add_experience(users, message.author, xp)
    await level_up(users, message.author, message)
    _save()
    await client.process_commands(message)

async def update_data(users, user):
    if user.id not in users:
        print("pass")
        users[user.id] = {}
        users[user.id]["experience"] = 0
        users[user.id]["level"] = 0

async def add_experience(users, user, xp):
    users[user.id]["experience"] += xp

async def level_up(users, user, message):
    experience = users[user.id]["experience"]
    lvl_start = users[user.id]["level"]
    lvl_end = int(experience ** (1/4))
    print(lvl_start)
    print(lvl_end)
    if lvl_start < lvl_end:
        await message.channel.send(f"{user.mention} has leveled up to level {lvl_end}")
        users[user.id]["level"] = lvl_end

def _save():
    with open('ranking.json', 'w+') as f:
        json.dump(users, f)
# without functions
import discord
import random
from discord import client
from discord.ext import commands
import json
from discord.utils import get
from random import choice

client = commands.Bot(command_prefix='°')
users = {}

@client.event
async def on_ready():
    print('Bot online')
    global users
    try:
        with open('ranking.json') as f:
            users = json.load(f)
    except FileNotFoundError:
        print("Unable to load ranking.json")
        users = {}

@client.event
async def on_message(message):
    id_user = str(message.author.id)
    if message.author == client.user:
        return
    xp = random.randrange(5, 10)
    if id_user not in users:
        print(message.author.name)
        users[id_user] = {}
        users[id_user]["experience"] = 0
        users[id_user]["level"] = 0
    users[id_user]["experience"] += xp
    experience = users[id_user]["experience"]
    lvl_start = users[id_user]["level"]
    lvl_end = int(experience ** (1 / 4))
    if lvl_start < lvl_end:
        await message.channel.send(f"{message.author.mention} has leveled up to level {lvl_end}")
        users[id_user]["level"] = lvl_end
    _save()
    await client.process_commands(message)

def _save():
    with open('ranking.json', 'w+') as f:
        json.dump(users, f)
ranking.json
{"488826524791734275": {"experience": 56, "level": 2}, "488826524791734275": {"experience": 32, "level": 2}}
As you can see from the JSON, my user ID is repeated twice; the screenshot shows my messages from when I restarted the bot during execution.
Is it also possible to update the ranking every 30 seconds? I have no idea how to do it.
JSON keys can't be ints, so change user.id to str(user.id).
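To see why the duplicate appears, here is a small standalone sketch of the round trip (no discord.py needed): json.dump writes int keys as strings and json.load returns string keys, so an entry stored under the int user.id is never found again after a restart.
import json

users = {488826524791734275: {"experience": 56, "level": 2}}  # int key at runtime

with open('ranking.json', 'w') as f:
    json.dump(users, f)          # written as {"488826524791734275": {...}}

with open('ranking.json') as f:
    users = json.load(f)         # keys come back as strings

print(488826524791734275 in users)        # False -> the bot "starts over"
print(str(488826524791734275) in users)   # True  -> use str(user.id) everywhere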
How do I make a static timer where the user can just type ??abc and the bot does a countdown from, let's say, 300s? I want the bot to edit the same message and not send multiple messages. Thanks.
import asyncio

@bot.command()
async def countdown(ctx, sec: int):
    msg = await ctx.send(f'{sec}s')
    for second in range(sec, 0, -1):
        await msg.edit(content=f'{second}s')
        await asyncio.sleep(1)
@client.command(help="Countdown from specified seconds!")
async def countdown(ctx, t: int):
    msg = await ctx.send(f'Counting down from {t}!')
    while t > 0:
        t -= 1
        await msg.edit(content=f'{t} seconds remaining')
        await asyncio.sleep(1)
    await ctx.send(f'Countdown end reached! {ctx.message.author.mention}')
Do try to understand what all this means; I don't want to spoon-feed anybody ;).
GL
I have doubts about Python's await and have an example: it tries to use await to fetch the result from a future object.
import time
import asyncio
import random
import threading

db = {
    "yzh": "pig",
    "zhh": "big pig"
}

loop = asyncio.get_event_loop()

def _get_redis(username, clb):
    def foo():
        data = db[username]
        time.sleep(0.1)
        print("start clb")
        clb(data)
    t1 = threading.Thread(target=foo)
    t1.start()

def get_redis(username):
    print("start get redis")
    myfuture = asyncio.Future()
    def clb(result):
        print("clb call")
        myfuture.set_result(result)
    _get_redis(username, clb)
    return myfuture

async def main():
    print("start main")
    data = await get_redis("yzh")
    print("data is {}".format(data))

loop.run_until_complete(asyncio.ensure_future(main()))
loop.close()
and I got this output, without the future's result:
start main
start get redis
start clb
clb call
How should I use await to get the future's result? I have tried many times. Thanks for your help.
As you said in your comment, you're supposed to use loop.call_soon_threadsafe when running an asyncio callback from a thread:
loop.call_soon_threadsafe(myfuture.set_result, result)
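Applied to the question's get_redis, only the callback changes (a sketch, keeping the module-level loop from the question):
def get_redis(username):
    print("start get redis")
    myfuture = asyncio.Future()

    def clb(result):
        print("clb call")
        # Hand the result over to the event loop thread instead of
        # calling set_result directly from the worker thread
        loop.call_soon_threadsafe(myfuture.set_result, result)

    _get_redis(username, clb)
    return myfuture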
However, a better approach for calling a synchronous function from asyncio is to use loop.run_in_executor:
def _get_redis(username):
    time.sleep(0.1)
    return db[username]

async def get_redis(username):
    return await loop.run_in_executor(None, _get_redis, username)
This way, you won't have to deal with futures and thread-safe callbacks.
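Putting the run_in_executor approach into a self-contained script based on the question's setup, a minimal sketch (using asyncio.run and get_running_loop rather than a module-level loop) might look like this:
import asyncio
import time

db = {
    "yzh": "pig",
    "zhh": "big pig",
}

def _get_redis(username):
    # Blocking lookup, runs in the default ThreadPoolExecutor
    time.sleep(0.1)
    return db[username]

async def get_redis(username):
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, _get_redis, username)

async def main():
    print("start main")
    data = await get_redis("yzh")
    print("data is {}".format(data))  # prints "data is pig"

asyncio.run(main())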