How do I represent a two-dimensional array in Protocol Buffers?

For example:
[[1,2],[3,4]...]
I just wanted to test whether gRPC supports a two-dimensional array, but something goes wrong even though I am following the official docs.
The server is as follows:
import time
from concurrent import futures

import grpc
from grpc._cython import cygrpc

import hello_pb2
import hello_pb2_grpc

_ONE_DAY_IN_SECONDS = 60 * 60 * 24

data = [[i, 9] for i in range(128)]

class Greeter(hello_pb2_grpc.GreeterServicer):
    def SayHello(self, request, context):
        return hello_pb2.HelloReply(results=data)

def serve():
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10), options=[
        (cygrpc.ChannelArgKey.max_send_message_length, -1),
        (cygrpc.ChannelArgKey.max_receive_message_length, -1)
    ])
    hello_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
    server.add_insecure_port('[::]:50052')
    server.start()
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)

if __name__ == '__main__':
    serve()
The client is as follows:
import time

import grpc
from grpc._cython import cygrpc

import hello_pb2
import hello_pb2_grpc

def insecure_channel(host, port):
    channel = grpc.insecure_channel(
        target=host if port is None else '%s:%d' % (host, port),
        options=[(cygrpc.ChannelArgKey.max_send_message_length, -1),
                 (cygrpc.ChannelArgKey.max_receive_message_length, -1)])
    return grpc.beta.implementations.Channel(channel)

def run():
    channel = grpc.insecure_channel('localhost:50052')
    stub = hello_pb2_grpc.GreeterStub(channel)
    st = time.time()
    response = stub.SayHello(hello_pb2.HelloRequest(name='test'))
    et = time.time() - st
    print("Greeter client received: {}, {}".format(type(response.results), et))

if __name__ == '__main__':
    run()
The protobuf definition is as follows:
syntax = "proto3";
package hello;
service Greeter {
rpc SayHello (HelloRequest) returns (HelloReply) {}
}
message HelloRequest {
string name = 1;
}
message HelloReply {
repeated Result results = 1;
}
message Result {
repeated int32 index = 1;
repeated int32 count = 2;
}
But I get an error like this:
  File "greeter_server.py", line 19, in SayHello
    return hello_pb2.HelloReply(results=data)
TypeError: Parameter to MergeFrom() must be instance of same class: expected hello.Result got list.
I am using Python 3.
Thanks in advance.

As per your question, [[1,2],[3,4]...] is an array of arrays.
Something like this should solve your problem:
message InternalArray {
    repeated int32 internal_array = 1;
}
Then use the wrapper message as a repeated field inside the enclosing message:
repeated InternalArray array = 1;

The problem is that you are passing a list where a hello.Result instance is expected.
Use:
import hello_pb2

to_return = hello_pb2.Result()
to_return.index.append(111)  # index and count are repeated fields, so append/extend instead of assigning
to_return.count.append(222)
Now return `to_return` (wrapped in a HelloReply) from the RPC call.
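To return the whole two-dimensional list from the question, a minimal sketch (assuming the hello.proto definition and the data list shown above) could build the reply like this:
import hello_pb2

data = [[i, 9] for i in range(128)]

reply = hello_pb2.HelloReply()
for index, count in data:
    result = reply.results.add()   # add() creates a new Result inside the repeated field
    result.index.append(index)     # index and count are repeated int32, so append
    result.count.append(count)

# Equivalent construction using the message constructors:
# reply = hello_pb2.HelloReply(
#     results=[hello_pb2.Result(index=[i], count=[c]) for i, c in data])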


python coroutine asyncio/ await / aiohttp

I'm new to the asyncio world.
Going straight to the point: I want to make a request to a site with aiohttp.
If waiting for the answer takes more than N seconds, I want to stop waiting, and retry the request up to a limited number of attempts if needed.
import aiohttp
from time import time

async def search_skiping_add(name_search):
    start_time = time()
    async with aiohttp.ClientSession() as session:
        url = f'https://somesitehere.com'
        r = await session.get(url)
    final_time = time()
    result_time = round(final_time - start_time)
    print(result_time)
I know there is probably a way to do this synchronously, but it's also an excuse to start using asyncio.
This should give you an idea of how to use async with aiohttp:
from aiohttp import ClientSession, ClientTimeout
from asyncio import gather, create_task, sleep, run
from traceback import format_exc

def returnPartionedList(inputlist: list, x: int = 100) -> list:  # returns inputlist split into x parts, default is 100
    return [inputlist[i:i + x] for i in range(0, len(inputlist), x)]

# You could change validate to an integer and thereby increase the retry count as needed.
async def GetRessource(url: str, session: ClientSession, validate: bool = False) -> dict:
    try:
        async with session.get(url) as response:
            if response.status == 200:
                r: dict = await response.json()  # Use .text() instead to get the result as a string
                return r
            else:
                r: str = await response.text()
                if not validate:
                    await sleep(3)  # Sleep for x amount of seconds before retrying
                    return await GetRessource(url, session, True)
                print(f"Error, got response code: {response.status} message: {r}")
    except Exception:
        print(f"General Exception:\n{format_exc()}")
    return {}

async def GetUrls(urls: list) -> list:
    resultsList: list = []
    UrlPartitions: list = returnPartionedList(urls, 20)  # Rate limit to 20 requests per loop
    # ClientTimeout.total is the time in seconds to wait before terminating; the default is 300 seconds (5 minutes).
    async with ClientSession(timeout=ClientTimeout(total=15)) as session:
        for partition in UrlPartitions:
            partitionTasks: list = [create_task(GetRessource(url, session)) for url in partition]
            resultsList.append(await gather(*partitionTasks, return_exceptions=False))
    return resultsList  # Or you can do more processing here before returning

async def main():
    urls: list = ["...", "...", "..."]  # list of URLs to fetch
    results: list = await GetUrls(urls)
    print(results)

if __name__ == "__main__":
    run(main())
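For the narrower ask (abort a single request after N seconds and retry a limited number of times), a minimal sketch along the lines of the search_skiping_add coroutine from the question could look like this; the URL, timeout, and attempt count are placeholders:
import asyncio
import aiohttp

async def fetch_with_retries(url, timeout_s=5, attempts=3):
    async with aiohttp.ClientSession() as session:
        for attempt in range(1, attempts + 1):
            try:
                # Abort this attempt if it takes longer than timeout_s seconds.
                response = await asyncio.wait_for(session.get(url), timeout=timeout_s)
                async with response:
                    return await response.text()
            except asyncio.TimeoutError:
                print(f"Attempt {attempt} timed out after {timeout_s}s, retrying...")
    return None  # all attempts timed out

# Example usage (placeholder URL):
# asyncio.run(fetch_with_retries("https://somesitehere.com", timeout_s=5, attempts=3))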

Asynchronous server hanging in Python

I am currently using a Python asynchronous server to serve some clients. The server works well at first but eventually hangs without showing whether it is receiving any requests from clients; when I press Ctrl-C it then shows that it is receiving the requests from the clients, and it does not stop, it just continues to receive requests.
I don't know where this bug is coming from.
Thank you in advance.
import asyncio, json
from collections.abc import Coroutine
from typing import Any
import Engine
import struct

header_struct = struct.Struct("!Q")  # messages up to 2**64 - 1 in length

async def recvall(reader, length):
    blocks = []
    while length:
        block = await reader.read(length)
        if not block:
            raise EOFError('socket closed with {} bytes left in this block'.format(length))
        length -= len(block)
        blocks.append(block)
    return b''.join(blocks)

async def get_block(reader):
    data = await recvall(reader, header_struct.size)
    (block_length,) = header_struct.unpack(data)
    return await recvall(reader, block_length)

async def put_block(writer, message):
    block_length = len(message)
    writer.write(header_struct.pack(block_length))
    writer.write(message)
    # await writer.drain()

async def handle_conversation(reader, writer):
    address__ = writer.get_extra_info("peername")
    print("Accepted connection from {}".format(address__))
    while True:
        # ************************ try to check if there is data to send ************************
        try:
            block = await get_block(reader)
            # decode the data
            data = block.decode()
            decoded_data = json.loads(data)
            # don't forget to make this synchronous
            answer = await Engine.get_answer(decoded_data["Task"], decoded_data["content"])
            # don't forget to check if there is necessary data to push and to make sure the data is conveyed
            await put_block(writer, answer)
            print(answer)
        except Exception as e:
            raise

if __name__ == '__main__':
    address = Engine.parse_command_line("asyncio server using coroutine")
    # loop = asyncio.get_event_loop()
    # coro = asyncio.start_server(handle_conversation, *address)

    async def main():
        server = await asyncio.start_server(
            handle_conversation, *address)
        addr = server.sockets[0].getsockname()
        print(f'Serving on {addr}')
        async with server:
            await server.serve_forever()

    asyncio.run(main(), debug=True)
Engine Code
import argparse
import json
import time
import upload_pic_and_search
import accept_connections
import connect
import opinion_poll
import share
import zmq
from jsonrpclib import Server

context = zmq.Context()

aphorisms = {"share": share.share_,
             "poll": opinion_poll.add_poll,
             "add_profile_pic": upload_pic_and_search.profile_pic,
             "connect": connect.connect,
             "accept_connection": accept_connections.accept_connection}

def sighn_up(doc):
    """This function will connect to sign up."""
    proxy = Server('http://localhost:7002')
    answer = proxy.sighn_up(doc)
    return answer

def Verification(doc):
    """Code verification routine."""
    proxy = Server('http://localhost:7002')
    answer = proxy.verify(doc)
    return answer

def login(doc):
    """This function handles authentication."""
    proxy = Server('http://localhost:7002')
    answer = proxy.autheticate(doc)
    return answer

def post(doc):
    """Connect to the server that handles posts."""
    proxy = Server('http://localhost:6700')
    answer = proxy.post(doc)
    return answer

def comment(doc):
    """Connect to the server that stores comments."""
    proxy = Server('http://localhost:6701')
    answer = proxy.comments_(doc)
    return answer

def reply(doc):
    """Store the reply."""
    proxy = Server('http://localhost:6702')
    answer = proxy.reply(doc)
    return answer

def share(doc):
    """Share the post."""
    proxy = Server('http://localhost:6703')
    answer = proxy.share(doc)
    return answer

def likes(doc):
    """Connect to the likes queue."""
    zcontext = zmq.Context()
    osock = zcontext.socket(zmq.PUSH)
    osock.connect("tcp://127.0.0.1:6704")
    osock.send_json(doc)
    return {"Task": "like", "like": True}

def follow(doc):
    """Handle the follow routine."""
    zcontext = zmq.Context()
    osock = zcontext.socket(zmq.PUSH)
    osock.connect("tcp://127.0.0.1:6705")
    osock.send_json(doc)

def connect(doc):
    """Connect to the routine for connections."""
    zcontext = zmq.Context()
    osock = zcontext.socket(zmq.PUSH)
    osock.connect("tcp://127.0.0.1:6706")
    osock.send_json(doc)

def accept_connection(doc):
    """The queue responsible for accepting connections."""
    zcontext = zmq.Context()
    osock = zcontext.socket(zmq.PUSH)
    osock.connect("tcp://127.0.0.1:6707")
    osock.send_json(doc)

def add_profile_pic(doc):
    """Add the profile pic of the user."""
    proxy = Server('http://localhost:7006')
    answer = proxy.profile_pic(doc)
    return answer

def search(doc):
    """Search for the user in the database."""
    proxy = Server('http://localhost:7006')
    answer = proxy.search(doc)
    return answer

def profile(doc):
    """Fetch the user's profile from the database."""
    proxy = Server('http://localhost:7006')
    answer = proxy.profile(doc)
    return answer

async def get_answer(aphorism, content):
    """Return the response to a particular question."""
    # time.sleep(0.0)
    # fetch the responsible function
    # function = aphorisms.get(aphorism, "Error:Unknown aphorism.")
    function = eval(aphorism)
    answer = function(content)
    return send(answer)

def send(data):
    """Prepare the data to be sent via the socket."""
    json_data = json.dumps(data)
    data_bytes = json_data.encode()
    return data_bytes

def parse_command_line(description):
    """Parse the command line and return a socket address."""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('host', help="IP or hostname")
    parser.add_argument("-p", metavar='port', type=int, default=1060, help="TCP port (default 1060)")
    args = parser.parse_args()
    address = (args.host, args.p)
    return address

def recv_untill(sock, suffix):
    """Receive bytes over socket `sock` until we receive the `suffix`."""
    message = sock.recv(4096)
    if not message:
        raise EOFError("Socket closed")
    while not message.endswith(suffix):
        data = sock.recv(4096)
        if not data:
            raise IOError('received {!r} then socket closed'.format(message))
        message += data
    return message
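For reference, here is a hypothetical client sketch for the length-prefixed JSON framing used by recvall/get_block/put_block above (not part of the original post); the host, port, task name, and content are placeholders:
import asyncio
import json
import struct

header_struct = struct.Struct("!Q")  # same 8-byte length prefix as the server

async def send_request(host, port, task, content):
    reader, writer = await asyncio.open_connection(host, port)
    payload = json.dumps({"Task": task, "content": content}).encode()
    writer.write(header_struct.pack(len(payload)) + payload)
    await writer.drain()

    header = await reader.readexactly(header_struct.size)
    (length,) = header_struct.unpack(header)
    reply = await reader.readexactly(length)
    writer.close()
    await writer.wait_closed()
    return json.loads(reply)

# Example usage (assumed host/port and task):
# print(asyncio.run(send_request("127.0.0.1", 1060, "share", {"text": "hello"})))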

How to store JSON (discord.py)

So I tried storing a variable in a JSON file, but it gives me this error:
Object of type Command is not JSON serializable
The code is something like this:
import discord
from discord.ext import commands
import json

num = 0

def store():
    with open("num.json", 'w') as file:
        json.dump(num, file)

bot = commands.Bot(command_prefix='!')

@bot.command()
async def increase(ctx):
    num += 1
    await ctx.send("I increased your number,now it is: " + num)
    store()
"Object of type Command is not JSON serializable" says that the variable (or entity) you provided can't be transformed (serialized) into the JSON format.
In your case that is because you haven't followed the key:value JSON rule. JSON is similar to Python's dict (except keys can only be strings). So to be able to store your variable, pass it to the json.dump function as follows: {"my_var": num}.
So simply change your code to something like this:
def store():
    with open("num.json", 'w') as file:
        json.dump({"num": num}, file)  # HERE'S THE DEAL! THE ENTITY SHOULD BE A DICT!

bot = commands.Bot(command_prefix='!')

@bot.command()
async def increase(ctx):
    global num
    num += 1
    await ctx.send("I increased your number,now it is: " + str(num))
    store()
You didn't open the JSON file, so you can't write to it.
Try this:
num = 0

@bot.command()
async def increase(ctx):
    global num
    num += 1
    # opening the json
    with open("num.json", "r") as file:
        x = json.load(file)
    # setting the number
    x["num"] = num
    # now store it (json.dump takes the object first, then the file)
    with open("num.json", "w") as file:
        json.dump(x, file, indent=4)
    await ctx.send("I increased your number,now it is: " + str(num))
The num.json should look like this (before running the command):
{
    "num": 0
}
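As a small sketch of the same idea (the file name and key are just the ones used above), the load/store pair can be wrapped in two helpers so the command body stays short:
import json

NUM_FILE = "num.json"  # same file name as in the question

def load_num() -> int:
    # Read the stored value; fall back to 0 if the file doesn't exist yet.
    try:
        with open(NUM_FILE, "r") as file:
            return json.load(file)["num"]
    except FileNotFoundError:
        return 0

def store_num(value: int) -> None:
    # json.dump wants a JSON-compatible object first, then the file object.
    with open(NUM_FILE, "w") as file:
        json.dump({"num": value}, file, indent=4)

# Example usage inside the command:
# num = load_num() + 1
# store_num(num)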

How to get a slice of a concrete type (not pointers) in Go protobuf

For the proto file:
syntax = "proto3";
package messagepb;

import "github.com/gogo/protobuf/gogoproto/gogo.proto";

option (gogoproto.marshaler_all) = true;
option (gogoproto.sizer_all) = true;
option (gogoproto.unmarshaler_all) = true;
option (gogoproto.goproto_getters_all) = false;

service KV {
    // Put puts the given key into the store.
    // A put request increases the revision of the store,
    // and generates one event in the event history.
    rpc Put(PutRequest) returns (PutResponse) {}
}

message PutRequest {
    bytes key = 1;
    bytes value = 2;
}

message ResponseHeader {
    repeated PutRequest l = 3;
}
I get the following proto struct:
type ResponseHeader struct {
    L []*PutRequest `protobuf:"bytes,3,rep,name=l" json:"l,omitempty"`
}
But how do I get the following proto struct instead:
type ResponseHeader struct {
    L []PutRequest `protobuf:"bytes,3,rep,name=l" json:"l,omitempty"`
}
That is, I want data locality (and thus slices of contiguous data, not pointers to spread-out values).
I needed to use: [(gogoproto.nullable) = false]
as in:
repeated PutRequest l = 3 [(gogoproto.nullable) = false];
And got:
L []PutRequest `protobuf:"bytes,3,rep,name=l" json:"l"`

autobahn mosquitto server chat

I'm trying to connect to a mosquitto broker using Autobahn Python.
If I use sub.py, which has this code inside:
import mosquitto

def on_connect(mosq, obj, rc):
    print("rc: "+str(rc))

def on_message(mosq, obj, msg):
    print(msg.topic+" "+str(msg.qos)+" "+str(msg.payload))

def on_publish(mosq, obj, mid):
    print("mid: "+str(mid))

def on_subscribe(mosq, obj, mid, granted_qos):
    print("Subscribed: "+str(mid)+" "+str(granted_qos))

def on_log(mosq, obj, level, string):
    print(string)

mqttc = mosquitto.Mosquitto()
mqttc.on_message = on_message
mqttc.on_connect = on_connect
mqttc.on_publish = on_publish
mqttc.on_subscribe = on_subscribe
# Uncomment to enable debug messages
mqttc.on_log = on_log
mqttc.connect("localhost", 1883, 60)
mqttc.subscribe("control", 0)

rc = 0
while rc == 0:
    rc = mqttc.loop()
print("rc: "+str(rc))
it connects to the broker and retrieves all messages that a client publishes to the control channel.
I'd like to push those messages to a webpage using WebSockets somehow, and for that I am trying to use Autobahn Python and modify the example from here: http://autobahn.ws/python/getstarted#yourfirstserver
My code is like this:
import sys
import mosquitto

from twisted.internet import reactor
from twisted.python import log

from autobahn.websocket import WebSocketServerFactory, \
                               WebSocketServerProtocol, \
                               listenWS


class EchoServerProtocol(WebSocketServerProtocol):

    def on_connect(mosq, obj, rc):
        print("rc: "+str(rc))

    def on_message(mosq, obj, msg):
        print(msg.topic+" "+str(msg.qos)+" "+str(msg.payload))

    def onMessage(self, msg, binary):
        print "sending echo:", msg
        self.sendMessage(msg, binary)

    def on_publish(mosq, obj, mid):
        print("mid: "+str(mid))

    def on_subscribe(mosq, obj, mid, granted_qos):
        print("Subscribed: "+str(mid)+" "+str(granted_qos))


mqttc = mosquitto.Mosquitto()
mqttc.on_message = on_message
mqttc.on_connect = on_connect
mqttc.on_publish = on_publish
mqttc.on_subscribe = on_subscribe
# Uncomment to enable debug messages
mqttc.on_log = on_log
mqttc.connect("192.168.2.109", 1883, 60)
mqttc.subscribe("control", 0)

rc = 0
while rc == 0:
    rc = mqttc.loop()
print("rc: "+str(rc))

if __name__ == '__main__':
    log.startLogging(sys.stdout)
    factory = WebSocketServerFactory("ws://192.168.2.109:8899", debug = TRUE)
    factory.protocol = EchoServerProtocol
    listenWS(factory)
    reactor.run()
but I receive this error when I try to run it:
root#Ubuntu:~/authobahn# python myserver.py
Traceback (most recent call last):
  File "myserver.py", line 30, in <module>
    mqttc.on_message = on_message
NameError: name 'on_message' is not defined
The problem is that you have defined your on_message() function inside the EchoServerProtocol class, so it is not visible in the global scope where the mqttc callbacks are assigned. You probably want to put all of the mqttc code inside your class as well, although it depends on what you actually want to achieve.
You could do something like the code below:
import mosquitto

class MyMQTTClass:

    def __init__(self, clientid=None):
        self._mqttc = mosquitto.Mosquitto(clientid)
        self._mqttc.on_message = self.mqtt_on_message
        self._mqttc.on_connect = self.mqtt_on_connect
        self._mqttc.on_publish = self.mqtt_on_publish
        self._mqttc.on_subscribe = self.mqtt_on_subscribe

    def mqtt_on_connect(self, mosq, obj, rc):
        print("rc: "+str(rc))

    def mqtt_on_message(self, mosq, obj, msg):
        print(msg.topic+" "+str(msg.qos)+" "+str(msg.payload))

    def mqtt_on_publish(self, mosq, obj, mid):
        print("mid: "+str(mid))

    def mqtt_on_subscribe(self, mosq, obj, mid, granted_qos):
        print("Subscribed: "+str(mid)+" "+str(granted_qos))

    def mqtt_on_log(self, mosq, obj, level, string):
        print(string)

    def run(self):
        self._mqttc.connect("test.mosquitto.org", 1883, 60)
        self._mqttc.subscribe("$SYS/#", 0)
        rc = 0
        while rc == 0:
            rc = self._mqttc.loop()
        return rc

# If you want to use a specific client id, use
# mqttc = MyMQTTClass("client-id")
# but note that the client id must be unique on the broker. Leaving the client
# id parameter empty will generate a random id for you.
mqttc = MyMQTTClass()
rc = mqttc.run()
print("rc: "+str(rc))
