I'm getting this error while signing up users:
An error occurred (InvalidLambdaResponseException) when calling the SignUp operation: Unrecognizable lambda output.
My code:
from __future__ import print_function
import json
import boto3
import botocore.exceptions
import hmac
import hashlib
import base64
import uuid

client = None
USER_POOL_ID = 'us-east-2_ssqaX****'
CLIENT_ID = '40kq19dp3tgaost148115****'
CLIENT_SECRET = '1rbns01jnv2ckiu1so8ntqjt8l6r****lgq2a4hni642s*****'
ERROR = 0
SUCCESS = 1
USER_EXISTS = 2


def get_secret_hash(username):
    # SECRET_HASH = Base64(HMAC-SHA256(client secret, username + client id))
    msg = username + CLIENT_ID
    dig = hmac.new(str(CLIENT_SECRET).encode('utf-8'),
                   msg=str(msg).encode('utf-8'),
                   digestmod=hashlib.sha256).digest()
    d2 = base64.b64encode(dig).decode()
    print("here" + d2)
    return d2


def lambda_handler(event, context):
    global client
    if client is None:
        client = boto3.client('cognito-idp')
    body = event
    username = body['username']
    password = body['password']
    resp = client.sign_up(
        ClientId=CLIENT_ID,
        SecretHash=get_secret_hash(username),
        Username=username,
        Password=password
    )
    return {"resp": resp}
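"Unrecognizable lambda output" usually points at a Lambda trigger configured on the user pool (for example a pre sign-up trigger) rather than at the sign_up call itself: Cognito expects a trigger to return the event object it was invoked with, not an arbitrary dict. Purely as an illustration, assuming such a trigger exists on the pool, a minimal pre sign-up trigger that auto-confirms users looks roughly like this:

def lambda_handler(event, context):
    # A Cognito trigger must return the (possibly modified) event it received;
    # returning anything else makes Cognito raise InvalidLambdaResponseException.
    event['response']['autoConfirmUser'] = True
    return event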
I am trying to make a Discord bot that returns the most popular videos in the KR region for a topic, which is taken from the user's message.
So far I have coded:
import discord
import googleapiclient.discovery
from discord.ext import commands
import typing
from discord_ui import UI

cmd = commands.Bot(command_prefix='.')
client = discord.Client()
token =

api_service_name = "youtube"
api_version = "v3"
DEVELOPER_KEY = 'googleapikeyhere'
youtube = googleapiclient.discovery.build(
    api_service_name, api_version, developerKey=DEVELOPER_KEY)


@cmd.event
async def on_command_error(ctx, error):
    if isinstance(error, commands.CommandOnCooldown):
        await ctx.reply(f"You are on cooldown for {round(error.retry_after, 2)} seconds!")


@cmd.command()
@commands.cooldown(1, 4, commands.BucketType.user)
async def info(ctx, *args):
    output2 = ''
    for word in args:
        output2 += str(word)
        output2 += ' '
    print(output2)
    request3 = youtube.videos().list(
        part="snippet, {}, statistics".format(output2),
        chart="mostPopular",
        regionCode="KR"
    )
    response3 = request3.execute()
    print(response3)
    channame = response3['items'][0]['snippet']['title']
    chanid = response3['etag']
    numberViews = response3['items'][0]['statistics']['viewCount']
    numberLikes = response3['items'][0]['statistics']['likeCount']
    embed2 = discord.Embed(title=f"{channame}", url=f"https://www.youtube.com/watch?v={chanid}",
                           numberviews=f"{numberViews}", numberlikes=f"{numberLikes}", color=0xFF5733)
    response3 = request3.execute()
    await ctx.send(embed=embed2)

cmd.run('findvideos')
but it results in the error "discord has no attribute 'StoreData'". Why is that?
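As an aside on the YouTube call itself: part in videos().list is meant to name resource parts such as snippet and statistics, not the user's topic, and chart="mostPopular" cannot be filtered by keyword. A rough sketch of the two calls that are usually combined for "popular videos about a topic" is below; it reuses output2 from the question, everything else is illustrative, and it is unrelated to the StoreData error itself:

# Hypothetical sketch: search for the topic in the KR region, then fetch statistics.
search_resp = youtube.search().list(
    part="snippet",
    q=output2,            # the user's topic
    regionCode="KR",
    type="video",
    order="viewCount",
    maxResults=1,
).execute()
video_id = search_resp["items"][0]["id"]["videoId"]

stats_resp = youtube.videos().list(
    part="snippet,statistics",
    id=video_id,
).execute()
view_count = stats_resp["items"][0]["statistics"]["viewCount"]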
I am working on a project using a WSGI server. My connected agents and users disconnect and then reconnect after some time with a new sid (session ID), which is creating a problem. I need the same sid throughout the user's interaction on the web. Also, what is eventlet used for?
I'm new to sockets and need help with this issue.
Here is my code:
import socketio
import os
from dotenv import load_dotenv
import jwt
from Controllers.DatabaseController import DBController

DBHandler = DBController()
load_dotenv()
private_key = os.getenv('PRIVATE_KEY')
# headers = {'Authorization': private_key}

sio = socketio.Server(ping_interval=25, ping_timeout=55)
app = socketio.WSGIApp(sio, static_files={
    '/': './public/'
})

agents = {}
users = {}
total_agents = 0
total_users = 0


def add_agents(sid, First_Name, Last_Name):
    global total_agents
    if total_agents >= 0:
        total_agents += 1
        agents[sid] = {
            'Type': 'Agent',
            'First_Name': First_Name,
            'Last_Name': Last_Name,
            'Data': {}
        }
    return agents


def add_users(sid):
    global total_users
    if total_users >= 0:
        total_users += 1
        users[total_users] = {
            sid: {
                'Data': ''
            }
        }
    return users


@sio.event
def connect(sid, environ):
    global agents
    token = environ.get('HTTP_X_USERNAME')
    print(token)
    if not token:
        print(sid, 'connected')
        return True
    try:
        user_session_data = jwt.decode(token, private_key, algorithms=['HS256'])
        # if user_session_data is False:
        #     with sio.session(sid) as session:
        #         session['username'] = token
        #     sio.emit('user_joined', 1)
        #     print(sid, 'connected')
        #     users_list = add_users(sid)
        #     print(users_list)
        #
        # print(user_session_data)
        result, colnames = DBHandler.GetAgentData(user_session_data['user'])
        if result is not False:
            First_Name = result[0][0]
            Last_Name = result[0][1]
            Username = result[0][2]
            with sio.session(sid) as session:
                session['username'] = token
            # sio.emit(Username, 1)
            print(sid, 'connected')
            agents_list = add_agents(sid, First_Name, Last_Name)
            sio.enter_room(sid, 'agents')
            print(agents_list)
    except Exception as e:
        print(e)
        print("wrong token")
        return False


@sio.event
def disconnect(sid):
    print(sid, 'disconnected')
    sio.leave_room(sid, 'agents')
    del agents[sid]


@sio.event
def request_call(sid):
    sio.emit('call_request', {'sid': sid}, room='agents')


@sio.event
def accept_call(sid):
    sio.emit('call_accepted', {'sid': sid}, to=sid)
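On the sid question: python-socketio assigns a new sid for every connection by design, so it generally cannot be kept constant across reconnects. The usual approach is to key your own state on a stable identifier (here, the username decoded from the JWT) instead of on sid, for example by putting each agent in a room named after the user. A minimal sketch under that assumption, reusing sio and private_key from the question; the agents_by_user dict and notify_user helper are hypothetical:

agents_by_user = {}  # hypothetical map: stable username -> current sid

@sio.event
def connect(sid, environ):
    # Same JWT check as above, trimmed down; key everything on the username.
    token = environ.get('HTTP_X_USERNAME')
    user = jwt.decode(token, private_key, algorithms=['HS256'])['user']
    agents_by_user[user] = sid     # the sid changes on every reconnect, the key does not
    sio.enter_room(sid, user)      # a room named after the user survives reconnects


def notify_user(user, payload):
    # Emitting to the user's room reaches whichever sid is currently connected.
    sio.emit('notification', payload, room=user)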
I am doing IBM WebSphere MQ testing using JMeter. I have created a JSR223 Sampler script based on the following reference link: https://www.blazemeter.com/blog/ibm-mq-testing-with-jmeter-learn-how. There I am using only one queue name (request) to inject my request into the IBM MQ server.
But in my case, I have to retrieve my response from a different queue name.
Example:
Request queue name: DNB.LX.SRXX.LOGGING.IN
Response queue name: DNB.LX.SRXX.LOGGING.OUT
So if I am sending my request using the queue name "DNB.LX.SRXX.LOGGING.IN", I need to retrieve my response from another queue name, "DNB.LX.SRXX.LOGGING.OUT".
May I know how to retrieve this response? I have the following parameters with me.
Queue manager: CLDACESP
Channel name: ACE.CONFIG.SXXCONN
Hostname: 172.25.XX.XX
Host port: 1414
Queue name (request): DNB.LX.SRXX.LOGGING.IN
Queue name (response): DNB.LX.SRXX.LOGGING.OUT
Just create a new session pointing to the other queue; all you need to do is duplicate the code and change the queue name, i.e.
Producer:
import com.ibm.msg.client.jms.JmsFactoryFactory
import com.ibm.msg.client.wmq.WMQConstants
import javax.jms.Session
def hostName = "172.25.XX.XX"
def hostPort = 1414
def channelName = "DEV.APP.SVRCONN"
def queueManagerName = "QM1"
def queueName = "DNB.LX.SRXX.LOGGING.IN"
def ff = JmsFactoryFactory.getInstance(WMQConstants.WMQ_PROVIDER)
def cf = ff.createConnectionFactory()
cf.setStringProperty(WMQConstants.WMQ_HOST_NAME, hostName)
cf.setIntProperty(WMQConstants.WMQ_PORT, hostPort)
cf.setStringProperty(WMQConstants.WMQ_CHANNEL, channelName)
cf.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_CLIENT)
cf.setStringProperty(WMQConstants.WMQ_QUEUE_MANAGER, queueManagerName)
def conn = cf.createConnection("app", "test")
def sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE)
def destination = sess.createQueue(queueName)
conn.start()
def producer = sess.createProducer(destination)
def rnd = new Random(System.currentTimeMillis())
def payload = String.format("JMeter...IBM MQ...test message no. %09d!", rnd.nextInt(Integer.MAX_VALUE))
def msg = sess.createTextMessage(payload)
producer.send(msg)
producer.close()
Consumer:
import com.ibm.msg.client.jms.JmsFactoryFactory
import com.ibm.msg.client.wmq.WMQConstants
import javax.jms.Session
def hostName = "172.25.XX.XX"
def hostPort = 1414
def channelName = "DEV.APP.SVRCONN"
def queueManagerName = "QM1"
def queueName = "DNB.LX.SRXX.LOGGING.OUT"
def ff = JmsFactoryFactory.getInstance(WMQConstants.WMQ_PROVIDER)
def cf = ff.createConnectionFactory()
cf.setStringProperty(WMQConstants.WMQ_HOST_NAME, hostName)
cf.setIntProperty(WMQConstants.WMQ_PORT, hostPort)
cf.setStringProperty(WMQConstants.WMQ_CHANNEL, channelName)
cf.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_CLIENT)
cf.setStringProperty(WMQConstants.WMQ_QUEUE_MANAGER, queueManagerName)
def conn = cf.createConnection("app", "test")
def sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE)
def destination = sess.createQueue(queueName)
conn.start()
def consumer = sess.createConsumer(destination)
def msg = consumer.receive(1) // timeout in milliseconds
//do what you need with this message(s)
consumer.close()
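One more point worth noting, not part of the original answer: with separate request and response queues, replies are normally matched to their requests via JMSCorrelationID, assuming the responding application copies the request's JMSMessageID into the reply's correlation ID. A hedged sketch of that pattern, shown as a single combined fragment; in the two-sampler setup above the message ID would be passed between samplers via a JMeter variable:

// After sending, remember the provider-assigned message ID of the request.
producer.send(msg)
def requestId = msg.getJMSMessageID()

// On the consumer side, receive only the reply that correlates with this request.
def replyDestination = sess.createQueue("DNB.LX.SRXX.LOGGING.OUT")
def selector = "JMSCorrelationID = '${requestId}'"
def replyConsumer = sess.createConsumer(replyDestination, selector)
def reply = replyConsumer.receive(5000) // timeout in milliseconds
replyConsumer.close()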
More information:
IBM MQ classes for JMS
Apache Groovy - Why and How You Should Use It
I am getting this error and cannot figure out what I am doing wrong:
Error invoking bsh method: eval In file: inline evaluation of: ``import java.util.Set; import java.util.Map; import java.util.List; try { // Map . . . '' Encountered "String" at line 17, column 9.
This is the code that I am using:
import java.util.Set;
import java.util.Map;
import java.util.List;

try
{
    // Map<String,List<String>> map = new HashMap<String,List<String>>();
    // map = vars.getObject("headerMap");
    boolean isHeaderValid = false;
    // String apiKeySent = "${x_api_key}"
    // String clientIdSent = "${X_IBM_Client_id}"
    // String clientSecretSent = "${X_IBM_Client_Secret}"
    String apiKeySent = vars.get("x_api_key")
    String clientIdSent = vars.get("X_Client_id")
    String clientSecretSent = vars.get("X_Client_Secret")
    log.info("apiKeySent: " + vars.get("x_api_key"))
    log.info("clientIdSent: " + vars.get("X_Client_id"))
    log.info("clientSecretSent: " + vars.get("X_Client_Secret"))
    if(apiKeySent != "")
    {
        apiKeyRec = vars.get("apiKeyRec")
        isHeaderValid = apiKeySent.equals(apiKeyRec)
    }
    Failure = isHeaderValid
}
catch(Exception e)
{
    log.debug("Error in verification: ", e)
}
Could anyone please help me figure this out? I have been stuck on this for ages.
You need to add semicolons, like this:
import java.util.Set;
import java.util.Map;
import java.util.List;

try
{
    // Map<String,List<String>> map = new HashMap<String,List<String>>();
    // map = vars.getObject("headerMap");
    boolean isHeaderValid = false;
    // String apiKeySent = "${x_api_key}"
    // String clientIdSent = "${X_IBM_Client_id}"
    // String clientSecretSent = "${X_IBM_Client_Secret}"
    String apiKeySent = vars.get("x_api_key");
    String clientIdSent = vars.get("X_Client_id");
    String clientSecretSent = vars.get("X_Client_Secret");
    log.info("apiKeySent: " + vars.get("x_api_key"));
    log.info("clientIdSent: " + vars.get("X_Client_id"));
    log.info("clientSecretSent: " + vars.get("X_Client_Secret"));
    if(apiKeySent != "")
    {
        apiKeyRec = vars.get("apiKeyRec");
        isHeaderValid = apiKeySent.equals(apiKeyRec);
    }
    Failure = isHeaderValid;
}
catch(Exception e)
{
    log.debug("Error in verification: ", e);
}
Since JMeter 3.1 you should be using JSR223 test elements and the Groovy language for scripting, so consider migrating to the JSR223 Assertion and Groovy.
Your script can be simplified to:
AssertionResult.setFailure(vars.get('x_api_key') == vars.get('apiKeyRec'))
And you don't even need any scripting for comparing two variables; it can be done using a "normal" Response Assertion.
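For completeness, here is a Groovy sketch of the same checks in a JSR223 Assertion, using the variable names from the question; this is one possible way to write it, not the only one:

def apiKeySent = vars.get('x_api_key')
def apiKeyRec  = vars.get('apiKeyRec')
log.info("apiKeySent: ${apiKeySent}")

if (apiKeySent) {
    // Mirrors 'Failure = isHeaderValid' from the Beanshell version:
    // the assertion fails when the sent key equals the expected one.
    AssertionResult.setFailure(apiKeySent == apiKeyRec)
}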
It looks like you are forgetting to end all of your statements with semicolons from line 12 on. Add semicolons and let me know how that works!
I use Impala (JDBC) twice: to get the Kafka offsets and to save data in foreachRDD.
But Impala and Kudu always shut down. Now I want to set up a connection pool, but there is little material on this for Scala.
Here is my pseudo-code:
// node-1
val newOffsets = getNewOffset() // JDBC read of the Kafka offsets stored in Kudu
val messages = KafkaUtils.createDirectStream(*, newOffsets, )
messages.foreachRDD(rdd => {
  val spark = SparkSession.builder.config(rdd.sparkContext.getConf).getOrCreate()

  // node-2
  Class.forName(jdbcDriver)
  val con = DriverManager.getConnection("impala url")
  val stmt = con.createStatement()
  stmt.executeUpdate(sql)

  // node-3
  val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
  offsetRanges.foreach { r => {
    val rt_upsert = s"UPSERT INTO ${execTable} VALUES('${r.topic}',${r.partition},${r.untilOffset})"
    stmt.executeUpdate(rt_upsert)
    stmt.close()
    con.close()
  }}
})
How can I code this with c3p0 or another connection pool? I'll appreciate your help.
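As for the pooling question itself: the usual pattern is to create the JDBC connection once (or use a small pool such as c3p0/HikariCP) and reuse it across batches, opening and closing only the Statement inside foreachRDD. A rough sketch along those lines, reusing messages, jdbcDriver and execTable from the question; the ImpalaConn object is hypothetical:

import java.sql.{Connection, DriverManager, Statement}
import org.apache.spark.streaming.kafka.HasOffsetRanges

// Hypothetical lazily-initialized singleton: one JDBC connection reused across batches
// instead of opening and closing it inside every foreachRDD call.
object ImpalaConn {
  private var conn: Connection = _
  def get(driver: String, url: String): Connection = synchronized {
    if (conn == null || conn.isClosed) {
      Class.forName(driver)                  // 'jdbcDriver' from the question
      conn = DriverManager.getConnection(url)
    }
    conn
  }
}

messages.foreachRDD { rdd =>
  val stmt: Statement = ImpalaConn.get(jdbcDriver, "impala url").createStatement()
  rdd.asInstanceOf[HasOffsetRanges].offsetRanges.foreach { r =>
    stmt.executeUpdate(
      s"UPSERT INTO ${execTable} VALUES('${r.topic}', ${r.partition}, ${r.untilOffset})")
  }
  stmt.close() // only the statement is per-batch; the connection stays open
}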
Below is the code for reading data from Kafka and inserting it into Kudu.
import kafka.message.MessageAndMetadata
import kafka.serializer.StringDecoder
import org.apache.kudu.client.KuduClient
import org.apache.kudu.client.KuduSession
import org.apache.kudu.client.KuduTable
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Milliseconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.kafka.KafkaUtils
import scala.collection.immutable.List
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types._
import org.apache.kudu.Schema
import org.apache.kudu.Type._
import org.apache.kudu.spark.kudu.KuduContext
import scala.collection.mutable.ArrayBuffer

object KafkaKuduConnect extends Serializable {
  def main(args: Array[String]): Unit = {
    try {
      val TopicName = "TestTopic"
      val kafkaConsumerProps = Map[String, String]("bootstrap.servers" -> "localhost:9092")
      val KuduMaster = ""
      val KuduTable = ""

      val sparkConf = new SparkConf().setAppName("KafkaKuduConnect")
      val sc = new SparkContext(sparkConf)
      val sqlContext = new SQLContext(sc)
      import sqlContext.implicits._

      val ssc = new StreamingContext(sc, Milliseconds(1000))
      val kuduContext = new KuduContext(KuduMaster, sc)
      val kuduclient: KuduClient = new KuduClient.KuduClientBuilder(KuduMaster).build()

      // Opening the table
      val kudutable: KuduTable = kuduclient.openTable(KuduTable)
      // Getting the table schema
      val tableschema: Schema = kudutable.getSchema
      // Creating the schema for the data frame using the table schema
      val FinalTableSchema = generateStructure(tableschema)

      // To create the schema for creating the data frame from the RDD
      def generateStructure(tableSchema: Schema): StructType = {
        var structFieldList: List[StructField] = List()
        for (index <- 0 until tableSchema.getColumnCount) {
          val col = tableSchema.getColumnByIndex(index)
          val coltype = col.getType.toString
          println(coltype)
          col.getType match {
            case INT32 =>
              structFieldList = structFieldList :+ StructField(col.getName, IntegerType)
            case STRING =>
              structFieldList = structFieldList :+ StructField(col.getName, StringType)
            case _ =>
              println("No Class Type Found")
          }
        }
        return StructType(structFieldList)
      }

      // To create the Row object with values type-cast according to the schema
      def getRow(schema: StructType, Data: List[String]): Row = {
        var RowData = ArrayBuffer[Any]()
        schema.zipWithIndex.foreach(
          each => {
            var Index = each._2
            each._1.dataType match {
              case IntegerType =>
                if (Data(Index) == "" || Data(Index) == null)
                  RowData += null
                else
                  RowData += Data(Index).toInt
              case StringType =>
                RowData += Data(Index)
              case _ =>
                RowData += Data(Index)
            }
          }
        )
        return Row.fromSeq(RowData.toList)
      }

      val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaConsumerProps, Set(TopicName))
      messages.foreachRDD(
        // We are looping through each RDD
        eachrdd => {
          // We are creating the RDD[Row] to build a data frame with our schema
          val StructuredRdd = eachrdd.map(eachmessage => {
            val record = eachmessage._2
            getRow(FinalTableSchema, record.split(",").toList)
          })
          // DataFrame with the required structure according to the table
          val DF = sqlContext.createDataFrame(StructuredRdd, FinalTableSchema)
          kuduContext.upsertRows(DF, KuduTable)
        }
      )
    }
    catch {
      case NonFatal(e) =>
        print("Error in main : " + e)
    }
  }
}