Lambda function is completed but I'm unable to query Aurora DB - aws-lambda

Currently, I'm using a Lambda function, SSM, and Aurora DB together.
I managed to get Aurora's credentials from SSM and the Lambda itself completes, but I can't query the database. I took this tutorial as a reference, which was also recommended by AWS support, yet I'm still unable to use the Aurora database.
The following is my lambda code:
// Imports assumed from the usage below: aws-sdk v2 (SSM with .promise()) and the mysql package.
import { SSM } from 'aws-sdk';
import * as mysql from 'mysql';

export async function handler (event: any)
{
    try
    {
        console.log("Welcome to lambda")
        const ssm = new SSM();
        const username = await ssm
            .getParameter({ Name: ***, WithDecryption: false })
            .promise();
        const password = await ssm
            .getParameter({ Name: ***, WithDecryption: true })
            .promise();
        console.log("Before Connection")
        let pool = mysql.createPool({
            host: '***',
            database: 'aurora-test1-us-mysql-multilevel',
            user: username.Parameter?.Value,
            password: password.Parameter?.Value,
            port: 3306
        });
        pool.getConnection(function(err, connection) {
            // Use the connection
            connection.query('SELECT * from feature', function (error, results, fields) {
                console.log("result", results[0]);
                // And done with the connection.
                connection.release();
                // Handle error after the release.
                if (error)
                    console.log("Error: ", error)
                else
                    console.log("result", results[0].emp_name);
            });
        });
        let q = pool.query('SELECT * from feature')
        console.log("Query", q)
        return {
            statusCode: 200,
            body: "HELLO KIMOO!!! Welcome TO AURORA DB" + "Database Created2"
        }
    }
    catch (err)
    {
        console.log("Error caught", err)
        return {
            statusCode: 500,
            body: JSON.stringify({
                message: 'Error: ' + err
            })
        }
    }
}
The following is the output of console.log("Query", q):
Query <ref *1> Query {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
_callback: undefined,
_callSite: Error
at Pool.query (/var/task/node_modules/mysql/lib/Pool.js:199:23)
at Runtime.<anonymous> (/var/task/index.js:48:26)
at Generator.next (<anonymous>)
at fulfilled (/var/task/index.js:5:58)
at processTicksAndRejections (node:internal/process/task_queues:96:5),
_ended: false,
_timeout: undefined,
_timer: Timer { _object: [Circular *1], _timeout: null },
sql: 'SELECT * from feature',
values: undefined,
typeCast: true,
nestTables: false,
_resultSet: null,
_results: [],
_fields: [],
_index: 0,
_loadError: null,
[Symbol(kCapture)]: false
}
NOTE:
There are no logs for console.log("result",results[0]);
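The dump above shows _callback: undefined and _results: [], which suggests the query is fired without a callback and the handler returns before the pool callbacks ever run. As a hedged sketch only, not a confirmed fix, the query could be wrapped in a Promise and awaited so the Lambda stays alive until the rows arrive; this assumes the same mysql pool and feature table from the code above:
// Sketch: promisify the pool query and await it before returning.
const rows = await new Promise<any[]>((resolve, reject) => {
    pool.query('SELECT * from feature', (error, results) => {
        if (error) {
            reject(error);    // surfaces connection or SQL errors in the catch block
        } else {
            resolve(results); // results is an array of row objects
        }
    });
});
console.log("result", rows[0]);
return {
    statusCode: 200,
    body: JSON.stringify(rows)
};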

Related

Lambda function doesn't trigger in Cognito post confirmation

I used AWS Amplify to export user sign-up info to Cognito, and new users show up correctly in the Cognito user pool. I then created a Lambda function to transfer user details from Cognito to DynamoDB and set it as the post confirmation trigger of the Cognito user pool, but users are still not transferred to DynamoDB.
The Lambda function works well on its own, so I think the problem is with Cognito, Amplify, or maybe role access, but I have already given the Lambda function's role full Cognito access.
My lambda function:
var aws = require('aws-sdk');
var ddb = new aws.DynamoDB({apiVersion: '2012-10-08'});

exports.handler = async (event, context) => {
    console.log(event);
    let date = new Date();
    const tableName = process.env.TABLE_NAME;
    const region = process.env.REGION;
    console.log("table=" + tableName + " -- region=" + region);
    aws.config.update({region: region});
    // If the required parameters are present, proceed
    if (event.request.userAttributes.sub) {
        // -- Write data to DDB
        let ddbParams = {
            Item: {
                'sub': {S: event.request.userAttributes.sub},
                'given_name': {S: event.request.userAttributes.given_name},
                'family_name': {S: event.request.userAttributes.family_name},
                'email': {S: event.request.userAttributes.email},
                'email_verified': {S: event.request.userAttributes.email_verified},
                'teamName': {S: event.request.userAttributes.teamName},
                'ifManager': {S: event.request.userAttributes.if_you_are_a_manager},
                'createdAt': {S: date.toISOString()},
            },
            TableName: tableName
        };
        console.log(ddbParams)
        // Call DynamoDB
        try {
            await ddb.putItem(ddbParams).promise()
            console.log("Success");
        } catch (err) {
            console.log("Error", err);
        }
        console.log("Success: Everything executed correctly");
        context.done(null, event);
    } else {
        // Nothing to do, the user's email ID is unknown
        console.log("Error: Nothing was written to DDB or SQS");
        context.done(null, event);
    }
};
My amplify file:
import { Amplify } from 'aws-amplify';
import { Authenticator } from '@aws-amplify/ui-react';
import '@aws-amplify/ui-react/styles.css';
import awsExports from './amplifyConfig';

Amplify.configure(awsExports);

const formFields = {
    signUp: {
        username: {
            order: 1,
            labelHidden: false,
            label: 'Email:',
            placeholder: 'Enter your email',
            isRequired: true,
        },
        given_name: {
            order: 2,
            label: 'First name:',
            placeholder: 'Enter your first name',
            isRequired: true,
        },
        family_name: {
            order: 3,
            label: 'Last name:',
            placeholder: 'Enter your last name',
            isRequired: true,
        },
        'custom:teamName': {
            order: 4,
            label: 'Team name:',
            placeholder: 'Enter your team name',
            isRequired: true,
        },
        'custom:if_you_are_a_manager': {
            order: 5,
            label: 'Are you a manager (Yes/No)?',
            placeholder: 'Please type Yes or No',
            isRequired: true,
        },
        password: {
            order: 6
        },
        confirm_password: {
            order: 7
        }
    },
}

export default function AmplifyAuth() {
    return (
        <Authenticator formFields={formFields}>
            {({ signOut, user }) => (
                <main>
                    <h1>Hello {user.attributes.given_name}</h1>
                    <button onClick={signOut}>Sign out</button>
                </main>
            )}
        </Authenticator>
    );
}
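One detail worth checking, offered as a hedged observation rather than a confirmed fix: the Amplify form registers the custom fields as custom:teamName and custom:if_you_are_a_manager, and Cognito delivers custom attributes to the post confirmation event under those prefixed names. Reading event.request.userAttributes.teamName therefore yields undefined, and putItem can then fail validation. A minimal sketch of reading them with the prefix:
// Sketch: custom attributes arrive with the "custom:" prefix in the trigger event.
const attrs = event.request.userAttributes;
const teamName = attrs['custom:teamName'];
const ifManager = attrs['custom:if_you_are_a_manager'];
// ...then build the DynamoDB item from these values, e.g. 'teamName': {S: teamName}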

@ConnectMapping not getting triggered even after sending SETUP frame from RSocket JS client

I have been running a sample RSocket project from https://dzone.com/articles/rsocket-with-spring-boot-amp-js-zero-to-hero, where I was trying to store a requester object per client id.
RSocket JS client:
client = new RSocketClient({
    serializers: {
        data: JsonSerializer,
        metadata: IdentitySerializer
    },
    setup: {
        payload: {
            data: "121212",
            metadata: String.fromCharCode("client-id".length) + "client-id"
        },
        keepAlive: 60000,
        lifetime: 180000,
        dataMimeType: "application/json",
        metadataMimeType: "message/x.rsocket.routing.v0"
    },
    transport: new RSocketWebSocketClient({
        url: 'ws://localhost:8080/tweetsocket'
    }),
});

// Open the connection
client.connect().subscribe({
    onComplete: socket => {
        // socket provides the rsocket interactions fire/forget, request/response,
        // request/stream, etc as well as methods to close the socket.
        socket.requestStream({
            data: {
                'author': document.getElementById("author-filter").value
            },
            metadata: String.fromCharCode('tweets.by.author'.length) + 'tweets.by.author',
        }).subscribe({
            onComplete: () => console.log('complete'),
            onError: error => {
                console.log(error);
                addErrorMessage("Connection has been closed due to ", error);
            },
            onNext: payload => {
                console.log(payload.data);
                reloadMessages(payload.data);
            },
            onSubscribe: subscription => {
                subscription.request(2147483647);
            },
        });
    },
    onError: error => {
        console.log(error);
        addErrorMessage("Connection has been refused due to ", error);
    },
    onSubscribe: cancel => {
        /* call cancel() to abort */
    }
});
RSocket Java server:
@ConnectMapping(value = "client-id")
public void onConnect(RSocketRequester rSocketRequester, @Payload String clientId) {
    logger.info(clientId);
    rSocketRequester.rsocket()
        .onClose() // (1)
        // .doFirst(() -> logger.info("Client Connected "))
        // .doOnError(error -> logger.info("Channel to client {} CLOSED", error)) // (3)
        .subscribe(null, null, () -> logger.info("herererer"));
}

@MessageMapping("tweets.by.author")
public Flux<Tweet> getByAuthor(TweetRequest request) {
    return service.getByAuthor(request.getAuthor());
}
I have been running the application in debug mode and am unable to trigger the @ConnectMapping-annotated onConnect method. Please help me understand what I am missing here.
The issue here is potentially a few things:
It's best to use composite metadata when defining a route
When using composite metadata, it's important to use IdentitySerializer for metadata
IdentitySerializer passes the metadata value through as-is, without any modification. This matters because the metadata value is already correctly encoded by encodeCompositeMetadata(...).
import {
    BufferEncoders,
    JsonSerializer,
    RSocketClient,
    APPLICATION_JSON,
    MESSAGE_RSOCKET_COMPOSITE_METADATA,
    encodeRoute,
    MESSAGE_RSOCKET_ROUTING,
    encodeCompositeMetadata,
    IdentitySerializer
} from "rsocket-core";
import RSocketWebSocketClient from "rsocket-websocket-client";

const client = new RSocketClient({
    serializers: {
        data: JsonSerializer,
        metadata: IdentitySerializer
    },
    setup: {
        payload: {
            data: "121212",
            metadata: encodeCompositeMetadata([
                [MESSAGE_RSOCKET_ROUTING, encodeRoute("client-id")],
            ])
        },
        keepAlive: 60000,
        lifetime: 180000,
        dataMimeType: APPLICATION_JSON.string,
        metadataMimeType: MESSAGE_RSOCKET_COMPOSITE_METADATA.string,
    },
    transport: new RSocketWebSocketClient({
        url: 'ws://localhost:8080/tweetsocket'
    }, BufferEncoders),
});
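For the same reason, the requestStream call would also need to route through composite metadata rather than the length-prefixed string used in the question; a minimal sketch reusing the imports above and the tweets.by.author route (the author-filter element and subscribe handlers are the ones from the question):
// Sketch: route the stream request with composite metadata as well.
socket.requestStream({
    data: { author: document.getElementById("author-filter").value },
    metadata: encodeCompositeMetadata([
        [MESSAGE_RSOCKET_ROUTING, encodeRoute('tweets.by.author')],
    ]),
}).subscribe({
    onNext: payload => console.log(payload.data),
    onError: error => console.log(error),
    onComplete: () => console.log('complete'),
    onSubscribe: subscription => subscription.request(2147483647),
});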

aws-sdk js runJobFlow from lambda not sending request to launch EMR cluster

I'm trying to launch an EMR cluster from a Lambda that gets triggered by an event in an S3 bucket.
The trigger works fine, and I then create an AWSRequest with runJobFlow, but there are no events in the EMR console and the cluster is not started.
Everything, including the created AWSRequest, is logged to CloudWatch, but no error is logged.
It just doesn't do anything.
Here is the code:
const aws = require('aws-sdk');
const emr = new aws.EMR({
    apiVersion: '2009-03-31',
    region: 'us-east-1'
});

const emrClusterConfig = (s3_input_path, s3_output_path) => {
    const ret = {
        Name: `cluster-for-job`,
        ServiceRole: 'EMR_DefaultRole',
        JobFlowRole: 'EMR_EC2_DefaultRole',
        VisibleToAllUsers: true,
        ScaleDownBehavior: 'TERMINATE_AT_TASK_COMPLETION',
        LogUri: 's3n://log-uri/elasticmapreduce/',
        ReleaseLabel: 'emr-5.29.0',
        Instances: {
            InstanceGroups: [
                {
                    Name: 'Master Instance Group',
                    Market: 'ON_DEMAND',
                    InstanceRole: 'MASTER',
                    InstanceType: 'm5.xlarge',
                    InstanceCount: 1,
                    EbsConfiguration: {
                        EbsBlockDeviceConfigs: [
                            {
                                VolumeSpecification: {
                                    SizeInGB: 32,
                                    VolumeType: 'gp2',
                                },
                                VolumesPerInstance: 2
                            },
                        ]
                    },
                },
                {
                    Name: 'Core Instance Group',
                    {... similar to master ...}
                }
            ],
            Ec2KeyName: 'my-keys',
            Ec2SubnetId: 'my-subnet-id',
            EmrManagedSlaveSecurityGroup: 'sg-slave-security-group',
            EmrManagedMasterSecurityGroup: 'sg-master-security-group',
            KeepJobFlowAliveWhenNoSteps: false,
            TerminationProtected: false
        },
        Applications: [
            {
                'Name': 'Spark'
            },
        ],
        Configurations: [{
            "Classification": "spark",
            "Properties": {}
        }],
        Steps: [{
            'Name': 'step',
            'ActionOnFailure': 'TERMINATE_CLUSTER',
            'HadoopJarStep': {
                'Jar': 's3n://elasticmapreduce/libs/script-runner/script-runner.jar',
                'Args': [
                    "/usr/bin/spark-submit", "--deploy-mode", "cluster",
                    's3://path-to-a-very-humble.jar', s3_input_path, s3_output_path
                ]
            }
        }],
    }
    return ret
}

exports.handler = async (event, context) => {
    const record = event.Records[0];
    const eventName = record.eventName;
    if (eventName === 'ObjectCreated:Put' || eventName === 'ObjectCreated:Post' || eventName === 'ObjectCreated:CompleteMultipartUpload' || eventName === 'ObjectCreated:Copy') {
        const s3_inputPath = 's3n://in-bucket/key';
        const s3_outputPath = 's3n://out-bucket/key';
        try {
            const cluster_config = emrClusterConfig(s3_inputPath, s3_outputPath);
            const AWS_EMRJobRequest = emr.runJobFlow(cluster_config)
            AWS_EMRJobRequest
                .on('success', function(response) { console.log("success => " + response) })
                .on('error', function(response) { console.log("error => " + response) })
                .on('complete', function(response) { console.log("complete => " + response) })
                .send(function(err, data) {
                    if (err) console.log(err, err.stack); // an error occurred
                    else console.log(data); // successful response
                    context.done(null, 'λ Completed');
                });
            console.log('Finished Launching EMR cluster: ', AWS_EMRJobRequest)
        }
        catch (err) {
            console.log(err);
        }
    }
    else {
        console.log(`:: not interested in event ${eventName}`);
    }
    context.done(null, 'λ Completed');
};
I have set up these clusters manually before and they work fine. I copied the cluster configuration from the AWS CLI export to match the settings of my existing clusters.
This just doesn't do anything; it logs "Finished Launching EMR cluster" with the request object at the end, but nothing happens.
AWS is terminating the function before the response is received, because AWS.Request sends the request asynchronously. Since you're using the async handler, you can use AWS.Request.promise(). This immediately starts the service call and returns a promise that is either fulfilled with the response data property or rejected with the response error property.
let AWS_EMRJobRequest = emr.runJobFlow(cluster_config);
return AWS_EMRJobRequest.promise();
Refer to the documentation for more information.
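As a rough sketch of how that fits into the handler from the question (assuming the same emrClusterConfig helper and S3 event filtering shown above), awaiting the promise keeps the Lambda alive until EMR responds:
// Sketch: await the RunJobFlow call so the async handler does not return early.
exports.handler = async (event) => {
    const eventName = event.Records[0].eventName;
    if (!eventName.startsWith('ObjectCreated:')) {
        console.log(`:: not interested in event ${eventName}`);
        return;
    }
    const cluster_config = emrClusterConfig('s3n://in-bucket/key', 's3n://out-bucket/key');
    const data = await emr.runJobFlow(cluster_config).promise();
    console.log('Started EMR cluster, JobFlowId:', data.JobFlowId); // RunJobFlow responds with the new cluster id
    return data;
};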

aws lambda with rds mysql DDL command not working

I have created a Lambda function which successfully connects to the database when using DML commands like
"SELECT" or "INSERT", but when I try "CREATE DATABASE" or "CREATE TABLE" it returns success, yet the table or database is not created in RDS.
In short, an insert query inserts records and a select returns those same records, so there is no issue with the connection, but the create commands are not working.
The following is my Lambda code:
const mysql = require('mysql');

const con = mysql.createConnection({
    host               : process.env.RDS_HOSTNAME,
    user               : process.env.RDS_USERNAME,
    password           : process.env.RDS_PASSWORD,
    port               : process.env.RDS_PORT,
    connectionLimit    : 10,   // max number of connections before your pool starts waiting for a release
    multipleStatements : true, // allow multiple SQL statements per query
    connectionLimit    : 1000,
    connectTimeout     : 60 * 60 * 1000,
    acquireTimeout     : 60 * 60 * 1000,
    timeout            : 60 * 60 * 1000,
    debug              : true
    // ,database:'testdb1'
});

exports.handler = async (event) => {
    con.query('CREATE DATABASE testdb1', function (err, result) {
        if (err) {
            console.log("Error->" + err);
            throw err;
        }
    });
    return "Completed..."
};
The Lambda should wait for queries to complete. Let's fix that first. After that, check whether the Lambda reports any errors when trying to create the database.
const mysql = require('mysql');

const con = mysql.createConnection({
    host: process.env.RDS_HOSTNAME,
    user: process.env.RDS_USERNAME,
    password: process.env.RDS_PASSWORD,
    port: process.env.RDS_PORT,
    connectionLimit: 10,
    multipleStatements: true, // allow multiple SQL statements per query
    connectionLimit: 1000,
    connectTimeout: 60 * 60 * 1000,
    acquireTimeout: 60 * 60 * 1000,
    timeout: 60 * 60 * 1000,
    debug: true
    // ,database:'testdb1'
});

exports.handler = async (event) => {
    try {
        const data = await new Promise((resolve, reject) => {
            con.connect(function (err) {
                if (err) {
                    return reject(err);
                }
                con.query('CREATE DATABASE testdb1', function (err, result) {
                    if (err) {
                        console.log("Error->" + err);
                        return reject(err);
                    }
                    resolve(result);
                });
            })
        });
        return {
            statusCode: 200,
            body: JSON.stringify(data)
        }
    } catch (err) {
        return {
            statusCode: 400,
            body: err.message
        }
    }
};
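If CREATE DATABASE still appears to succeed without the database showing up, one quick check from the same connection, sketched here with the same promise wrapper, is to query the catalog and log what MySQL actually reports:
// Sketch: confirm the database is visible from the Lambda's connection.
const rows = await new Promise((resolve, reject) => {
    con.query("SHOW DATABASES LIKE 'testdb1'", (err, result) => {
        if (err) return reject(err);
        resolve(result);
    });
});
console.log("SHOW DATABASES result:", rows); // an empty array means the database was not created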

Lambda function – GET doesn't return anything

I am completely new to the Serverless Framework and AWS Lambda.
When making a GET request to http://localhost:3000/user/1e89a3f0-d170-11e9-94bd-91e9ae84f3e9 I would expect a response to be sent back to the browser with a valid JSON object matching the key, like the one getting logged to the console, instead of an empty document.
Am I returning incorrectly? I am having difficulty debugging this; I don't know if the problem is with my Lambda function or something else.
Thank you.
console.log statement
{
email: 'i#am.com',
password: '$argon2i$v=19$m=4096,t=3,p=1$IIICgcMqbUA7wFpEMqb/GA$ENScjko+Y8pruQsTiE6qN81QAJfAPX/T116RQZqe347Y1p0rez4KhKaEulMeabKKiu8',
id: '1e89a3f0-d170-11e9-94bd-91e9ae84f3e9'
}
Here is the get handler in question.
users/get.js
const AWS = require("aws-sdk");
const dynamoDb = new AWS.DynamoDB.DocumentClient({
    region: "localhost",
    endpoint: "http://localhost:8000"
});

module.exports.get = async event => {
    const params = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            id: event.pathParameters.id
        }
    };
    dynamoDb.get(params, (error, result) => {
        if (error) {
            console.error(error);
            return;
        }
        console.log(result.Item); // logs successfully to the console.
        return {
            // doesn't return a response.
            statusCode: 200,
            body: JSON.stringify(result.Item)
        };
    });
};
serverless.yml
# EXCERPT
functions:
  get:
    handler: users/get.get
    events:
      - http:
          method: get
          path: user/{id}
          cors: true

resources:
  Resources:
    UsersDynamoDbTable:
      Type: "AWS::DynamoDB::Table"
      DeletionPolicy: Retain
      Properties:
        AttributeDefinitions:
          - AttributeName: id
            AttributeType: S
        KeySchema:
          - AttributeName: id
            KeyType: HASH
        ProvisionedThroughput:
          ReadCapacityUnits: 1
          WriteCapacityUnits: 1
        TableName: ${self:provider.environment.DYNAMODB_TABLE}

custom:
  dynamodb:
    stages:
      - dev
    start:
      port: 8000
      inMemory: true
      sharedDb: true
      noStart: true
You should either use the callback argument to return a response:
module.exports.get = (event, context, callback) => {
    const params = {
        TableName: process.env.DYNAMODB_TABLE,
        Key: {
            id: event.pathParameters.id,
        },
    };
    dynamoDb.get(params, (error, result) => {
        if (error) {
            console.error(error);
            return callback({
                statusCode: 500,
                body: 'Unable to get item',
            });
        }
        console.log(result.Item);
        callback(null, {
            statusCode: 200,
            body: JSON.stringify(result.Item),
        });
    });
};
Or use promises:
module.exports.get = async event => {
    try {
        const params = {
            TableName: process.env.DYNAMODB_TABLE,
            Key: {
                id: event.pathParameters.id,
            },
        };
        const result = await dynamoDb.get(params).promise();
        console.log(result.Item);
        return {
            statusCode: 200,
            body: JSON.stringify(result.Item),
        };
    } catch (error) {
        console.error(error);
        return {
            statusCode: 500,
            body: 'Unable to get item',
        };
    }
};
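The promise-based version fits the async handler declared in serverless.yml more naturally than the callback style. As a small sketch for exercising it locally (the table name below is hypothetical; it assumes DYNAMODB_TABLE is configured and the local DynamoDB from the custom section is running):
// Sketch: call the promise-based handler directly with a fake event.
process.env.DYNAMODB_TABLE = process.env.DYNAMODB_TABLE || 'users-table-dev'; // hypothetical default
const { get } = require('./users/get');

get({ pathParameters: { id: '1e89a3f0-d170-11e9-94bd-91e9ae84f3e9' } })
    .then(response => console.log(response))
    .catch(err => console.error(err));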
