Node.js getItem from DynamoDB and pass variable - aws-lambda

I'm working on some Lambda code in Node.js, and I want to pass an item retrieved from DynamoDB with getItem to some other code. Here's what I have:
const Alexa = require('ask-sdk');
const AWS = require ('aws-sdk');
AWS.config.update({region: 'us-east-1'});
//replace dynamo later with dynamo
const dynamodb = new AWS.DynamoDB();
//const appId = 'REPLACE WITH YOUR SKILL APPLICATION ID';
const date = new Date(Date.now()).toString();
const date2 = date.substring(0,16);
const time = date.substring(16,24);
const LaunchRequestHandler = {
canHandle(handlerInput) {
//first we assign the requestEnvelope request to the request variable
const request = handlerInput.requestEnvelope.request;
//conditions to determine the requests this handler can handle
//now using the request variable, we return true if it equals the one we want
//in this case LaunchRequest
return request.type === 'LaunchRequest'
},
handle(handlerInput) {
//execution logic for the handler
// ie What exactly do we do
const speechOutput =
"Welcome to Track it, you can tell me who you lent things to, \
or where you put an item. For example I lent the saw to Mike, or I put the saw in the workshop."
return handlerInput.responseBuilder
.speak(speechOutput)
.getResponse();
}
}
const GaveIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'GaveIntent';
},
handle(handlerInput,event) {
// setting variable attributes to handle things like counters
const attributes = handlerInput.attributesManager.getSessionAttributes();
// personName and itemName are pulling the slots information from the intent
const personName = handlerInput.requestEnvelope.request.intent.slots.lenderPerson.value;
const itemName = handlerInput.requestEnvelope.request.intent.slots.storedObject.value;
const currentUser = handlerInput.requestEnvelope.session.user.userId;
//begin DB code
var params = {
TableName: 'TrackItDB',
Item: {
'userID' : {S: currentUser},
'storedObject' : {S: itemName},
'lenderPerson' : {S: personName},
'objectStatus' : {S: 'lent'},
'transactionDate': {S: date},
},
ConditionExpression: 'attribute_not_exists(storedObject)'
};
console.log(params);
// putItem in database only if it doesn't already exist
dynamodb.putItem(params, function(err, data) {
if (err) {
console.log("Error", err);
console.log("That item already exists");
} else {
console.log("Success", data);
}
});
console.log(personName);
console.log(itemName);
console.log(currentUser);
const speechOutput = 'You Gave ' + personName + ' the ' + itemName;
return handlerInput.responseBuilder
.speak(speechOutput)
.withSimpleCard('Track It', speechOutput)
.getResponse();
}
};
const PutIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'PutIntent';
},
handle(handlerInput) {
const itemName = handlerInput.requestEnvelope.request.intent.slots.storedObject.value;
const LocationName = handlerInput.requestEnvelope.request.intent.slots.storageLocation.value;
const currentUser = handlerInput.requestEnvelope.session.user.userId;
//begin DB code
var params = {
TableName: 'TrackItDB',
Item: {
'userID' : {S: currentUser},
'storedObject' : {S: itemName},
'lenderPerson' : {S: LocationName},
'objectStatus' : {S: 'stored'},
'transactionDate': {S: date},
},
ConditionExpression: 'attribute_not_exists(storedObject)'
};
dynamodb.putItem(params, function(err, data) {
if (err) {
console.log("Error", err);
console.log("That item already exists");
}
else {
console.log("Success", data);
}
});
//end DB code
const speechOutput = 'You put the ' + itemName + ' in the ' + LocationName;
return handlerInput.responseBuilder
.speak(speechOutput)
.withSimpleCard('Hello World', speechOutput)
.getResponse();
}
};
const WhereIsIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'WhereIsIntent';
},
handle(handlerInput) {
const itemName = handlerInput.requestEnvelope.request.intent.slots.storedObject.value;
const currentUser = handlerInput.requestEnvelope.session.user.userId;
// begin DB query
var params = {
Key: {
"userID": {
S: currentUser
},
"storedObject": {
S: itemName
}
},
TableName: "TrackItDB"
};
// End DB Query
dynamodb.getItem(params, function(err, data) {
if (err) {
console.log("Error", err);
}// an error occurred
else {
console.log("Success", data); // successful response
const LocationName = data.Item.lenderPerson.S;
const speechOutput = 'Your ' + itemName + ' is in the ' + LocationName;
return handlerInput.responseBuilder
.speak(speechOutput)
.withSimpleCard('Hello World', speechOutput)
.getResponse();
}
});
}
};
const HelpIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& handlerInput.requestEnvelope.request.intent.name === 'AMAZON.HelpIntent';
},
handle(handlerInput) {
const speechOutput = 'Welcome to Track it, you can tell me who you lent things to, or where you put an item. For example I lent the saw to Mike, or I put the saw in the workshop.';
return handlerInput.responseBuilder
.speak(speechOutput)
.reprompt(speechOutput)
.withSimpleCard('Hello World', speechOutput)
.getResponse();
}
};
const CancelAndStopIntentHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'IntentRequest'
&& (handlerInput.requestEnvelope.request.intent.name === 'AMAZON.CancelIntent'
|| handlerInput.requestEnvelope.request.intent.name === 'AMAZON.StopIntent');
},
handle(handlerInput) {
const speechOutput = 'Goodbye!';
return handlerInput.responseBuilder
.speak(speechOutput)
.withSimpleCard('Hello World', speechOutput)
.getResponse();
}
};
const SessionEndedRequestHandler = {
canHandle(handlerInput) {
return handlerInput.requestEnvelope.request.type === 'SessionEndedRequest';
},
handle(handlerInput) {
//any cleanup logic goes here
return handlerInput.responseBuilder.getResponse();
}
};
const skillBuilder = Alexa.SkillBuilders.custom();
exports.handler = skillBuilder
.addRequestHandlers(
LaunchRequestHandler,
GaveIntentHandler,
PutIntentHandler,
WhereIsIntentHandler,
HelpIntentHandler,
SessionEndedRequestHandler,
CancelAndStopIntentHandler
)
.lambda()
I'm trying to pass itemName and LocationName to the speechOutput constant so I can have Alexa speak it for me.
It works in the console log, but not later, outside the getItem callback. FYI, I know I shouldn't declare const speechOutput twice, and it isn't like that in my code; I just put it there to show what I'm trying to do.

You need to move the responseBuilder snippet into the else part of the getItem callback, and not declare const speechOutput twice:
handle(handlerInput) {
const itemName =
handlerInput.requestEnvelope.request.intent.slots.storedObject.value;
const currentUser =
handlerInput.requestEnvelope.session.user.userId;
// Begin database query
var params = {
Key: {
"userID": {
S: currentUser
},
"storedObject": {
S: itemName
}
},
TableName: "TrackItDB"
};
// End DB Query
//const speechOutput = 'test'
dynamodb.getItem(params, function(err, data) {
if (err) {
console.log("Error", err);
} // An error occurred
else {
console.log("Success", data); // Successful
// response
const LocationName = data.Item.lenderPerson.S;
const speechOutput = 'Your ' + itemName + ' is in the ' +
LocationName;
// Speak the output
console.log(speechOutput);
return handlerInput.responseBuilder
.speak(speechOutput)
.withSimpleCard('Hello, World!', speechOutput)
.getResponse();
}
});
//const speechOutput = 'Your ' + itemName + ' is in the ' + LocationName;
}
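If you prefer to avoid the callback entirely: the AWS SDK v2 request object also exposes a .promise() method, and ASK SDK v2 handlers may return a promise, so the handler can await the data before building the response. A minimal sketch of the WhereIsIntent handler along those lines (the fallback speech on error is just illustrative):
// Sketch of the same handler using async/await instead of a callback.
// "dynamodb" is the AWS.DynamoDB client created at the top of the skill.
const WhereIsIntentHandler = {
    canHandle(handlerInput) {
        return handlerInput.requestEnvelope.request.type === 'IntentRequest'
            && handlerInput.requestEnvelope.request.intent.name === 'WhereIsIntent';
    },
    async handle(handlerInput) {
        const itemName = handlerInput.requestEnvelope.request.intent.slots.storedObject.value;
        const currentUser = handlerInput.requestEnvelope.session.user.userId;
        const params = {
            TableName: 'TrackItDB',
            Key: {
                'userID': { S: currentUser },
                'storedObject': { S: itemName }
            }
        };
        try {
            // .promise() turns the getItem request into a promise we can await
            const data = await dynamodb.getItem(params).promise();
            const locationName = data.Item.lenderPerson.S;
            const speechOutput = 'Your ' + itemName + ' is in the ' + locationName;
            return handlerInput.responseBuilder
                .speak(speechOutput)
                .withSimpleCard('Track It', speechOutput)
                .getResponse();
        } catch (err) {
            console.log('Error', err);
            return handlerInput.responseBuilder
                .speak("Sorry, I couldn't find that item.")
                .getResponse();
        }
    }
};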

Related

How to force an AWS Lambda to complete all the code

I have a Lambda that is inserting a record into an Airtable db. This should be pretty straightforward, and I have it working locally.
Once I put it into Lambda, the record is not inserted. I have been messing with this for two days and can't figure out why the record won't load into Airtable.
The only other note that matters is that this Lambda is long-polling for SQS messages.
console.log('Loading function');
const Airtable = require('airtable');
const apiKey = process.env.AIRTABLE_API_KEY;
const BaseTable = process.env.AIRTABLE_BASE_TABLE;
var base = new Airtable({
apiKey: apiKey
}).base(BaseTable);
const table = base('Contacts');
exports.handler = async (event) => {
for (const {
messageId,
body
} of event.Records) {
console.log("iteration: " + iteration);
const data = JSON.parse(body);
var fields = {
"Full Name (First Middle Last)": data.firstname + ' ' + data.lastname,
"Email": data.email,
"Phone": data.phonenumber,
"Address": data.address_one + ' ' + data.address_two,
"City": data.city,
"State/Province": data.state,
"Postal Code": data.zip,
"Country": data.country,
"Birthday": data.dob,
};
insertRecord(fields);
}
console.log('before the end');
};
function insertRecord(fields) {
console.log('insert Record');
return new Promise((resolve, reject) => {
table.create([{
fields: fields
}], function(err, records) {
if (err) {
console.log('ERROR');
console.log(err);
reject(err);
} else {
console.log('SUCCESS');
resolve(records);
}
});
});
}
I have been googling and reviewing others' answers for multiple days now.
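For what it's worth, one likely cause given the code above: insertRecord returns a promise, but the handler never awaits it, so the async handler can return (and the runtime can freeze the execution environment) before the Airtable call completes. A minimal sketch of the handler waiting for every insert, reusing insertRecord from the question and assuming the same event shape:
// Sketch only: collect the promises returned by insertRecord (defined in the
// question) and wait for all of them before the handler returns.
exports.handler = async (event) => {
    const inserts = [];
    for (const { messageId, body } of event.Records) {
        console.log('processing message: ' + messageId);
        const data = JSON.parse(body);
        const fields = {
            "Full Name (First Middle Last)": data.firstname + ' ' + data.lastname,
            "Email": data.email,
            "Phone": data.phonenumber,
            // ...remaining fields as in the original handler
        };
        inserts.push(insertRecord(fields));
    }
    // wait for every Airtable create call to settle before returning
    await Promise.all(inserts);
    console.log('all records inserted');
};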

Firebase Function Returns Before All Callback functions complete execution

I'm using the Google Storage NodeJS client library to list GCS Bucket paths.
Here's the code to the Firebase Function:
import * as functions from 'firebase-functions';
import { Storage } from '@google-cloud/storage';
import { globVars } from '../admin/admin';
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
export const gcsListPath = functions
.region('europe-west2')
.runWith({ timeoutSeconds: 540, memory: '256MB' })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: '',
prefix,
};
if (pathList) {
options.delimiter = '/';
let test: any[] = [];
const callback = (_err: any, _files: any, nextQuery: any, apiResponse: any) => {
test = test.concat(apiResponse.prefixes);
console.log('test : ', test);
console.log('nextQuery : ', nextQuery);
if (nextQuery) {
storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
list = test;
}
}
storage.bucket(bucketName).getFiles(options, callback);
}
if (fileList) {
const [files] = await storage
.bucket(bucketName)
.getFiles(options);
list = files.map((file) => file.name);
}
return { list }; // returns null because it executes before the callback fns finish
} else {
return {
error: { message: 'Bad Request', status: 'INVALID_ARGUMENT' },
};
}
});
My problem is that my Firebase function returns the list (null) before all the callback functions finish execution.
Could someone spot and point out what needs to be changed or added to make the function wait for all the callback functions to finish? I've tried adding async/await but can't seem to get it right.
The reason for your error is that you use a callback, which is never awaited in the code. I would recommend turning the callback code into a promise, something like this:
import * as functions from "firebase-functions";
import { Storage } from "#google-cloud/storage";
import { globVars } from "../admin/admin";
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
const getList = (bucketName, options) => {
return new Promise((resolve, reject) => {
let list;
let test: any[] = [];
const callback = (
_err: any,
_files: any,
nextQuery: any,
apiResponse: any
) => {
test = test.concat(apiResponse.prefixes);
console.log("test : ", test);
console.log("nextQuery : ", nextQuery);
if (nextQuery) {
storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
list = test;
}
resolve(list);
};
try {
storage.bucket(bucketName).getFiles(options, callback);
} catch (error) {
reject(error);
}
});
};
export const gcsListPath = functions
.region("europe-west2")
.runWith({ timeoutSeconds: 540, memory: "256MB" })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: "",
prefix,
};
if (pathList) {
options.delimiter = "/";
list = await getList(bucketName, options);
}
if (fileList) {
const [files] = await storage.bucket(bucketName).getFiles(options);
list = files.map((file) => file.name);
}
return { list };
} else {
return {
error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
};
}
});
I'm not sure if the part with fileList will work as expected; it looks like that API only supports callbacks, not await. Here is a revised version in which resolve(list) is called only once pagination has finished:
import * as functions from "firebase-functions";
import { GetFilesOptions, Storage } from "@google-cloud/storage";
import { globVars } from "../admin/admin";
const projectId = process.env.GCLOUD_PROJECT;
// shared global variables setup
const { keyFilename } = globVars;
// Storage set up
const storage = new Storage({
projectId,
keyFilename,
});
const getList = (bucketName: string, options: GetFilesOptions) => {
return new Promise((resolve, reject) => {
// let test: any[] = [];
let list: any[] = [];
const callback = (
_err: any,
_files: any,
nextQuery: any,
apiResponse: any
) => {
list = list.concat(apiResponse.prefixes);
console.log("list : ", list);
console.log("nextQuery : ", nextQuery);
if (nextQuery) {
storage.bucket(bucketName).getFiles(nextQuery, callback);
} else {
// prefixes = The finished array of prefixes.
resolve(list);
}
};
try {
storage.bucket(bucketName).getFiles(options, callback);
} catch (error) {
reject(error);
}
});
};
export const gcsListPath = functions
.region("europe-west2")
.runWith({ timeoutSeconds: 540, memory: "256MB" })
.https.onCall(async (data, context) => {
if (context.auth?.token.email_verified) {
const { bucketName, prefix, pathList = false, fileList = false } = data;
let list;
const options = {
autoPaginate: false,
delimiter: "",
prefix,
};
if (pathList) {
options.delimiter = "/";
list = await getList(bucketName, options);
}
if (fileList) {
const [files] = await storage.bucket(bucketName).getFiles(options);
list = files.map((file) => file.name);
}
return { list };
} else {
return {
error: { message: "Bad Request", status: "INVALID_ARGUMENT" },
};
}
});

How come the Drive API returns a result when using an invalid access token

My Scenario
I'm using Google Drive API to create a file and to get a list of files.
My problem
1. No matter what value I put in my access_token, the API keeps working.
2. If I change the order of events and call createDriveFile before listDriveFiles, I get this error:
Error: Invalid Credentials
at Gaxios._request (/Users/tamirklein/superquery/bd/lambda/node_modules/googleapis-common/node_modules/google-auth-library/node_modules/gaxios/src/gaxios.ts:109:15)
at
at process._tickDomainCallback (internal/process/next_tick.js:228:7)
My code
if (!global._babelPolyfill) {
var a = require("babel-polyfill")
}
import {google} from 'googleapis'
describe('Run query with API', async () => {
it('check Drive APIs', async () => {
process.env.x_region = 'us-east-1';
let result = await test('start')
})
async function test(p1) {
let auth = getBasicAuthObj();
auth.setCredentials({
access_token: "anyValueWork",
refresh_token: "Replace With a valid refresh Token"
});
let fileList = await listDriveFiles(auth);
let newFile = await createDriveFile(auth);
}
async function listDriveFiles(auth) {
return new Promise((resolved) => {
const {google} = require('googleapis');
const drive = google.drive({version: 'v3', auth});
drive.files.list({
pageSize: 10,
fields: 'nextPageToken, files(id, name)',
q: 'trashed=false'
}, (err, res) => {
if (err) {
console.log('The API returned an error: ' + err);
resolved([err, null]);
} else {
const files = res.data.files;
if (files.length) {
console.log(`We fetched ${files.length} Files`);
// files.map((file) => {
// console.log(`${file.name} (${file.id})`);
// });
} else {
console.log('No files found.');
}
resolved([err, res]);
}
});
});
}
async function createDriveFile(auth) {
return new Promise(async (resolved) => {
//const fs = require('fs');
const {google} = require('googleapis');
const drive = google.drive({version: 'v3', auth});
let data = {
value: 'value'
};
let fileName = 'fileName.txt';
let fileMetadata = {
'name': fileName
};
// create buffer
let stream = require('stream');
let bufferStream = new stream.PassThrough();
bufferStream.end(Buffer.from(JSON.stringify(data)));
let media = {
mimeType: 'application/json',
body: bufferStream // fs.createReadStream("test.txt") //bufferStream //
};
drive.files.create({
resource: fileMetadata,
media: media,
fields: 'id'
}, function (err, file) {
if (err) {
// Handle error
console.error("Error: savePasswordInDrive" + err);
} else {
console.log('File Id: ', file.data.id);
}
resolved([err, file]);
});
})
}
async function _wait(milliseconds) {
return new Promise(resolved => {
setTimeout(() => {
resolved()
}, milliseconds)
})
}
/**
* Create oAuth object
* @returns {OAuth2Client}
*/
function getBasicAuthObj() {
let clientId = 'Replace With a valid clientId';
let clientSecret = 'Replace With a valid clientSecret';
let redirectUrl = 'URL';
return new google.auth.OAuth2(
clientId,
clientSecret,
redirectUrl
)
}
})
Any ideas on how to resolve this?

GraphQL relay connectionFromArraySlice

There isn't any documentation for how the array meta info (arrayLength and sliceStart) should be implemented using Facebook's graphql-relay-js helper library.
https://github.com/graphql/graphql-relay-js/issues/199
I managed to get it to work using the following implementation; however, I am guessing there is an easier or more correct way to do this.
Retrieve rows and row count from database
function transformRole(role: Role) {
return { ...role, roleId: role.id };
}
async function getRolesSlice({ roleId, after, first, last, before }: any): Promise<[Role[], number]> {
const queryBuilder = repository.createQueryBuilder();
if (roleId !== undefined) {
queryBuilder.where('id = :roleId', { roleId });
}
if (before) {
const beforeId = cursorToOffset(before);
queryBuilder.where('id < :id', { id: beforeId });
}
if (after) {
const afterId = cursorToOffset(after);
queryBuilder.where({
id: MoreThan(Number(afterId))
});
}
if (first === undefined && last === undefined) {
queryBuilder.orderBy('id', 'ASC');
}
if (first) {
queryBuilder.orderBy('id', 'ASC').limit(first);
}
if (last) {
queryBuilder.orderBy('id', 'DESC').limit(last);
}
return Promise.all([
queryBuilder.getMany()
.then(roles => roles.map(transformRole)),
repository.count() // Total number of roles
]);
}
Roles resolver
resolve: (_, args) =>
getRolesSlice(args)
.then(([results, count]) => {
const firstId = results[0] && results[0].roleId;
let sliceStart = 0;
if (args.first) {
sliceStart = firstId;
}
if (args.last) {
sliceStart = Math.max(firstId - args.last, 0);
}
if (args.after && args.last) {
sliceStart += 1;
}
return connectionFromArraySlice(
results,
args,
{
arrayLength: count + 1,
sliceStart
}
);
})
},
Edit:
This is what I came up with, which is a little cleaner and seems to be working correctly.
const initialize = () => {
repository = getConnection().getRepository(Role);
}
function transformRole(role: Role) {
return { ...role, roleId: role.id };
}
function getRolesSlice(args: any):
Promise<[
Role[],
any,
{ arrayLength: number; sliceStart: number; }
]> {
if (!repository) initialize();
const { roleId, after, first, last, before } = args;
const queryBuilder = repository.createQueryBuilder();
if (roleId !== undefined) {
queryBuilder.where('id = :roleId', { roleId });
}
if (before !== undefined) {
const beforeId = cursorToOffset(before);
queryBuilder.where({
id: LessThan(beforeId)
});
}
if (after !== undefined) {
const afterId = cursorToOffset(after);
queryBuilder.where({
id: MoreThan(Number(afterId))
});
}
if (first !== undefined) {
queryBuilder.orderBy('id', 'ASC').limit(first);
} else if (last !== undefined) {
queryBuilder.orderBy('id', 'DESC').limit(last);
} else {
queryBuilder.orderBy('id', 'ASC');
}
return Promise.all([
queryBuilder.getMany()
.then(roles => roles.map(transformRole))
.then(roles => last !== undefined ? roles.slice().reverse() : roles),
repository.count()
]).then(([roles, totalCount]) =>
[
roles,
args,
{
arrayLength: totalCount + 1,
sliceStart: roles[0] && roles[0].roleId
}
]
);
}
// Resolver
roles: {
type: rolesConnection,
args: {
...connectionArgs,
roleId: {
type: GraphQLString
}
},
resolve: (_, args) =>
getRolesSlice(args)
.then((slice) => connectionFromArraySlice(...slice))
},
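For reference, a minimal standalone sketch (not tied to the schema above) of how connectionFromArraySlice interprets the meta info: sliceStart is the zero-based offset of the slice's first element within the full result set, arrayLength is the total number of rows, and the cursors it emits are offset-based.
// Standalone example of connectionFromArraySlice's meta arguments.
const { connectionFromArraySlice, offsetToCursor } = require('graphql-relay');
// Pretend the full result set has five rows and the query fetched rows 2..3.
const totalRows = 5;
const slice = ['c', 'd'];
const connection = connectionFromArraySlice(slice, { first: 2 }, {
    sliceStart: 2,           // offset of 'c' within the full result set
    arrayLength: totalRows   // total number of rows, used for pageInfo
});
console.log(connection.edges.map(e => e.cursor));
// same values as [offsetToCursor(2), offsetToCursor(3)]
console.log(connection.pageInfo.hasNextPage); // true, because 2 + 2 < 5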

Graphql-js subscriptions unit tests not working as expected

I have written integration tests for graphql-js subscriptions, which are showing weird behavior.
My graphql-js subscription works perfectly in GraphiQL, but when the same subscription is called from the unit test, it fails.
Graphql-js object, with resolve function and subscribe function:
return {
type: outputType,
args: {
input: {type: new GraphQLNonNull(inputType)},
},
resolve(payload, args, context, info) {
const clientSubscriptionId = (payload) ? payload.subscriptionId : null;
const object = (payload) ? payload.object : null;
var where = null;
var type = null;
var target = null;
if (object) {
where = (payload) ? payload.object.where : null;
type = (payload) ? payload.object.type : null;
target = (payload) ? payload.object.target : null;
}
return Promise.resolve(subscribeAndGetPayload(payload, args, context, info))
.then(payload => ({
clientSubscriptionId, where, type, target, object: payload.data,
}));
},
subscribe: withFilter(
() => pubSub.asyncIterator(modelName),
(payload, variables, context, info) => {
const subscriptionPayload = {
clientSubscriptionId: variables.input.clientSubscriptionId,
remove: variables.input.remove,
create: variables.input.create,
update: variables.input.update,
opts: variables.input.options,
};
subscriptionPayload.model = model;
try {
pubSub.subscribe(info.fieldName, null, subscriptionPayload);
} catch (ex) {
console.log(ex);
}
return true;
}
),
};
Subscription query
subscription {
Customer(input: {create: true, clientSubscriptionId: 112}) {
customer {
id
name
age
}
}
}
Mutation query
mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 50", age:50}}) {
obj {
id
name
}
}
}
}
Integration Test
'use strict';
const ws = require('ws');
const { SubscriptionClient } = require('subscriptions-transport-ws');
const { ApolloClient } = require('apollo-client');
const { HttpLink } = require('apollo-link-http');
const { InMemoryCache } = require('apollo-cache-inmemory');
const Promise = require('bluebird');
const expect = require('chai').expect;
const chai = require('chai').use(require('chai-http'));
const server = require('../server/server');
const gql = require('graphql-tag');
let apollo;
let networkInterface;
const GRAPHQL_ENDPOINT = 'ws://localhost:5000/subscriptions';
describe('Subscription', () => {
before(async () => {
networkInterface = new SubscriptionClient(
GRAPHQL_ENDPOINT, { reconnect: true }, ws);
apollo = new ApolloClient({
networkInterface ,
link: new HttpLink({ uri: 'http://localhost:3000/graphql' }),
cache: new InMemoryCache()
});
});
after(done => {
networkInterface.close() ;
});
it('subscription', async () => {
const client = () => apollo;
// SUBSCRIBE and make a promise
const subscriptionPromise = new Promise((resolve, reject) => {
client().subscribe({
query: gql`
subscription {
Customer(input: {create: true,
clientSubscriptionId: 112,
options: {where: {age: 50}}}) {
customer {
name
}
}
}
`
}).subscribe({
next: resolve,
error: reject
});
});
let execGraphQL;
// MUTATE
await execGraphQL(
`mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 21", age:50}}) {
obj {
id
name
}
}
}
}`
);
// ASSERT SUBSCRIPTION RECEIVED EVENT
expect(await subscriptionPromise).to.deep.equal({});
});
});
Issue here
When the test is run, the payload in the resolve function contains global data, whereas it should contain the subscription payload, so the code breaks.
