Trying to integrate the serverless-local-authorizers-plugin into a system that uses an AWS Lambda authorizer. When I print the response it does contain a principalId, and I return it in an object exactly like the deployed Lambda response does. Any ideas why I am getting the error
Serverless: Warning: No principalId in response?
How it was returned:
{
  "principalId": "user",
  "policyDocument": {
    "Version": "2012-10-17",
    "Statement": [
      {
        "Action": "execute-api:Invoke",
        "Effect": "Deny",
        "Resource": "arn:aws:execute-api:eu-central-1:my-AWS-ID:*"
      }
    ]
  },
  "context": {
    "platformRoles": "Guest",
    "userId": "unknown"
  }
}
The local auth proxy function:
const AWS = require('aws-sdk');
const mylocalAuthProxyFn = async (event, context) => {
const lambda = new AWS.Lambda();
const req = {
FunctionName: 'my-lambda-function-name',
InvocationType: 'RequestResponse',
Payload: JSON.stringify(event)
};
const results = await lambda.invoke(req).promise();
if (results.StatusCode === 200) {
return results.Payload;
}
throw new Error('Unauthorized');
};
module.exports = { mylocalAuthProxyFn };
Here is what I have figured out that works. After speaking to the library owners, the response has to be an explicit object in my case for some reason. They claim you can return the resolved value of the promise directly, but that didn't work for me. The code below does:
const AWS = require('aws-sdk');
const mylocalAuthProxyFn = async (event, context) => {
const lambda = new AWS.Lambda();
const req = {
FunctionName: 'aidonic-endpoints-dev-createAuthorization',
InvocationType: 'RequestResponse',
Payload: JSON.stringify(event)
};
const result = await lambda.invoke(req).promise();
if (result.StatusCode === 200) {
const pl = JSON.parse(result.Payload);
return {
principalId: pl.principalId,
policyDocument: {
Version: '2012-10-17',
Statement: [
{
Action: 'execute-api:Invoke',
Effect: 'Allow',
Resource: '*'
}
]
},
context: {
platformRoles: 'Verified Organization Representative,Registered User',
userId: '23e8320fcechi042389h02ijwqwd'
}
};
}
throw new Error('Unauthorized');
};
module.exports = { mylocalAuthProxyFn };
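For what it's worth, the likely reason the first version fails is that lambda.invoke(...).promise() returns Payload as a raw JSON string (or Buffer), not a parsed object, so the plugin never finds a principalId on it. A more generic variant of the working version, just a minimal sketch that parses the invoked authorizer's payload and passes its fields through instead of hard-coding the policy (the function name is a placeholder), would be:

const AWS = require('aws-sdk');
const mylocalAuthProxyFn = async (event, context) => {
  const lambda = new AWS.Lambda();
  const req = {
    FunctionName: 'my-lambda-function-name', // placeholder for the deployed authorizer
    InvocationType: 'RequestResponse',
    Payload: JSON.stringify(event)
  };
  const result = await lambda.invoke(req).promise();
  if (result.StatusCode === 200) {
    // Payload comes back as a JSON string, so parse it before handing it to the plugin
    const pl = JSON.parse(result.Payload);
    // Return a plain object with the fields the plugin expects
    return {
      principalId: pl.principalId,
      policyDocument: pl.policyDocument,
      context: pl.context
    };
  }
  throw new Error('Unauthorized');
};
module.exports = { mylocalAuthProxyFn };

Whether the plugin accepts the parsed payload returned as-is seems to vary, which matches the library owners' comment above; building the explicit object is the safer route.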
Related
I use Next.js with Elasticsearch Cloud, and I also use Amplify with a Lambda created from the Amplify REST API.
This is my function:
export async function getServerSideProps(context) {
let esItems;
try {
const { query } = context.query;
const apiName = 'APINAME';
const path = '/searchlist';
const myInit = {
response: true,
queryStringParameters: { query: query }
};
const response = await API.get(apiName, path, myInit);
esItems = response;
} catch (err) {
console.log(err)
}
return {
props: {
allProducts: esItems ? esItems.data.items : [],
}
};
}
I return 50 products from Elasticsearch and get this error:
502 ERROR
The request could not be satisfied.
The Lambda function returned invalid JSON: The JSON output is not parsable. We can't connect to the server for this app or website at this time. There might be too much traffic or a configuration error. Try again later, or contact the app or website owner.
If you provide content to customers through CloudFront, you can find steps to troubleshoot and help prevent this error by reviewing the CloudFront documentation.
This is the Lambda function:
app.get('/searchlist', async (req, res) => {
const { query } = req.query;
const client = new Client({
node: "https://elastic-cloud....",
auth: {
username: process.env.USERNAME,
password: process.env.PASSWORD
}
});
const searchQuery = {
query: {
multi_match: {
query: query,
type: "phrase",
fields: [
'manufacturerTypeDescription^8',
'manufacturerName^6',
'ean^4',
'_id^2',
]
}
}
}
const typeSearch = {
index: "product",
size: 50,
body: searchQuery
}
const r = await client.search(typeSearch);
const hits = r.body.hits;
const items = hits.hits.map((hit) => ({
_id: hit._id,
...hit._source,
}))
res.json({
success: 'get call succeed!',
items
});
});
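One thing that stands out in the route (an observation, not something stated in the original post): the async handler has no error handling, so if client.search rejects, the function never sends a JSON body and the proxy integration can surface that as a 502. A minimal sketch of a defensive version, reusing the same client and typeSearch as above:

app.get('/searchlist', async (req, res) => {
  try {
    const r = await client.search(typeSearch); // same client and query as above
    const items = r.body.hits.hits.map((hit) => ({ _id: hit._id, ...hit._source }));
    res.json({ success: 'get call succeed!', items });
  } catch (err) {
    // Always answer with a JSON body, even on failure, so the response stays parsable
    console.error(err);
    res.status(500).json({ error: err.message });
  }
});

If the 50 hits are large documents, it may also be worth checking the total response size, since Lambda proxy responses have a payload size limit.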
I have two DynamoDB tables: Order and Client.
When an Order is created, a Lambda function is triggered and first seeks to read the Client associated with the Order. However, the Lambda function fails to do so and does not report any error.
The Lambda function is:
var AWS = require('aws-sdk');
AWS.config.update({region: process.env.REGION})
docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = async (event) => {
await event.Records.forEach( async (record) => {
const { eventName, dynamodb } = record
if(eventName==="INSERT") {
console.log('A new order has been created')
const client_id = dynamodb.NewImage.client_id.S
let params = {
TableName: process.env.API_REALAPI_CLIENTTABLE_NAME,
Key: {
"id": client_id
}
}
console.log(params)
try {
const result = await docClient.get(params).promise()
console.log(result)
} catch(err) { console.log(err); throw new Error(err); }
}
})
}
When I run this, it gets to console.log(params) but then doesn't show any result or any error.
My function was configured with the Amplify CLI and appears to have read permissions on the Client table:
"Statement": [
{
"Action": [
"dynamodb:Put*",
"dynamodb:Create*",
"dynamodb:BatchWriteItem",
"dynamodb:Get*",
"dynamodb:BatchGetItem",
"dynamodb:List*",
"dynamodb:Describe*",
"dynamodb:Scan",
"dynamodb:Query",
"dynamodb:Update*",
"dynamodb:RestoreTable*",
"dynamodb:Delete*"
],
"Resource": [
"arn:aws:dynamodb:us-east-1:123456123456:table/Client-abcdefg-dev",
"arn:aws:dynamodb:us-east-1:123456123456:table/Client-abcdefg-dev/index/*"
],
"Effect": "Allow"
},
{
Am I missing something?
Thanks!
The issue is that you are using await in a forEach loop, which doesn't work. Your handler is exiting before the DB call is done, which is why there is no response. You will want to do something more like this:
exports.handler = async (event) => {
const jobs = event.Records.map((record) => {
const { eventName, dynamodb } = record
if (eventName === 'INSERT') {
console.log('A new order has been created')
const client_id = dynamodb.NewImage.client_id.S
let params = {
TableName: process.env.API_REALAPI_CLIENTTABLE_NAME,
Key: {
id: client_id
}
}
console.log(params)
return docClient.get(params).promise()
}
})
try {
const results = await Promise.all(jobs)
} catch (err) {
console.log(err)
throw new Error(err)
}
}
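If the reads do not need to run in parallel, an equivalent option (a sketch along the same lines, reusing the docClient configured above, not part of the original answer) is a plain for...of loop, where await actually pauses the handler:

exports.handler = async (event) => {
  for (const record of event.Records) {
    if (record.eventName === 'INSERT') {
      const client_id = record.dynamodb.NewImage.client_id.S
      const params = {
        TableName: process.env.API_REALAPI_CLIENTTABLE_NAME,
        Key: { id: client_id }
      }
      // await works here because for...of does not swallow the promise the way forEach does
      const result = await docClient.get(params).promise()
      console.log(result)
    }
  }
}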
My Scenario
I'm using Google Drive API to create a file and to get a list of files.
My problem
1. No matter what value I put in access_token, the API keeps working.
2. If I change the order of events and call createDriveFile before listDriveFiles, I get this error:
Error: Invalid Credentials
at Gaxios._request (/Users/tamirklein/superquery/bd/lambda/node_modules/googleapis-common/node_modules/google-auth-library/node_modules/gaxios/src/gaxios.ts:109:15)
at
at process._tickDomainCallback (internal/process/next_tick.js:228:7)
My code
if (!global._babelPolyfill) {
var a = require("babel-polyfill")
}
import {google} from 'googleapis'
describe('Run query with API', async () => {
it('check Drive APIs', async () => {
process.env.x_region = 'us-east-1';
let result = await test('start')
})
async function test(p1) {
let auth = getBasicAuthObj();
auth.setCredentials({
access_token: "anyValueWork",
refresh_token: "Replace With a valid refresh Token"
});
let fileList = await listDriveFiles(auth);
let newFile = await createDriveFile(auth);
}
async function listDriveFiles(auth) {
return new Promise((resolved) => {
const {google} = require('googleapis');
const drive = google.drive({version: 'v3', auth});
drive.files.list({
pageSize: 10,
fields: 'nextPageToken, files(id, name)',
q: 'trashed=false'
}, (err, res) => {
if (err) {
console.log('The API returned an error: ' + err);
resolved([err, null]);
} else {
const files = res.data.files;
if (files.length) {
console.log(`We fetched ${files.length} Files`);
// files.map((file) => {
// console.log(`${file.name} (${file.id})`);
// });
} else {
console.log('No files found.');
}
resolved([err, res]);
}
});
});
}
async function createDriveFile(auth) {
return new Promise(async (resolved) => {
//const fs = require('fs');
const {google} = require('googleapis');
const drive = google.drive({version: 'v3', auth});
let data = {
value: 'value'
};
let fileName = 'fileName.txt';
let fileMetadata = {
'name': fileName
};
// create buffer
let stream = require('stream');
let bufferStream = new stream.PassThrough();
bufferStream.end(Buffer.from(JSON.stringify(data)));
let media = {
mimeType: 'application/json',
body: bufferStream // fs.createReadStream("test.txt") //bufferStream //
};
drive.files.create({
resource: fileMetadata,
media: media,
fields: 'id'
}, function (err, file) {
if (err) {
// Handle error
console.error("Error: savePasswordInDrive" + err);
} else {
console.log('File Id: ', file.data.id);
}
resolved([err, file]);
});
})
}
async function _wait(milliseconds) {
return new Promise(resolved => {
setTimeout(() => {
resolved()
}, milliseconds)
})
}
/**
* Create oAuth object
* @returns {OAuth2Client}
*/
function getBasicAuthObj() {
let clientId = 'Replace With a valid clientId';
let clientSecret = 'Replace With a valid clientSecret';
let redirectUrl = 'URL';
return new google.auth.OAuth2(
clientId,
clientSecret,
redirectUrl
)
}
})
Any ideas on how to resolve this?
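On problem 1, a possible explanation (an assumption about google-auth-library's behavior, not something confirmed in the post) is that once a valid refresh_token is set via setCredentials, the client can mint a fresh access token on its own, so the placeholder access_token value never actually gets exercised. A quick way to inspect the token the client will really send is the library's getAccessToken():

// Hypothetical helper: log the access token the OAuth2 client resolves to
async function logEffectiveToken(auth) {
  const { token } = await auth.getAccessToken(); // may refresh via the refresh_token
  console.log('Effective access token:', token);
}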
Hello, I have the following Lambda code based on Node.js. I am unable to call the query method to retrieve the data.
Input: { key: 'value' } -- Using Test Event Input.
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
var ddb = new AWS.DynamoDB.DocumentClient({apiVersion: '2012-08-10'});
exports.handler = async (event, context) => {
const params = {
TableName: 'Chapter',
IndexName: 'subjectId-index', // Global Secondary Index
KeyConditionExpression: 'subjectId = :subjectId',
ExpressionAttributeValues: {
':subjectId': event.key,
},
};
console.log(JSON.stringify(params));
ddb.query(params, (err, data) => {
// Console.log('Not executing this part !!!')
if (err) console.log(err);
console.log(data);
});
};
The query method is not even being called, nor is any error being logged. I could really use some help.
Since you have console logs, you can debug your code using CloudWatch to check your Lambda logs.
Select your Lambda, and if you see any execution logs you can click through for more details; you will find your console logs there.
Call context.done.
Here is a sample program you might try; I just wrote it and have not tested it:
var AWS = require('aws-sdk');
AWS.config.update({ region: 'us-east-2' });
var ddb = new AWS.DynamoDB.DocumentClient({ apiVersion: '2012-08-10' });
const executeQuery = (params) => {
return new Promise(function (resolve, reject) {
ddb.query(params, (err, data) => {
if (err) reject(err);
resolve(data);
});
});
}
exports.handler = async (event, context) => {
const params = {
TableName: 'Chapter',
IndexName: 'subjectId-index', // Global Secondary Index
KeyConditionExpression: 'subjectId = :subjectId',
ExpressionAttributeValues: {
':subjectId': event.key,
},
};
console.log(JSON.stringify(params));
// Return the promise chain so the async handler waits for the query to finish
return executeQuery(params).then((data) => {
console.log('success');
context.done(null, data); // the first argument of context.done is the error
}).catch((err) => {
console.log('Error occurred');
context.done(err);
});
};
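Since the handler is already declared async, an alternative (a sketch, not part of the answer above) is to drop context.done and simply await the promisified query, returning the data from the handler:

exports.handler = async (event) => {
  const params = {
    TableName: 'Chapter',
    IndexName: 'subjectId-index',
    KeyConditionExpression: 'subjectId = :subjectId',
    ExpressionAttributeValues: { ':subjectId': event.key },
  };
  // Awaiting here keeps the handler alive until DynamoDB responds
  const data = await ddb.query(params).promise();
  console.log(data);
  return data;
};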
I have written integration tests for graphql-js subscriptions, which are showing weird behavior.
My graphql-js subscription works perfectly in GraphiQL, but when the same subscription is called from a unit test, it fails.
The graphql-js object, with its resolve function and subscribe function:
return {
type: outputType,
args: {
input: {type: new GraphQLNonNull(inputType)},
},
resolve(payload, args, context, info) {
const clientSubscriptionId = (payload) ? payload.subscriptionId : null;
const object = (payload) ? payload.object : null;
var where = null;
var type = null;
var target = null;
if (object) {
where = (payload) ? payload.object.where : null;
type = (payload) ? payload.object.type : null;
target = (payload) ? payload.object.target : null;
}
return Promise.resolve(subscribeAndGetPayload(payload, args, context, info))
.then(payload => ({
clientSubscriptionId, where, type, target, object: payload.data,
}));
},
subscribe: withFilter(
() => pubSub.asyncIterator(modelName),
(payload, variables, context, info) => {
const subscriptionPayload = {
clientSubscriptionId: variables.input.clientSubscriptionId,
remove: variables.input.remove,
create: variables.input.create,
update: variables.input.update,
opts: variables.input.options,
};
subscriptionPayload.model = model;
try {
pubSub.subscribe(info.fieldName, null, subscriptionPayload);
} catch (ex) {
console.log(ex);
}
return true;
}
),
};
Subscription query
subscription {
Customer(input: {create: true, clientSubscriptionId: 112}) {
customer {
id
name
age
}
}
}
Mutation query
mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 50", age:50}}) {
obj {
id
name
}
}
}
}
Integration Test
'use strict';
const ws = require('ws');
const { SubscriptionClient } = require('subscriptions-transport-ws');
const { ApolloClient } = require('apollo-client');
const { HttpLink } = require('apollo-link-http');
const { InMemoryCache } = require('apollo-cache-inmemory');
const Promise = require('bluebird');
const expect = require('chai').expect;
const chai = require('chai').use(require('chai-http'));
const server = require('../server/server');
const gql = require('graphql-tag');
let apollo;
let networkInterface;
const GRAPHQL_ENDPOINT = 'ws://localhost:5000/subscriptions';
describe('Subscription', () => {
before(async () => {
networkInterface = new SubscriptionClient(
GRAPHQL_ENDPOINT, { reconnect: true }, ws);
apollo = new ApolloClient({
networkInterface ,
link: new HttpLink({ uri: 'http://localhost:3000/graphql' }),
cache: new InMemoryCache()
});
});
after(done => {
networkInterface.close() ;
});
it('subscription', async () => {
const client = () => apollo;
// SUBSCRIBE and make a promise
const subscriptionPromise = new Promise((resolve, reject) => {
client().subscribe({
query: gql`
subscription {
Customer(input: {create: true,
clientSubscriptionId: 112,
options: {where: {age: 50}}}) {
customer {
name
}
}
}
`
}).subscribe({
next: resolve,
error: reject
});
});
let execGraphQL;
// MUTATE
await execGraphQL(
`mutation {
Customer {
CustomerCreate (input:{data:{name:"Atif 21", age:50}}) {
obj {
id
name
}
}
}
}`
);
// ASSERT SUBSCRIPTION RECEIVED EVENT
expect(await subscriptionPromise).to.deep.equal({});
});
});
Issue Here
When the test is run, the payload in the resolve function contains global data, whereas it should contain the subscription payload, so the code breaks.