Closed. This question needs details or clarity. It is not currently accepting answers.
Want to improve this question? Add details and clarify the problem by editing this post.
Closed 2 years ago.
Improve this question
How do I integrate Amazon QuickSight into a React application?
1.how to import aws-sdk
2.where to get the dashboard ID
You are going to need to get a URL for your QuickSight graphs, either from AWS directly or by using the SDK in your API. I'm currently getting a new URL from my Node.js API.
const AWS = require("aws-sdk");
exports.url_aws = (req, res) => {
let awsCredentials = {
region: "us-east-1",
accessKeyId: process.env.KEY_ID,
secretAccessKey: process.env.ACCESS_KEY
};
AWS.config.update(awsCredentials);
let params = {
RoleArn: "<the arn role and iam id>",
RoleSessionName: "embeddingsession",
};
let sts = new AWS.STS({
apiVersion: "2011-06-15"
});
sts.assumeRole(paras, (err, data) => {
if (err) {
console.log(err); // account for error;
res.end();
}
AWS.config.update({
accessKeyId: data.Credentials.AccessKeyId,
secretAccessKey: data.Credentials.SecretAccessKey,
sessionToken: data.Credentials.SessionToken
});
AWS.config.update({
region: "<your region>"
});
let quicksight = new AWS.QuickSight({
apiVersion: '2018-04-01',
region: '<Your region>'
});
let getdashboardparams = {
AwsAccountId: "<Your account Id",
DashboardId: "<Dashboard Id you want to display",
IdentityType: "QUICKSIGHT",
ResetDisable: false, // or true, what ever you prefer
SessionLifeTimeInMinutes: "<writes the minutes inside the quotes",
UndoRedoDisabled: false, // or true...
UserArn: "<the user arn>"
};
quicksight.getDashboardEmbedUrl(getdashboardparams, (err, data) => {
if (err) console.log("Quicksight GetDashboard Error",err, err.stack);
else console.log(data);
res.json(data);
console.log("\nEND \n")
res.end();
return;
});
});
};
That's how I have it set up in my backend; in the front-end in React, I used react-iframe.
https://www.npmjs.com/package/react-iframe
And basically just put the url in the url prop.
Hope this helps.
Related
So I added a lambda function category using the amplify CLI, in order to query data from the GSI(Global secondary Index) I created using the #key directive in the graphql schema. Whenever I try mocking the function locally using the amplify mock function <functionName> the callback function of the query keeps on returning null. The function can be seen below
const AWS = require("aws-sdk");
// AWS SDK v2 DynamoDB DocumentClient pointed at the region supplied by the
// Amplify runtime. NOTE(review): under `amplify mock function` this env var is
// populated with a fake value (e.g. "us-east-1-fake"), which makes the
// endpoint unreachable — see the error trace below.
const db = new AWS.DynamoDB.DocumentClient({
  region: process.env.REGION,
  apiVersion: "2012-08-10",
});

// Query parameters for the "RegistreesByProvince" GSI: fetch every item whose
// `province` partition key equals "Sichuan". The table name is injected by
// Amplify via an environment variable.
const params = {
  // ProjectionExpression: ["province", "gender", "updatedAt", "createdAt"],
  ExpressionAttributeValues: {
    ":provinceVal": "Sichuan",
  },
  IndexName: "RegistreesByProvince",
  KeyConditionExpression: "province = :provinceVal",
  TableName: process.env.API_PORTAL_SUBMISSIONSTABLE_NAME,
};
// Runs the GSI query above and returns its result so the GraphQL resolver has
// something to resolve with.
const calculateStatistics = async () => {
  try {
    const data = await db.query(params).promise();
    console.log(data);
    // BUG FIX: the original logged `data` but never returned it, so the
    // resolver — and therefore the GraphQL field — always resolved to null.
    return data;
  } catch (err) {
    console.log(err);
    // Surface the failure to the resolver instead of silently resolving null.
    throw err;
  }
};
/**
 * Resolver table keyed by GraphQL type name, then field name — the same
 * shape the Lambda handler walks via event.typeName / event.fieldName.
 */
const resolvers = {
  Query: {
    getStatistics() {
      return calculateStatistics();
    },
  },
};
exports.handler = async (event) => {
// TODO implement
const typeHandler = resolvers[event.typeName];
if (typeHandler) {
const resolver = typeHandler[event.fieldName];
if (resolver) {
var result = await resolver(event);
return result;
}
}
}; // };
I then tried to capture the whole event and logged it to the console as can be seen in the calculateStatistics function, which now showed me a bit more explicit error as follows.
{ UnknownEndpoint: Inaccessible host: `dynamodb.us-east-1-fake.amazonaws.com'. This service may not be available in the `us-east-1-fake' region.
at Request.ENOTFOUND_ERROR (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/event_listeners.js:501:46)
at Request.callListeners (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/sequential_executor.js:106:20)
at Request.emit (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/sequential_executor.js:78:10)
at Request.emit (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/request.js:688:14)
at ClientRequest.error (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/event_listeners.js:339:22)
at ClientRequest.<anonymous> (/Users/apple/Documents/work/web/portal/amplify/backend/function/calcStatistics/src/node_modules/aws-sdk/lib/http/node.js:96:19)
at ClientRequest.emit (events.js:198:13)
at ClientRequest.EventEmitter.emit (domain.js:448:20)
at TLSSocket.socketErrorListener (_http_client.js:401:9)
at TLSSocket.emit (events.js:198:13)
message:
'Inaccessible host: `dynamodb.us-east-1-fake.amazonaws.com\'. This service may not be available in the `us-east-1-fake\' region.',
code: 'UnknownEndpoint',
region: 'us-east-1-fake',
hostname: 'dynamodb.us-east-1-fake.amazonaws.com',
retryable: true,
originalError:
{ Error: getaddrinfo ENOTFOUND dynamodb.us-east-1-fake.amazonaws.com dynamodb.us-east-1-fake.amazonaws.com:443
at GetAddrInfoReqWrap.onlookup [as oncomplete] (dns.js:56:26)
message:
'getaddrinfo ENOTFOUND dynamodb.us-east-1-fake.amazonaws.com dynamodb.us-east-1-fake.amazonaws.com:443',
errno: 'ENOTFOUND',
code: 'NetworkingError',
syscall: 'getaddrinfo',
hostname: 'dynamodb.us-east-1-fake.amazonaws.com',
host: 'dynamodb.us-east-1-fake.amazonaws.com',
port: 443,
region: 'us-east-1-fake',
retryable: true,
time: 2020-08-12T10:18:08.321Z },
time: 2020-08-12T10:18:08.321Z }
Result:
null
Finished execution.
I then did more research and came across this thread about inaccessible-dynamodb-host-when-running-amplify-mock, which I followed and tried implementing, but to no avail. Any help on this would be very much appreciated.
PS: It is worth mentioning that I was able to successfully query for this data through the Appsync console, which led me to strongly believe the problem lies in the function itself.
After doing more research and asking around, I finally made sense of the answer that was provided to me on github that
When running mock on a function which has access to a dynamodb
table generated by API. It will populate the env with fake values. If
you would like to mock your lambda function against your deployed
dynamodb table you can edit the values in the sdk client so it can
make the call accurately.
In summary, if you are running things locally, then you wouldn't have access to your backend variables which you might try mocking. I hope this helps someone. Thanks!
I'm trying to deploy an S3 static website and API gateway/lambda in a single stack.
The javascript in the S3 static site calls the lambda to populate an HTML list but it needs to know the API Gateway URL for the lambda integration.
Currently, I generate a RestApi like so...
// Lambda backing the test API.
const handler = new lambda.Function(this, "TestHandler", {
  runtime: lambda.Runtime.NODEJS_10_X,
  code: lambda.Code.asset("build/test-service"),
  handler: "index.handler",
  environment: {
  }
});
this.api = new apigateway.RestApi(this, "test-api", {
  restApiName: "Test Service"
});
const getIntegration = new apigateway.LambdaIntegration(handler, {
  requestTemplates: { "application/json": '{ "statusCode": "200" }' }
});
// BUG FIX: the integration was created but never attached to the API, so no
// method/endpoint was provisioned. Wire it up to GET on the API root.
this.api.root.addMethod("GET", getIntegration);
// NOTE: at synth time this is an unresolved CloudFormation token; the real
// URL only exists after deployment.
const apiUrl = this.api.url;
But on cdk deploy, apiUrl =
"https://${Token[TOKEN.39]}.execute-api.${Token[AWS::Region.4]}.${Token[AWS::URLSuffix.1]}/${Token[TOKEN.45]}/"
So the url is not parsed/generated until after the static site requires the value.
How can I calculate/find/fetch the API Gateway URL and update the javascript on cdk deploy?
Or is there a better way to do this? i.e. is there a graceful way for the static javascript to retrieve a lambda api gateway url?
Thanks.
You are creating a LambdaIntegration but it isn't connected to your API.
To add it to the root of the API do: this.api.root.addMethod(...) and use this to connect your LambdaIntegration and API.
This should give you an endpoint with a URL
If you are using the s3-deployment module to deploy your website as well, I was able to hack together a solution using what is available currently (pending a better solution at https://github.com/aws/aws-cdk/issues/12903). The following together allow for you to deploy a config.js to your bucket (containing attributes from your stack that will only be populated at deploy time) that you can then depend on elsewhere in your code at runtime.
In inline-source.ts:
// imports removed for brevity
// imports removed for brevity

/**
 * Builds an s3-deployment ISource for a single file (`path`) whose contents
 * (`content`) may contain CloudFormation tokens that only resolve at deploy
 * time. At bind time it provisions: a staging bucket, a Lambda
 * (./inline-lambda) that zips `content` into index.zip and uploads it, and a
 * custom resource that invokes that Lambda during deployment. The
 * BucketDeployment then consumes the staging bucket's index.zip.
 *
 * NOTE(review): `options` is currently unused — TODO confirm intent.
 */
export function inlineSource(path: string, content: string, options?: AssetOptions): ISource {
  return {
    bind: (scope: Construct, context?: DeploymentSourceContext): SourceConfig => {
      if (!context) {
        throw new Error('To use a inlineSource, context must be provided');
      }
      // Find available ID — allows several inline sources per scope.
      let id = 1;
      while (scope.node.tryFindChild(`InlineSource${id}`)) {
        id++;
      }
      // Staging bucket that will hold the generated index.zip.
      const bucket = new Bucket(scope, `InlineSource${id}StagingBucket`, {
        removalPolicy: RemovalPolicy.DESTROY
      });
      // Lambda that zips `content` and uploads it to the staging bucket.
      const fn = new Function(scope, `InlineSource${id}Lambda`, {
        runtime: Runtime.NODEJS_12_X,
        handler: 'index.handler',
        code: Code.fromAsset('./inline-lambda')
      });
      bucket.grantReadWrite(fn);
      const myProvider = new Provider(scope, `InlineSource${id}Provider`, {
        onEventHandler: fn,
        logRetention: RetentionDays.ONE_DAY // default is INFINITE
      });
      // Custom resource carries the bucket name, target path and file content
      // as properties; CloudFormation resolves any tokens in `content` before
      // invoking the Lambda.
      const resource = new CustomResource(scope, `InlineSource${id}CustomResource`, { serviceToken: myProvider.serviceToken, properties: { bucket: bucket.bucketName, path, content } });
      context.handlerRole.node.addDependency(resource); // Sets the s3 deployment to depend on the deployed file
      bucket.grantRead(context.handlerRole);
      return {
        bucket: bucket,
        zipObjectKey: 'index.zip'
      };
    },
  };
}
In inline-lambda/index.js (also requires archiver installed into inline-lambda/node_modules):
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
const fs = require('fs');
var archive = require('archiver')('zip');
exports.handler = async function(event, ctx) {
await new Promise(resolve => fs.unlink('/tmp/index.zip', resolve));
const output = fs.createWriteStream('/tmp/index.zip');
const closed = new Promise((resolve, reject) => {
output.on('close', resolve);
output.on('error', reject);
});
archive.pipe(output);
archive.append(event.ResourceProperties.content, { name: event.ResourceProperties.path });
archive.finalize();
await closed;
await s3.upload({Bucket: event.ResourceProperties.bucket, Key: 'index.zip', Body: fs.createReadStream('/tmp/index.zip')}).promise();
return;
}
In your construct, use inlineSource:
/**
 * Example construct showing how inlineSource ships a config.js whose contents
 * embed deploy-time attributes (here, an API endpoint).
 */
export class TestConstruct extends Construct {
  constructor(scope: Construct, id: string, props: any) {
    // BUG FIX: a derived-class constructor must call super() before anything
    // else; without it the construct is never registered in the tree and the
    // code does not compile under TypeScript.
    super(scope, id);
    // set up other resources
    const source = inlineSource('config.js', `exports.config = { apiEndpoint: '${ api.attrApiEndpoint }' }`);
    // use in BucketDeployment
  }
}
You can move inline-lambda elsewhere but it needs to be able to be bundled as an asset for the lambda.
This works by creating a custom resource that depends on your other resources in the stack (thereby allowing for the attributes to be resolved) that writes your file into a zip that is then stored into a bucket, which is then picked up and unzipped into your deployment/destination bucket. Pretty complicated but gets the job done with what is currently available.
The pattern I've used successfully is to put a CloudFront distribution or an API Gateway in front of the S3 bucket.
So requests to https://[api-gw]/**/* are proxied to https://[s3-bucket]/**/*.
Then I will create a new Proxy path in the same API gateway, for the route called /config which is a standard Lambda-backed API endpoint, where I can return all sorts of things like branding information or API keys to the frontend, whenever the frontend calls GET /config.
Also, this avoids issues like CORS, because both origins are the same (the API Gateway domain).
With CloudFront distribution instead of an API Gateway, it's pretty much the same, except you use the CloudFront distribution's "origin" configuration instead of paths and methods.
I am trying to get the list of uploads in the AWS Device Farm. I tried to use the method "devicefarm.listUploads" from Lambda.
I am facing an issue when I am trying to filter it by type.
// NOTE(review): `.catch(err => err)` resolves the chain with the error object,
// so callers cannot distinguish success from failure. The "Unexpected key
// 'type'" error shown below comes from an aws-sdk build that predates the
// `type` parameter on listUploads (see the Lambda-runtime SDK version note).
var uploadList = devicefarm.listUploads({ arn: deviceFarmProject, type: 'IOS_APP' }).promise()
.then(res => res)
.catch(err => err);
The expectation is to get data about all the iOS apps, but I am getting the below error.
{
"message": "Unexpected key 'type' found in params",
"code": "UnexpectedParameter",
"time": "2019-05-02T15:49:35.351Z"
}
~~I'm not sure why the type isn't recognized here~~
[Edit]
The reason for this error is due to the version of the aws-sdk in AWS Lambda.
https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html
Node.js Runtimes
Name Identifier AWS SDK for JavaScript
Node.js 8.10
nodejs8.10
2.290.0
I created a Lambda layer with the following commands and applied it to my function through the web console.
# Build a Lambda layer that ships an up-to-date aws-sdk.
npm init
npm install aws-sdk
mkdir nodejs
# BUG FIX: npm installs into "node_modules" (underscore); the original
# `cp -r node-modules nodejs` referenced a directory that does not exist.
cp -r node_modules nodejs
zip -r aws-sdk-layer.zip nodejs
note the zip file structure needs to match the Lambda documentation example.
https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path
Node.js – nodejs/node_modules, nodejs/node8/node_modules (NODE_PATH)
Example AWS X-Ray SDK for Node.js
xray-sdk.zip
└ nodejs/node_modules/aws-xray-sdk
after I applied the layer I was able to execute the function successfully.
but I used the following and it seemed to work though I didn't have any iOS uploads.
// assume we already executed `npm install aws-sdk`
// assume we already executed `npm install aws-sdk`
var AWS = require('aws-sdk');
// Device Farm is only available in the us-west-2 region
var devicefarm = new AWS.DeviceFarm({ region: 'us-west-2' });

// List the projects, then list the iOS app uploads of the first project.
var params = {};
devicefarm.listProjects(params, function (err, projects) {
  if (err) console.log(err, err.stack); // an error occurred
  else {
    // BUG FIX: `project` and `uploadList` were assigned without declarations,
    // creating implicit globals; declare them locally.
    var project = projects.projects[0];
    console.log("project: ", project);
    var uploadList = devicefarm.listUploads({ arn: project.arn, type: 'IOS_APP' }).promise()
      .then(function (uploadList) {
        console.log("uploads: ", uploadList);
      })
      // BUG FIX: the original `.catch(err => err)` silently swallowed
      // failures; log them so they are visible.
      .catch(function (err) {
        console.log("listUploads error: ", err, err.stack);
      });
  }
});
code I executed in Lambda
// assume we already executed `npm install aws-sdk`
var AWS = require('aws-sdk');
// Device Farm is only available in the us-west-2 region
var devicefarm = new AWS.DeviceFarm({ region: 'us-west-2' });
exports.handler = async (event) => {
return new Promise(function (resolve, reject) {
var params = {};
devicefarm.listProjects(params, function (err, projects) {
if (err) reject(err); // an error occurred
else {
var project = projects.projects[0];
console.log("project: ", project);
resolve(project);
}
});
}).then(function(data){
console.log("in then function","data: ",data);
return new Promise(function(resolve,reject){
devicefarm.listUploads({ arn: data.arn, type: 'IOS_APP' }, function(err,uploads){
if (err) reject(err); // an error occurred
else {
resolve(uploads);
}
})
}).then(function(data){
console.log("uploads: ", data);
return data;
}).catch(function(data){
console.error("list uploads failed","data: ", data);
return data;
});
}).catch(function(data){
console.error("list projects failed","data: ",data);
return data;
});
};
It might be the case that the aws-sdk version in Lambda isn't up to date in which case you would need to apply a Lambda layer or include the aws-sdk in the code package.
Locally I executed this code and it provided the following output:
node sample.js
project: { arn: 'arn:aws:devicefarm:us-west-2:111122223333:project:00ec5d2a-9170-4c52-b65e-0e12986e4fc3',
name: 'web_app_test',
created: 2019-02-10T22:47:35.052Z }
uploads: { uploads: [] }
aws-sdk version: aws-sdk#2.448.0
node --version
v8.12.0
HTH
James
I’m having a problem with image uploads with the Cloudinary API.
I have the app running on Heroku. I’m using Node for my backend. The app runs fine, until a user tries to post an image. I then get the following error message:
Invalid Signature ******************************. String to
sign - 'timestamp=.
I used the same setup in another app, and it works fine. I’ve followed some stack overflow threads on the problem, but I’m not getting a useful answer that I understand.
I’ve set up the environment variables in Heroku the same way I did on another app, and it works. I’ve also installed the Cloudinary and Multer packages in my package.json file.
Any ideas what I’m doing wrong here?
Below is my code:
var multer = require('multer');
// Store uploads on local disk under a timestamp-prefixed filename to avoid
// collisions; Cloudinary later reads the file from this temp path.
var storage = multer.diskStorage({
  filename: function(req, file, callback) {
    callback(null, Date.now() + file.originalname);
  }
});
// Reject any upload whose original filename is not a common image extension.
var imageFilter = function (req, file, cb) {
  // accept image files only
  if (!file.originalname.match(/\.(jpg|jpeg|png|gif)$/i)) {
    return cb(new Error('Only image files are allowed!'), false);
  }
  cb(null, true);
};
var upload = multer({ storage: storage, fileFilter: imageFilter});

var cloudinary = require('cloudinary');
// NOTE(review): an "Invalid Signature ... String to sign - 'timestamp='"
// error typically means the API key/secret are missing at runtime — verify
// CLOUDINARY_API_KEY and CLOUDINARY_API_SECRET are set in the Heroku config
// vars for this app.
cloudinary.config({
  cloud_name: 'digi9mjbp',
  api_key: process.env.CLOUDINARY_API_KEY,
  api_secret: process.env.CLOUDINARY_API_SECRET
})
// POST /: upload the posted image to Cloudinary, then create the Topic with
// the returned URL/public_id and the logged-in user as author.
router.post("/", middleware.isLoggedIn, upload.single('image'),
  function(req, res) {
    cloudinary.v2.uploader.upload(req.file.path, function(err, result) {
      if (err) {
        req.flash("error", err.message);
        return res.redirect("back");
      }
      // BUG FIX: this comment had wrapped onto a second line, leaving a bare
      // `property` token as a statement — a ReferenceError at runtime.
      // add cloudinary url for the image to the topic object under image property
      req.body.topic.image = result.secure_url;
      // add image's public_id to topic object
      req.body.topic.imageId = result.public_id;
      // add author to topic
      req.body.topic.author = {
        id: req.user._id,
        username: req.user.username
      };
      Topic.create(req.body.topic, function(err, topic) {
        if (err) {
          req.flash('error', err.message);
          return res.redirect('back');
        }
        res.redirect('/topics/' + topic.id);
      });
    });
  });
When I use this function in Cloud Code, Parse.User.current() returns null.
I'm using parseExpressCookieSession for login.
Any advice?
var express = require('express');
var expressLayouts = require('cloud/express-layouts');
var parseExpressHttpsRedirect = require('parse-express-https-redirect');
var parseExpressCookieSession = require('parse-express-cookie-session');

// Required for initializing the Express app in Cloud Code.
var app = express();

// Global app configuration section
app.set('views', 'cloud/views');
app.set('view engine', 'ejs'); // Switch to Jade by replacing ejs with jade here.
app.use(expressLayouts); // Use the layout engine for express
app.set('layout', 'layout');

app.use(parseExpressHttpsRedirect()); // Require user to be on HTTPS.
app.use(express.bodyParser());
app.use(express.methodOverride());
// 'helloworld' is the cookie-signing secret; in a real app keep it out of
// source control.
app.use(express.cookieParser('helloworld'));
// Fetch the full Parse.User on each request; session cookie lives 24 hours.
app.use(parseExpressCookieSession({
  fetchUser: true,
  cookie: { maxAge: 3600000 * 24 }
}));
// beforeSave('Menu'): look up the Business whose profile matches the profile
// of the user performing the save.
// BUG FIX: Parse.User.current() is null inside Cloud Code (it only works in
// client SDKs); the user issuing the request is available as request.user.
Parse.Cloud.beforeSave('Menu', function(request, response) {
  if (!request.user) {
    // No authenticated user on the request — reject the save explicitly
    // instead of crashing on a null dereference.
    return response.error('Menu must be saved by a logged-in user.');
  }
  var Business = Parse.Object.extend('Business');
  var query = new Parse.Query(Business);
  query.equalTo('profile', request.user.get('profile'));
  query.find({
    success: function(business) {
      console.log(business);
      response.success();
    },
    error: function(error) {
      response.error(error.message);
    }
  });
});

app.listen();
This the code that i use to login/logout
// Logs in the user. parseExpressCookieSession sets the session cookie on
// success; both success and failure land back on the homepage.
app.post('/login', function(req, res) {
  var goHome = function() {
    res.redirect('/');
  };
  Parse.User.logIn(req.body.username, req.body.password).then(goHome, goHome);
});

// Logs out the user and returns to the homepage.
app.post('/logout', function(req, res) {
  Parse.User.logOut();
  res.redirect('/');
});
It is an old question but answering for future reference.
Parse.User.current() works in Javascript SDK when used in clients ex. WebApp where users log in and the you can fetch the current user with that function.
To get the user calling a Cloud Code function or doing an operation on an object (beforeSave,afterSave,beforeDelete and so on) you use the request.user property it contains the user issuing the request to Parse.com.
More details about Parse.Cloud.FunctionRequest here: https://parse.com/docs/js/api/classes/Parse.Cloud.FunctionRequest.html
Example code:
// Example: in Cloud Code triggers, read the requesting user from request.user.
Parse.Cloud.beforeSave('Menu', function(request, response) {
  var requestUser = request.user;
  // instance of Parse.User object of the user calling .save() on an object of class "Menu"
  // code cut for brevity
});