How to set up Passenger with hapi.js v18? - passenger

I am trying to run a basic hapi v18 setup on a DigitalOcean droplet with Phusion Passenger and nginx.
I have searched Google for how to set up hapi with Passenger, but all the information I found covers old hapi versions (pre-v17).
This is all the code I have for my test:
'use strict';

if (typeof(PhusionPassenger) !== 'undefined') {
    PhusionPassenger.configure({ autoInstall: false });
}

const Hapi = require('@hapi/hapi');

const init = async () => {
    if (typeof(PhusionPassenger) !== 'undefined') {
        // Requires Passenger >= 4.0.52!
        server = new Hapi.Server('/passenger');
    } else {
        server = new Hapi.Server('localhost', 3000);
    }

    server.route({
        method: 'GET',
        path: '/',
        handler: (request, h) => {
            return 'Hello World!';
        }
    });

    await server.start();
    console.log('Server running on %s', server.info.uri);
};

process.on('unhandledRejection', (err) => {
    console.log(err);
    process.exit(1);
});

init();
When checking the nginx error log, I get the following:
App 21225 output: TypeError: Cannot create property 'routes' on string '/passenger'
App 21225 output: at Object.internals.setup (/var/www/hapi-18-test/code/node_modules/@hapi/hapi/lib/core.js:598:21)
App 21225 output: at new module.exports.internals.Core (/var/www/hapi-18-test/code/node_modules/@hapi/hapi/lib/core.js:54:46)
App 21225 output: at new module.exports (/var/www/hapi-18-test/code/node_modules/#hapi/hapi/lib/server.js:22:18)
App 21225 output: at init (/var/www/hapi-18-test/code/index.js:13:18)
App 21225 output: at Object.<anonymous> (/var/www/hapi-18-test/code/index.js:37:1)
App 21225 output: at Module._compile (internal/modules/cjs/loader.js:774:30)
App 21225 output: at Object.Module._extensions..js (internal/modules/cjs/loader.js:785:10)
App 21225 output: at Module.load (internal/modules/cjs/loader.js:641:32)
App 21225 output: at Function.Module._load (internal/modules/cjs/loader.js:556:12)
App 21225 output: at Module.require (internal/modules/cjs/loader.js:681:19)
I just followed the example on Passenger's website, but I guess it is not working because of hapi's newer versions.
So, how can I run hapi v18 on Passenger?

I just found the solution by reading the Hapi documentation.
We have to replace:
if (typeof(PhusionPassenger) !== 'undefined') {
    // Requires Passenger >= 4.0.52!
    server = new Hapi.Server('/passenger');
} else {
    server = new Hapi.Server('localhost', 3000);
}
With:
if (typeof(PhusionPassenger) !== 'undefined') {
    // Requires Passenger >= 4.0.52!
    server = new Hapi.Server({ port: '/passenger' });
} else {
    server = new Hapi.Server({ host: 'localhost', port: 3000 });
}
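For completeness, a minimal sketch of the whole test file with that change applied (assuming @hapi/hapi v18): hapi v17+ takes a single options object, and a string port such as '/passenger' is treated as a UNIX socket path, which is how Passenger communicates with the app.

'use strict';

if (typeof PhusionPassenger !== 'undefined') {
    PhusionPassenger.configure({ autoInstall: false });
}

const Hapi = require('@hapi/hapi');

const init = async () => {
    // A string port is treated as a UNIX socket path; Passenger listens there.
    const server = typeof PhusionPassenger !== 'undefined'
        ? new Hapi.Server({ port: '/passenger' })
        : new Hapi.Server({ host: 'localhost', port: 3000 });

    server.route({
        method: 'GET',
        path: '/',
        handler: (request, h) => 'Hello World!'
    });

    await server.start();
    console.log('Server running on %s', server.info.uri);
};

process.on('unhandledRejection', (err) => {
    console.log(err);
    process.exit(1);
});

init();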

Related

Previously-defined variable not working in other parts of test in nightwatchjs

I'm using nightwatchjs and the ssh2 npm package to test that a file name is present in a location, and to assert that it's correct.
My code is as follows:
var Client = require('ssh2');
var conn = new Client();
var longFileNamePrdArray;

module.exports = {
    before:
        conn.on('ready', function(browser) {
            console.log('Client :: ready');
            conn.sftp(function(err, sftp) {
                if (err) throw err;
                sftp.readdir('parkersreviewcontent/', function (err, list) {
                    if (err) throw err;
                    list.map(a => a.longname);
                    longFileNamePrdArray = list[1].longname;
                    conn.end();
                });
            });
        })
        .connect({
            host: '*hostname*',
            port: 22,
            user: '*user*',
            password: '*password*',
        }),

    'Test Zen Reviews production file is listed': function (browser) {
        console.log(longFileNamePrdArray);
    },

    'Closing the browser': function (browser) {
        browser.browserEnd();
    },
};
However, this results in undefined being output for longFileNamePrdArray.
If I move the console.log command inside the before code block, then the filename is correctly displayed as follows:
-rwxr--r-- 1 - - 2492238 Feb 28 06:37 parkers-reviews-staging.xml
but when I move it outside the before block and into my test block, it fails with the undefined output.
I thought that by declaring longFileNamePrdArray at the top of the test script, its value would 'carry across' into the test block, but it doesn't.
Any help and assistance would be really appreciated. Thanks.
I fixed this by using the done callback, so that the test steps don't run until the SFTP read has finished.
My working code looks like this:
var Client = require('ssh2');
var conn = new Client();
var longFileNamePrdArray;

module.exports = {
    before: function(browser, done) {
        conn.on('ready', function(browser) {
            console.log('Client :: ready');
            conn.sftp(function(err, sftp) {
                if (err) throw err;
                sftp.readdir('parkersreviewcontent/', function (err, list) {
                    if (err) throw err;
                    list.map(a => a.longname);
                    longFileNamePrdArray = list[1].longname;
                    conn.end();
                    done(); // signal Nightwatch that the async setup has finished
                });
            });
        })
        .connect({
            host: '*hostname*',
            port: 22,
            user: '*user*',
            password: '*password*',
        });
    },

    'Test Zen Reviews production file is listed': function (browser) {
        console.log(longFileNamePrdArray);
    },

    'Closing the browser': function (browser) {
        browser.browserEnd();
    },
};
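For anyone wondering why this works: Nightwatch starts the test steps as soon as a synchronous before returns, so the SFTP callback hadn't fired yet when the test read the variable. Declaring the done parameter marks the hook as asynchronous, and Nightwatch then waits (up to its asyncHookTimeout, 10 seconds by default) for done() to be called before running the tests, by which point longFileNamePrdArray has been assigned.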

async/await, Node.js readFile and then do other work

I tried to read a file with async/await and then update the credentials, but I got "HI" first and then "hello" in the console, and the credentials were not updated in oauth2Client either.
const getFile = async (req, res, next) => {
    await fs.readFile(TOKEN_PATH, (err, token) => {
        if (err) return null;
        console.log("hello");
        console.log(JSON.parse(token));
        oauth2Client.credentials = JSON.parse(token);
    });
    console.log("HI");
    var service = google.drive({
        version: 'v3',
        encoding: null
    });
    console.log(oauth2Client);
    await service.files.get({
        auth: oauth2Client,
        fileId: "1ZR8kkvb2JYVxcUjmlgfBJD2IYnisaiFn",
        alt: 'media'
    }, function(err, response) {
        if (err) {
            console.log('The API returned an error: ' + err);
            return;
        }
        responder(res)(null, response);
    });
}
Is there a way to make everything run in order?
Thank you.
The behaviour you are seeing happens because fs.readFile is callback-based: await only waits for promises, so it has no effect here and execution continues straight to console.log("HI") while the file is still being read. One option is to use the synchronous version instead:
let token;
try {
    token = fs.readFileSync(TOKEN_PATH, 'utf8');
} catch (err) {
    console.error(err);
}
if (!token) { return; }
oauth2Client.credentials = JSON.parse(token);

const service = google.drive({
    version: 'v3',
    encoding: null
});

await service.files.get({
    auth: oauth2Client,
    fileId: "1ZR8kkvb2JYVxcUjmlgfBJD2IYnisaiFn",
    alt: 'media'
}, function(err, response) {
    if (err) {
        console.log('The API returned an error: ' + err);
        return;
    }
    responder(res)(null, response);
});
Ref: https://nodejs.dev/learn/reading-files-with-nodejs
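If you'd rather keep the read non-blocking, the promise-based fs API works with await directly. A minimal sketch, assuming Node 10+ and the same TOKEN_PATH, oauth2Client and responder from the question (googleapis calls also return a promise when no callback is passed):

const fs = require('fs').promises;

const getFile = async (req, res, next) => {
    // fs.promises.readFile returns a promise, so await really waits here.
    const token = await fs.readFile(TOKEN_PATH, 'utf8');
    oauth2Client.credentials = JSON.parse(token);

    const service = google.drive({
        version: 'v3',
        encoding: null
    });

    // Without a callback, files.get resolves with the response object.
    const response = await service.files.get({
        auth: oauth2Client,
        fileId: "1ZR8kkvb2JYVxcUjmlgfBJD2IYnisaiFn",
        alt: 'media'
    });
    responder(res)(null, response);
};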

Heroku app crashes when I try to post an image to server from client

I've successfully deployed a CRUD app on Heroku, and everything works fine on the deployed web app until I send a POST request to post a picture to the server, which then sends it to S3. Everything, including the picture POST request, works fine locally. However, I get the following error message when I hit the deployed Heroku server.
POST https://backend.herokuapp.com/ 503 (Service Unavailable)
Access to fetch at 'https://backend.herokuapp.com/' from origin 'https://frontend.netlify.com' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. If an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled.
bundle.esm.js:63 Uncaught (in promise) Error: Network error: Failed to fetch
at new t (bundle.esm.js:63)
at Object.error (bundle.esm.js:1030)
at g (Observable.js:140)
at O (Observable.js:179)
at e.value (Observable.js:240)
at bundle.esm.js:869
at Set.forEach (<anonymous>)
at Object.error (bundle.esm.js:869)
at g (Observable.js:140)
at O (Observable.js:179)
This is my code to save the POSTed picture on the server, send it to S3, and then delete the photo on the server.
import * as shortid from "shortid";
import { createWriteStream, createReadStream, unlinkSync } from "fs";
const aws = require("aws-sdk");

aws.config.update({
    accessKeyId: process.env.AWS_accessKeyId,
    secretAccessKey: process.env.AWS_secretAccessKey
});

const BUCKET_NAME = "dormsurf";
const s3 = new aws.S3();

const storeUpload = async (stream: any, mimetype: string): Promise<any> => {
    const extension = mimetype.split("/")[1];
    console.log("extension: ", extension);
    const id = `${shortid.generate()}.${extension}`;
    const path = `src/images/${id}`;
    console.log("path", path);
    return new Promise((resolve, reject) =>
        stream
            .pipe(createWriteStream(path))
            .on("finish", () => resolve({ id, path }))
            .on("error", reject)
    );
};

export const processUpload = async (upload: any) => {
    const { stream, mimetype } = await upload;
    const { id } = await storeUpload(stream, mimetype);
    console.log("id");
    console.log(id);

    var params = {
        Bucket: BUCKET_NAME,
        Key: `listings_images/${id}`,
        Body: createReadStream(`src/images/${id}`)
    };

    s3.upload(params, function(err, data) {
        if (err) {
            console.log("error in callback");
            console.log(err);
        }
        console.log("success");
        console.log(data);
        try {
            unlinkSync(`src/images/${id}`);
            // file removed
        } catch (err) {
            console.error(err);
        }
    });
    return id;
};
Thank you so much for the help!
This error is a CORS error. One workaround is to route the request through a proxy such as https://cors-anywhere.herokuapp.com/: in your frontend, call the API with the proxy URL prefixed, like this:
https://cors-anywhere.herokuapp.com/https://backend.herokuapp.com/
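For illustration, a minimal sketch of the frontend call with the proxy prefix (formData here is a hypothetical placeholder for however the picture is currently sent):

const PROXY = 'https://cors-anywhere.herokuapp.com/';
const API = 'https://backend.herokuapp.com/';

// Same request as before, just routed through the proxy.
fetch(PROXY + API, {
    method: 'POST',
    body: formData // hypothetical: whatever body the app already posts
})
    .then(res => res.json())
    .then(data => console.log(data))
    .catch(err => console.error(err));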

What is the correct field to pass --staging-location parameter for a Dataflow job in Node.js?

I wonder if I've hit a bug. I wrote a Node.js piece of code to trigger a "GCS Text to PubSub" Dataflow job. The function is triggered upon file upload into a GCS bucket.
But it never executes successfully: "textPayload: "problem running dataflow template, error was: { Error: Invalid JSON payload received. Unknown name "staging_location": Cannot find field." It seems to be an issue with the syntax with which I specify the staging location for the job. I have tried "staginglocation", "stagingLocation", etc.; none of them have worked.
Here's my code. Thanks for your help.
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
    const file = event.data;
    const context = event.context;

    console.log(`Event ${context.eventId}`);
    console.log(` Event Type: ${context.eventType}`);
    console.log(` Bucket: ${file.bucket}`);
    console.log(` File: ${file.name}`);
    console.log(` Metageneration: ${file.metageneration}`);
    console.log(` Created: ${file.timeCreated}`);
    console.log(` Updated: ${file.updated}`);

    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);

        dataflow.projects.templates.create({
            projectId: projectId,
            resource: {
                parameters: {
                    inputFile: `gs://${file.bucket}/${file.name}`,
                    outputTopic: `projects/iot-fitness-198120/topics/MemberFitnessData`,
                },
                jobName: 'CStoPubSub',
                gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
                stagingLocation: 'gs://fitnessanalytics-tmp/tmp'
            }
        }, function(err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });

    callback();
};
I don't think this is actually possible.
Looking at the documentation for the Dataflow API itself, there's nothing like a staging location in the parameter section, and the library you're using is basically a wrapper for this API.
I'm a bit surprised it changes the name of the parameter though.
So I finally got this to work. It was indeed a syntax issue in the parameters section. The code below works like a charm:
var {google} = require('googleapis');

exports.moveDataFromGCStoPubSub = (event, callback) => {
    const file = event.data;
    const context = event.context;

    console.log(`Event ${context.eventId}`);
    console.log(` Event Type: ${context.eventType}`);
    console.log(` Bucket: ${file.bucket}`);
    console.log(` File: ${file.name}`);
    console.log(` Metageneration: ${file.metageneration}`);
    console.log(` Created: ${file.timeCreated}`);
    console.log(` Updated: ${file.updated}`);

    google.auth.getApplicationDefault(function (err, authClient, projectId) {
        if (err) {
            throw err;
        }
        console.log(projectId);
        const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
        console.log(`gs://${file.bucket}/${file.name}`);

        dataflow.projects.templates.create({
            gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
            projectId: projectId,
            resource: {
                parameters: {
                    inputFilePattern: `gs://${file.bucket}/${file.name}`,
                    outputTopic: 'projects/iot-fitness-198120/topics/MemberFitnessData2'
                },
                environment: {
                    tempLocation: 'gs://fitnessanalytics-tmp/tmp'
                },
                jobName: 'CStoPubSub'
            }
        }, function(err, response) {
            if (err) {
                console.error("problem running dataflow template, error was: ", err);
            }
            console.log("Dataflow template response: ", response);
            callback();
        });
    });

    callback();
};
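To highlight what actually changed: the staging/temp directory is not a template parameter at all, which is why the API rejected stagingLocation inside parameters; it belongs in the environment block as tempLocation, and this template's input parameter is named inputFilePattern rather than inputFile. A stripped-down sketch of the request shape (bucket and topic names are placeholders):

dataflow.projects.templates.create({
    projectId: projectId, // from getApplicationDefault, as above
    gcsPath: 'gs://dataflow-templates/latest/GCS_Text_to_Cloud_PubSub',
    resource: {
        jobName: 'CStoPubSub',
        parameters: {
            // template-specific parameters
            inputFilePattern: 'gs://my-bucket/my-file.txt',    // hypothetical input
            outputTopic: 'projects/my-project/topics/my-topic' // hypothetical topic
        },
        environment: {
            // runtime environment settings, not template parameters
            tempLocation: 'gs://my-bucket/tmp' // hypothetical temp location
        }
    }
}, function(err, response) { /* handle result */ });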

How do I get superagent-hawk to work with supertest

I have an API that I am trying to test. It uses Hawk authentication. I have successfully tested failure codes using supertest, but I'm not sure how to include the Hawk authentication within the test script. I have found and installed superagent-hawk, which states that it works with supertest. Unfortunately, I am fairly new to all this and am unsure how to set it up.
Here's what I have (test.js):
var should = require('should');
var superTest = require('supertest')('mywebsite:3000/');
var addHawk = require('superagent-hawk');
var request = addHawk(superTest);

describe('controllers', function() {
    describe('preregControllers', function() {
        describe('GET /mypage', function() {
            it('should be response code 200', function(done) {
                var creds = {
                    "id": "testUser",
                    "key": "testPass",
                    "algorithm": "sha256"
                };
                request
                    .get('mypage')
                    .hawk(creds)
                    .set('Accept', 'application/json')
                    .expect('Content-Type', /json/)
                    .expect(200)
                    .end(function(err, res) {
                        if (err) return done(err);
                        done();
                    });
            });
        });
    });
});
When I try to run mocha test.js, I get the following:
mocha test.js
~/Projects/myproject/node_modules/superagent-hawk/index.js:9
: superagent.Request.prototype;
^
TypeError: Cannot read property 'prototype' of undefined
at module.exports (~/Projects/myproject/node_modules/superagent-hawk/index.js:9:43)
at Object.<anonymous> (~/Projects/myproject/test/api/controllers/test.js:4:16)
at Module._compile (module.js:413:34)
at Object.Module._extensions..js (module.js:422:10)
at Module.load (module.js:357:32)
at Function.Module._load (module.js:314:12)
at Module.require (module.js:367:17)
at require (internal/module.js:16:19)
at /usr/local/lib/node_modules/mocha/lib/mocha.js:219:27
at Array.forEach (native)
at Mocha.loadFiles (/usr/local/lib/node_modules/mocha/lib/mocha.js:216:14)
at Mocha.run (/usr/local/lib/node_modules/mocha/lib/mocha.js:468:10)
at Object.<anonymous> (/usr/local/lib/node_modules/mocha/bin/_mocha:403:18)
at Module._compile (module.js:413:34)
at Object.Module._extensions..js (module.js:422:10)
at Module.load (module.js:357:32)
at Function.Module._load (module.js:314:12)
at Function.Module.runMain (module.js:447:10)
at startup (node.js:140:18)
at node.js:1001:3
I've also tried:
var request = require('supertest', 'superagent-hawk')('mywebsite:3000/');
but that gave me the following, which was not unexpected:
TypeError: request.get(...).hawk is not a function
My working code looks like the following:
var should = require('should');
var request = require('supertest')('mywebsite:3000/');

describe('controllers', function() {
    describe('preregControllers', function() {
        describe('GET /mypage', function() {
            it('should be response code 401', function(done) {
                request
                    .get('mypage')
                    .set('Accept', 'application/json')
                    .expect('Content-Type', /json/)
                    .expect(401, {"category":"AUTHORIZATION","context":"Validating user credentials","message":"Unauthorized"})
                    .end(function(err, res) {
                        if (err) return done(err);
                        done();
                    });
            });
        });
    });
});
Okay, I figured it out, partly by looking at superagent-hawk's own tests/hawk.js file and partly by playing around. Here's how I did it:
var should = require('should');
var addHawk = require('superagent-hawk');
var superTest = addHawk(require('supertest'));
var request = superTest('mywebsite:3000/');

describe('controllers', function() {
    describe('preregControllers', function() {
        describe('GET /mypage', function() {
            it('should be response code 200', function(done) {
                var creds = {
                    "id": "testUser",
                    "key": "testPass",
                    "algorithm": "sha256"
                };
                request
                    .get('mypage')
                    .hawk(creds)
                    .set('Accept', 'application/json')
                    .expect('Content-Type', /json/)
                    .expect(200)
                    .end(function(err, res) {
                        if (err) return done(err);
                        done();
                    });
            });
        });
    });
});
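The key difference is where addHawk is applied. It appears superagent-hawk patches the request prototype exposed by the module you hand it: require('supertest') exposes that prototype, but the value returned by require('supertest')('mywebsite:3000/') is an already-bound request factory that doesn't, hence the "Cannot read property 'prototype' of undefined" error. Side by side:

// Fails: the bound instance exposes no prototype for superagent-hawk to patch.
var request = addHawk(require('supertest')('mywebsite:3000/'));

// Works: patch the supertest module first, then bind the base URL.
var request = addHawk(require('supertest'))('mywebsite:3000/');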
