Serve static image using netlify? - netlify-cms

I am currently using Express and I want to host my website with Netlify, but I don't know how to serve static images there. When I run the app normally, without Netlify, everything works. Is there a way to serve static images while hosting a Node.js app on Netlify?
const express = require("express");
const serverless = require("serverless-http");
const bodyParser = require("body-parser");
const path = require("path");

const app = express();

app.use((req, res, next) => {
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader(
    "Access-Control-Allow-Methods",
    "GET, POST, PUT, PATCH, DELETE"
  );
  res.setHeader("Access-Control-Allow-Headers", "Content-Type, Authorization");
  next();
});

app.set("view engine", "ejs");
app.use(express.static(path.join(__dirname, "public")));
app.use("/images", express.static(path.join(__dirname, "images")));

const rootDir = require("./util/path");
const errorController = require("./controllers/error");
const projectRoutes = require("./routes/project");
const { json } = require("body-parser");

app.use(bodyParser.urlencoded({ extended: false }));
app.use("/.netlify/functions/app", projectRoutes);

module.exports.handler = serverless(app);
Folder Structure
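Part of the problem is that, by default, Netlify only routes requests under /.netlify/functions/* to the serverless function, so the express.static middleware mounted at /images never receives a request for /images; locally everything works because the whole app runs as a single server. The usual approach on Netlify is to put the images in the site's publish directory and let the CDN serve them directly. If they must go through the function, a minimal sketch is to mount the static middleware under the function's own path (the same prefix already used for projectRoutes), keeping in mind that the images folder would also have to be bundled with the function:
// Sketch only: serve the images under the function's URL prefix, e.g.
// /.netlify/functions/app/images/logo.png. Assumes the images folder is
// included in the function bundle, which is not the default behaviour.
app.use(
  "/.netlify/functions/app/images",
  express.static(path.join(__dirname, "images"))
);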

Related

How do I configure my server to use a fixie static IP?

I send a GET request to the Clash of Clans API using axios, and with the authentication set up it works locally, but it fails when deployed because Heroku uses dynamic IP addresses.
I want to use a static IP address that the API will accept for my requests. I have chosen Fixie, but I don't think my server is actually routing through the static IP: when I navigate to the endpoint, the page fails and the network tab shows a remote connection from what appears to be a Heroku IP address. Essentially I need an IP I can whitelist.
Here is my code:
require("dotenv").config();
const express = require("express");
const axios = require("axios");
const app = express();
const path = require('path');
const url = require('url');
const fixieUrl = url.parse(process.env.FIXIE_URL);
const requestUrl = url.parse('https://api.clashofclans.com/v1/players/%232889v22uq');
// const URL = "https://api.clashofclans.com/v1/players/%232889v22uq";
const options = {
headers: {
Host: requestUrl.host,
'Proxy-Authorization': `Basic ${Buffer.from(fixieUrl.auth).toString('base64')}`,
"Authorization": `Bearer ${process.env.API_TOKEN}`
},
host: fixieUrl.hostname,
port: fixieUrl.port,
path: requestUrl.href,
};
app.get("/api", (req, res) => {
const clashReq = async () => {
try {
const response = await axios.get(requestUrl, options);
const {
name,
townHallLevel,
trophies,
bestTrophies,
builderHallLevel,
league: {
name: leagueName,
iconUrls: { medium: mediumIcon },
},
legendStatistics: {
previousSeason: { trophies: previousTrophies},
bestSeason: { trophies: bestSeasonTrophies},
currentSeason: { trophies: currentTrophies},
},
} = response.data;
res.json({
name,
townHallLevel,
trophies,
bestTrophies,
builderHallLevel,
leagueName,
mediumIcon,
previousTrophies,
bestSeasonTrophies,
currentTrophies
}
);
} catch (error) {
console.log(error);
}
};
clashReq();
console.log(res.statusCode);
});
if (process.env.NODE_ENV === "production") {
app.use(express.static(path.join(__dirname, "/client/build")));
app.get("*", (req, res) => {
res.sendFile(path.join(__dirname, "/client/build", "index.html"));
});
}
app.listen(process.env.PORT || 3001, () => {
console.log(`Server running`);
});
Axios can make HTTPS requests over a proxy out of the box. You just need to provide a proxy configuration object in options:
const axios = require('axios');
const url = require('url');

const fixieUrl = url.parse(process.env.FIXIE_URL);
const fixieAuth = fixieUrl.auth.split(':');

axios.get('https://example.com', {
  proxy: {
    protocol: 'http',
    host: fixieUrl.hostname,
    port: fixieUrl.port,
    auth: { username: fixieAuth[0], password: fixieAuth[1] }
  }
}).then(response => {
  console.log(response.status);
});
If you do this, you can remove the Proxy-Authorization and Host headers, as well as the explicit host, port, and path options.
Under the hood, Axios makes a CONNECT request to Fixie to open an HTTP tunnel to the remote host, and the request is then made over that tunnel. This is different from how a plain HTTP request is proxied, but Axios abstracts that difference away.
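For context, here is a sketch of how the /api route from the question might look with the proxy option, keeping the same request URL and environment variables (the 500 error response is just illustrative):
const fixieUrl = url.parse(process.env.FIXIE_URL);
const [fixieUser, fixiePassword] = fixieUrl.auth.split(':');

app.get('/api', async (req, res) => {
  try {
    // Route the request through Fixie via axios's proxy option; the
    // Authorization header still carries the Clash of Clans API token.
    const response = await axios.get(
      'https://api.clashofclans.com/v1/players/%232889v22uq',
      {
        headers: { Authorization: `Bearer ${process.env.API_TOKEN}` },
        proxy: {
          protocol: 'http',
          host: fixieUrl.hostname,
          port: fixieUrl.port,
          auth: { username: fixieUser, password: fixiePassword }
        }
      }
    );
    res.json(response.data);
  } catch (error) {
    console.log(error);
    res.status(500).json({ error: 'Request failed' }); // illustrative error shape
  }
});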

Express server not running when using ng serve

My server.js does not get executed when I use the ng serve command from the Angular CLI. When I run node server, it works fine.
server.js:
const express = require('express');
const bodyParser = require('body-parser');
const path = require('path');
const http = require('http');
const api = require('./server/routes/api');

const port = 3000;
const app = express();

app.use(express.static(path.join(__dirname, 'dist/Colab')));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());

app.get('/api', api);
app.get('*', (req, res) => {
  res.sendFile(path.join(__dirname, 'dist/Colab/index.html'));
});

const server = http.createServer(app);
server.listen(port, () => console.log(`Server is running on localhost:${port}`));
ng serve only starts Angular's own development server; it never executes your server.js. Run the Express server separately with:
node server.js
or, if your package.json start script points at server.js:
npm start

koa js async await not working when used with ajax and promise

I'm using Koa as a REST backend, but I can't get the routing and request/response to work properly: when the URL is called using axios, the promise fails.
server.js
const route = require('koa-route');
const serve = require('koa-static');
const Koa = require('koa');
const app = new Koa();
const path = require('path');
const bodyParser = require('koa-bodyparser');

const Datastore = require('nedb'),
  db = new Datastore({
    filename: __dirname + '/storage.db',
    autoload: true
  });

// something
app.use(bodyParser());
app.use(serve(__dirname + '/dist'));

app.use(route.get('/api/projects', async function (next) {
  let projects = [];
  await db.find({}, function (err, docs) {
    projects = docs;
  });
  this.body = projects;
}));

const PORT = process.argv[2] || process.env.PORT || 3000;
app.listen(3000);
When I make a request to /api/projects using axios, I get an empty array.
From my package.json:
"scripts": {
"start": "nodemon server.js --exec babel-node --presets es2015,stage-2"
},
You're mixing promises with callbacks, which is most likely causing your problems.
Stick to using promises:
app.use(route.get('/api/projects', async function (next) {
  let projects = await db.find({});
  this.body = projects;
}));
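One caveat: plain nedb's find() is callback-based and does not return a promise, so await db.find({}) only behaves as above with a promise-enabled wrapper such as nedb-promises. A minimal sketch that keeps the handler shape from the question but promisifies the callback API with Node's util.promisify:
const { promisify } = require('util');

// Wrap nedb's callback-based find() so it can be awaited.
const findAsync = promisify(db.find.bind(db));

app.use(route.get('/api/projects', async function (next) {
  this.body = await findAsync({});
}));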

Navigate to a page after a GET request in express?

I'm fairly new to Express and am wondering how to navigate to a page as the result of a GET request. I'm building my pages from a template, and the following works if I manually enter the '/id' URL:
const express = require('express');
const template = require('./template.js');
const app = express();
const bodyParser = require('body-parser');
const port = process.env.PORT || 3000;

app.get('/', (req, res) => {
  res.send(template({
    body: "Home",
    title: "Home Title"
  }));
});

app.get('/id', (req, res) => {
  console.log("received get request for id");
  res.send(template({
    body: "id",
    title: "ID"
  }));
});

const server = app.listen(port);
console.log("listening...");
But if I send a GET request from the client, it gets received and nothing happens. Any tips or help would be appreciated! Thank you!
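The likely explanation is that a GET issued with fetch, axios, or $.ajax never navigates the browser; the rendered HTML just comes back to the calling script. A sketch of the two common options, assuming the routes above (the extra '/go' route is purely illustrative):
// Option 1 (client side): navigate the browser instead of issuing an AJAX call.
// The browser performs the GET to /id itself and renders the returned page.
window.location.href = '/id';

// Option 2 (server side): redirect from another route; the browser follows
// the redirect and loads the /id page.
app.get('/go', (req, res) => {
  res.redirect('/id');
});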

Can't use Express to send data back to client more than once

In my app, I send a POST request to the server with data containing a CSV file:
$.ajax({
  type: "POST",
  contentType: "application/json",
  url: "/",
  data: JSON.stringify({ fileData: My_CSV_FILE }),
  success: function (csvJson) {
    console.log('in the done block!');
    // can use csvJson in this handler
  }
});
Note: I'm posting to the home route, and I am able to get a response with the data converted from the server. The problem is that whether I run on localhost or Heroku, I am only able to trigger the POST request once, then I have to restart the server (even if I refresh the page). So I know the issue is with my route somewhere:
UPDATED TO INCLUDE FULL SERVER FILE:
'use strict';

const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;
var converter = new Converter({});

let app = express();

app.use(bodyParser.json({ limit: '300kb' }));
app.use(express.static(__dirname + '/public'));

app.post('/', function (req, res) {
  var csvFile = (req.body.fileData);
  converter.fromString(csvFile, function (err, result) {
    if (!err) {
      console.log(result);
      res.json(result);
    } else {
      res.json({ error: 'Could not convert' });
    }
  });
});

app.listen(PORT, () => {
  console.log(`app listening on port ${PORT}`);
});
I'm using Express 4. Again, everything works, but only once. When I run heroku logs or check the console on localhost, I get:
Error: Can't set headers after they are sent.
But I don't understand how I'm re-setting them.
If you want to run it on localhost, here is a link to the project's GitHub: https://github.com/qctimes/calendar_export
You should move the converter instantiation inside the app.post callback, so that a new Converter object is created for every request.
This is how your code should look:
'use strict';

const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;

let app = express();

app.use(bodyParser.json({ limit: '300kb' }));
app.use(express.static(__dirname + '/public'));

app.post('/', function (req, res) {
  var csvFile = (req.body.fileData);
  var converter = new Converter({}); // instantiation is done here
  converter.fromString(csvFile, function (err, result) {
    if (!err) {
      console.log(result);
      res.send(result);
    } else {
      res.send({ error: 'Could not convert' });
    }
  });
});

app.listen(PORT, () => {
  console.log(`app listening on port ${PORT}`);
});
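As a side note, newer versions of csvtojson (v2) expose a promise-based API where each call creates a fresh parser, which sidesteps shared converter state entirely. A sketch of the same route using that API, assuming csvtojson v2 is installed:
app.post('/', (req, res) => {
  // csvtojson() returns a new converter; fromString() resolves to the parsed rows.
  csvtojson()
    .fromString(req.body.fileData)
    .then(result => res.json(result))
    .catch(() => res.status(400).json({ error: 'Could not convert' }));
});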
