I have sent a GET request to the Clash of Clans API using axios, and provided the authentication so it works locally; however, it doesn't work when deployed, because Heroku uses dynamic IP addresses.
I am looking to use a static IP address that the Clash of Clans API will authorize my requests from. I have chosen Fixie, but I don't think my server is actually using the static IP: when I navigate to the endpoint, the page fails, and the network tab shows a remote connection from what seems to be a Heroku IP address. Essentially, I need an IP I can whitelist.
Here is my code:
require("dotenv").config();
const express = require("express");
const axios = require("axios");
const app = express();
const path = require('path');
const url = require('url');
const fixieUrl = url.parse(process.env.FIXIE_URL);
const requestUrl = url.parse('https://api.clashofclans.com/v1/players/%232889v22uq');
// const URL = "https://api.clashofclans.com/v1/players/%232889v22uq";
const options = {
headers: {
Host: requestUrl.host,
'Proxy-Authorization': `Basic ${Buffer.from(fixieUrl.auth).toString('base64')}`,
"Authorization": `Bearer ${process.env.API_TOKEN}`
},
host: fixieUrl.hostname,
port: fixieUrl.port,
path: requestUrl.href,
};
app.get("/api", (req, res) => {
const clashReq = async () => {
try {
const response = await axios.get(requestUrl, options);
const {
name,
townHallLevel,
trophies,
bestTrophies,
builderHallLevel,
league: {
name: leagueName,
iconUrls: { medium: mediumIcon },
},
legendStatistics: {
previousSeason: { trophies: previousTrophies},
bestSeason: { trophies: bestSeasonTrophies},
currentSeason: { trophies: currentTrophies},
},
} = response.data;
res.json({
name,
townHallLevel,
trophies,
bestTrophies,
builderHallLevel,
leagueName,
mediumIcon,
previousTrophies,
bestSeasonTrophies,
currentTrophies
}
);
} catch (error) {
console.log(error);
}
};
clashReq();
console.log(res.statusCode);
});
if (process.env.NODE_ENV === "production") {
app.use(express.static(path.join(__dirname, "/client/build")));
app.get("*", (req, res) => {
res.sendFile(path.join(__dirname, "/client/build", "index.html"));
});
}
app.listen(process.env.PORT || 3001, () => {
console.log(`Server running`);
});
Axios can make HTTPS requests over a proxy out of the box. You just need to provide a proxy configuration object in options:
const axios = require('axios');
const url = require('url');
const fixieUrl = url.parse(process.env.FIXIE_URL);
const fixieAuth = fixieUrl.auth.split(':');
axios.get('https://example.com', {
proxy: {
protocol: 'http',
host: fixieUrl.hostname,
port: fixieUrl.port,
auth: {username: fixieAuth[0], password: fixieAuth[1]}
}
}).then(response => {
console.log(response.status);
});
If you do this, you can remove the Proxy-Authorization and Host headers, as well as the explicit host, port, and path options.
What Axios is doing under the hood is that it makes a CONNECT request to Fixie to open an HTTP tunnel to the remote, and the request to the remote is then made over that tunnel. This is different than how an HTTP request is proxied, but Axios abstracts that difference away.
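Applied to the original request, a minimal sketch could look like this (assuming FIXIE_URL and API_TOKEN are set in the environment as in the question):
const axios = require('axios');
const url = require('url');

const fixieUrl = url.parse(process.env.FIXIE_URL);
const [fixieUser, fixiePassword] = fixieUrl.auth.split(':');

// The player tag stays URL-encoded (# becomes %23), as in the question.
axios.get('https://api.clashofclans.com/v1/players/%232889v22uq', {
  headers: { Authorization: `Bearer ${process.env.API_TOKEN}` },
  proxy: {
    protocol: 'http',
    host: fixieUrl.hostname,
    port: fixieUrl.port,
    auth: { username: fixieUser, password: fixiePassword }
  }
}).then(response => {
  console.log(response.status);
}).catch(error => {
  console.error(error.message);
});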
Related
CRUD app doesn't send anything to my API. Do you have an idea what the problem could be?
On localhost:8080
I get status 200 + "Transmission failed"
On localhost:3000
The network tab shows status 200, but nothing arrives from localhost:8080.
I also checked my cache.
Backend: (localhost:3000)
set Cors
set body-parser
Checked connection to DB is running and fetching data from the database.
Firewall:
1. Disabled it, then tried again... nothing.
2. Created incoming and outgoing rules for ports 8080 and 3000. They are active.
Frontend: (localhost:8080) Vue:
<template>
<div class="home">
<h1>Test</h1>
<div class="user-add">
<h2>Add user</h2>
<input type="text" v-model="name" /> <br />
<button @click="storeUser">Store</button>
</div>
</div>
</template>
<script>
import axios from "axios";
export default {
name:'CRUD2',
data() {
return {
user: {},
};
},
mounted() {
},
methods: {
async storeUser() {
try {
const user = await axios.post(
"http://localhost:3000/easy",
{
name: this.name,
}
);
console.log(user)
if (user.status === 201) {
console.log("Transmission successful
");
} else {
console.error("Transmission failed
");
}
} catch (error) {
console.error(error);
}
},
}
};
</script>
Backend: index.js, Node (Express), localhost:3000:
const express = require("express");
const oracledb = require('oracledb');
const cors = require('cors');
const bodyParser = require("body-parser");
const app = express();
app.use(express.json());
app.use(cors({
origin: 'http://localhost:8080'
}))
/* I also tried this:
app.use(function(req, res, next) {
req.header("Access-Control-Allow-Origin", "*");
req.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
next();
});*/
app.use(bodyParser.json());
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT;
//GET
app.get("/easy", async (req, res) => {
let con;
try {
con = await oracledb.getConnection({
user: "user",
password: "password",
connectString: "h2922093.stratoserver.net:1521/orcl.stratoserver.net"
});
const data = await con.execute(`SELECT * FROM EASY`);
res.json(data.rows);
} catch (err) {
console.error(err);
res.sendStatus(500);
} finally {
if (con) {
await con.close();
}
}
});
//POST
app.post("/easy", async (req, res) => {
let con;
try {
con = await oracledb.getConnection({
user: "user",
password: "password",
connectString: "h2922093.stratoserver.net:1521/orcl.stratoserver.net"
});
const body = req.body;
const sql = `INSERT INTO EASY (NAME)
VALUES (:name)`;
const result = await con.execute(sql, [
body.name,
]);
if(result.rowsAffected){
res.send({
message: `${result.rowsAffected} row(s) inserted`,
status: 201
})
}else {
res.sendStatus(500);
}
} catch (err) {
console.error(err);
res.sendStatus(500);
} finally {
if (con) {
await con.close();
}
}
});
app.listen(3000, () => {
console.log("Server started on http://localhost:3000");
});
I tried:
Check my code in Backend;
Check my code in Frontend;
Backend: (localhost:3000)
-set Cors
-set body-parser
-Checked connection to DB is running and fetching data from the database.
Firewall:
1. Disabled it, then tried again... nothing.
2. Created incoming and outgoing rules for ports 8080 and 3000. They are active.
I'm expecting:
to send a POST with {test2} from localhost:8080 to localhost:3000 and INSERT test2 into table EASY.
I solved the Issue myself:
Frontend:
async addData() {
//You can give an id for your input (id="nameInput")
const name = nameInput.value;
nameInput.value = "";
fetch('http://localhost:3000/easy', {
headers: {
'Content-type': 'application/json'
},
method: 'POST',
body: JSON.stringify({
name: name
})
})
.then(response => response.json());
}
Backend:
And please don't forget to define the following in your backend index.js file (and of course to install the packages):
const express = require('express');
const app = express();
const cors = require('cors');
const oracledb = require('oracledb');
oracledb.outFormat = oracledb.OUT_FORMAT_OBJECT;
app.use(cors());
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
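For completeness, here is a minimal sketch of a matching POST route that the fetch call above could hit. It is modeled on the /easy handler from the question; the connection details are placeholders, and the autoCommit option is an addition worth noting, since node-oracledb does not commit inserts by default.
app.post('/easy', async (req, res) => {
  let con;
  try {
    con = await oracledb.getConnection({
      user: 'user',
      password: 'password',
      connectString: 'dbhost:1521/service' // placeholder connection details
    });
    // req.body is parsed by express.json(), so req.body.name holds the posted value
    const result = await con.execute(
      `INSERT INTO EASY (NAME) VALUES (:name)`,
      [req.body.name],
      { autoCommit: true } // without autoCommit the insert is not persisted
    );
    res.status(201).json({ message: `${result.rowsAffected} row(s) inserted` });
  } catch (err) {
    console.error(err);
    res.sendStatus(500);
  } finally {
    if (con) {
      await con.close();
    }
  }
});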
I tested with Postman:
GET request works properly; that means I got back just the one object and status 200:
[{
"NAME": "Test"
}]
POST request to http://localhost:3000/easy: I sent it as JSON: { "name": "test2" } and I also got back the message with status 201. But that is strange, because nothing was inserted into my table, and I don't see anything at localhost:3000/easy except the one object with the name Test.
{
"message": "1 row(s) inserted",
"status": 201
}
I also set the headers:
app.use(function (req, res, next) {
res.header("Access-Control-Allow-Origin", "http://localhost:8080");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
next();
});
And I changed the CORS Library to cors-express: const cors = require('cors-express');
I read through other posts with similar issues but couldn't solve mine. I created a simple chat using socket.io and Express. It works on localhost but not on 000webhost. I'm probably missing some code somewhere.
I tried adding lines of code on the server file and the client side, but still couldn't get it to run.
server side
const express = require("express");
const Socket = require("socket.io");
const app = express();
const server = require("http").createServer(app);
const io = Socket(server, {
cors: {
origin: "*",
method: ["GET", "POST"]
}
})
let PORT = 5000;
server.listen(PORT, () => {
console.log("listening on port: ", PORT)
})
const users = [];
io.on("connection", (socket) => {
console.log("connected to", socket.id)
socket.on("adduser", (username)=>{
socket.user = username;
users.push(username);
io.sockets.emit("users", users)
})
socket.on("message", (message)=>{
io.sockets.emit("message_client", {
message,
user: socket.user
})
})
socket.on("disconnect", () => {
console.log("we are disconnecting...: ", socket.user)
if (socket.user){
users.splice(users.indexOf(socket.user), 1)
io.sockets.emit("users", users);
console.log('remaining users: ', users)
}
})
})
I am running the latest version of socket.io, and the server and client code below work well.
// server
const { Server } = require("socket.io"),
http = require('http');
const httpserver = http.createServer();
io.on("connection", async (socket) => {
socket.on("error", (err) => {
console.log(err.message);
});
socket.on('disconnect', function () {
console.log('socket disconnect');
})
});
const io = new Server(httpserver, {
cors: { origin: "*", methods: ["GET", "POST"],}
});
httpserver.listen(3001, () => {
console.log('listening on *:3001');
});
// client
import { io, Socket } from "socket.io-client";
const socket = io('ws://127.0.0.1:3001', {
transports: ["websocket"]
});
socket.on("connect_error", (err) => {
console.log(`connect_error due to ${err.message}`);
});
Then I tried to work with namespaces in socket.io:
// server
io.of("device").on("connection", async (socket) => {
socket.on("error", (err) => {
console.log(err.message);
});
socket.on('disconnect', function () {
console.log('socket disconnect');
})
});
// client
const socket = io('ws://127.0.0.1:3001/device', {
transports: ["websocket"]
});
Running the code gives me an error saying:
'connect_error due to Invalid namespace'
I can't figure out what is going wrong.
Using ws://127.0.0.1:3001/device means you are trying to reach the namespace named '/device', which does not exist on the server.
I think you are looking for the path option instead:
const socket = io("ws://127.0.0.1:3001", {
path: "/device",
transports: ["websocket"]
});
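Note that if you use a custom path, the server has to be configured with the same path (the default is /socket.io). A minimal server-side sketch under that assumption:
// Server counterpart: serve the socket.io endpoint at the same custom path
// that the client uses in its `path` option.
const io = new Server(httpserver, {
  path: "/device",
  cors: { origin: "*", methods: ["GET", "POST"] }
});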
References:
https://socket.io/docs/v4/client-initialization/
https://socket.io/docs/v4/client-options/#path
https://socket.io/docs/v4/namespaces/
I am trying to get ACL behavior by implementing my own webhooks for VerneMQ. I am using express and apicache node packages for this. I hope the code is meaningful to non-javascript-programmers as well.
In my vernemq.conf I have set up my hooks, and they get called correctly:
$ vmq-admin webhooks show
+-----------------+------------------------------+-------------+
| hook | endpoint |base64payload|
+-----------------+------------------------------+-------------+
|auth_on_subscribe|http://127.0.0.1:3000/vmq/sub | true |
|auth_on_register |http://127.0.0.1:3000/vmq/auth| true |
| auth_on_publish |http://127.0.0.1:3000/vmq/pub | true |
+-----------------+------------------------------+-------------+
Also I disabled all other plugins and disabled anonymous login.
My webhooks implementation in express (simplified):
const express = require('express');
const apicache = require('apicache');
const bodyparser = require('body-parser');
// short cache times for demonstration
const authcache = apicache.middleware('15 seconds');
const pubcache = apicache.middleware('5 seconds');
const subcache = apicache.middleware('10 seconds');
const app = express();
const jsonparser = bodyparser.json();
app.use((req, res, next) => {
console.log(`${req.connection.remoteAddress}:${req.connection.remotePort} ${req.method} ${req.path}`);
return next();
});
app.post('/vmq/auth', authcache, (req, res) => {
return res.status(200).json({result: 'ok'});
});
app.post('/vmq/pub', pubcache, jsonparser, (req, res) => {
// this gets ignored most of the time because of caching
if (req.body.topic === 'only/allowed/topic') {
return res.status(200).json({result: 'ok'});
}
return res.status(401).end();
});
app.post('/vmq/sub', subcache, (req, res) => {
return res.status(200).json({result: 'ok'});
});
app.use((req, res, next) => {
return res.status(404).end();
});
app.use((err, req, res, next) => {
console.error(err);
return res.status(500).end();
});
const server = app.listen(3000, 'localhost', () => {
const address = server.address();
return console.log(`listening on ${address.address}:${address.port} ...`);
});
Using mqtt.js I wrote a client (simplified):
const mqtt = require('mqtt');
const client = mqtt.connect('mqtt://localhost');
client.on('connect', () => {
setInterval(() => {
client.publish('only/allowed/topic', 'working');
client.publish('some/disallowed/topic', 'working too :(');
}, 500);
return client.subscribe('some/disallowed/topic');
});
client.on('message', (topic, message) => {
return console.log(`${topic}:${message}`);
});
What happens is that the client successfully authenticates and then publishes to only/allowed/topic, which is allowed and gets cached as successful by VerneMQ. However, since the successful call to /vmq/pub is now cached, publishing to some/disallowed/topic also works. If I change the order of publishing, both will fail.
I would have expected VerneMQ to map the cached results to all parameters in a call, except the payload of course, and not just to a client connection. However, that is not the case. What's a possible way to implement ACL via webhooks while using caching? Not using caching is out of the question, as this kills my performance, and caching is recommended by the docs anyway.
Also, will someone with 1500+ rep be so nice as to create the tag vernemq? :)
I misunderstood how apicache works and what it actually does. All I needed to do was set the appropriate caching header, as stated in the VerneMQ docs. Apparently apicache stores the actual result and returns it whenever asked within the specified timeframe, no matter what the client is actually requesting.
This is the working code now:
const express = require('express');
const bodyparser = require('body-parser');
const app = express();
// short cache times for demonstration (in seconds)
const authcachetime = 15;
const pubcachetime = 5;
const subcachetime = 10;
const jsonparser = bodyparser.json();
app.use((req, res, next) => {
console.log(`${req.connection.remoteAddress}:${req.connection.remotePort} ${req.method} ${req.path}`);
return next();
});
app.post('/vmq/auth', (req, res) => {
res.set('cache-control', `max-age=${authcachetime}`);
return res.status(200).json({result: 'ok'});
});
app.post('/vmq/pub', jsonparser, (req, res) => {
res.set('cache-control', `max-age=${pubcachetime}`);
if (req.body.topic === 'only/allowed/topic') {
return res.status(200).json({result: 'ok'});
}
return res.status(401).end();
});
app.post('/vmq/sub', (req, res) => {
res.set('cache-control', `max-age=${subcachetime}`);
return res.status(200).json({result: 'ok'});
});
app.use((req, res, next) => {
return res.status(404).end();
});
app.use((err, req, res, next) => {
console.error(err);
return res.status(500).end();
});
const server = app.listen(3000, 'localhost', () => {
const address = server.address();
return console.log(`listening on ${address.address}:${address.port} ...`);
});
As expected the client now gets an error when it tries to publish to an illegal topic.
In my app, I send a post request to the server with data containing a CSV file:
$.ajax({
type:"POST",
contentType: "application/json",
url:"/",
data: JSON.stringify({fileData:My_CSV_FILE}),
success: function(csvJson) {
console.log('in the done block!');
//can use csvJson in this handler
}
});
Note: I'm posting to the home route, and I am able to get a response with the data converted from the server. The problem is that whether I run on localhost or Heroku, I can only trigger the POST request once; after that I have to restart the server (even if I refresh the page). So I know the issue is with my route somewhere:
UPDATED TO INCLUDE FULL SERVER FILE:
'use strict';
const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;
var converter = new Converter({});
let app = express();
app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname +'/public'));
app.post('/',function(req,res) {
var csvFile = (req.body.fileData);
converter.fromString(csvFile, function(err, result) {
if(!err) {
console.log(result);
res.json(result);
}else {
res.json({error: 'Could not convert'});
}
})
});
app.listen(PORT, () => {
console.log(`app listening on port ${PORT}`);
});
I'm using Express 4. Again, everything works, but only once. When I check the Heroku logs or the console on localhost, I get:
Error: Can't set headers after they are sent.
But I don't understand how I'm re-setting them.
If you want to run it on localhost, here is a link to the project's GitHub: https://github.com/qctimes/calendar_export
You should move the converter instantiation inside the app.post callback, so that a new Converter object is created for every request.
This is how your code should be:
'use strict';
const express = require('express');
const csvtojson = require('csvtojson');
const PORT = process.env.PORT || 3000;
const bodyParser = require('body-parser');
const Converter = require('csvtojson').Converter;
let app = express();
app.use(bodyParser.json({limit: '300kb'}));
app.use(express.static(__dirname +'/public'));
app.post('/',function(req,res) {
var csvFile = (req.body.fileData);
var converter = new Converter({}); // instantiation is done here
converter.fromString(csvFile, function(err, result) {
if(!err) {
console.log(result);
res.send(result);
}else {
res.send({error: 'Could not convert'});
}
});
});
app.listen(PORT, () => {
console.log(`app listening on port ${PORT}`);
});
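As a side note, newer versions of csvtojson (v2+) expose a promise-based API where each call creates a fresh parser, so no state is shared between requests. A minimal sketch of the same route under that assumption:
const csv = require('csvtojson');

app.post('/', async (req, res) => {
  try {
    // csv() returns a new converter on every call, so nothing is reused across requests
    const result = await csv().fromString(req.body.fileData);
    res.json(result);
  } catch (err) {
    res.json({ error: 'Could not convert' });
  }
});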