I have a Next.js project where either module.exports works on its own, but both can't run at the same time. How do I combine the two module exports?
The module.exports = withSass({ ... }) part is confusing me. How can it be merged into the module.exports above it?
// next.config.js
module.exports = {
  serverRuntimeConfig: { // Will only be available on the server side
    mySecret: 'secret'
  },
  publicRuntimeConfig: { // Will be available on both server and client
    staticFolder: '/static',
    appId: 'XXXXXXXXXX',
    apiKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
  }
}
const withSass = require('@zeit/next-sass')
module.exports = withSass({
  /* config options here */
})
With Next.js options, you simply move them into the nextConfig argument you pass to withSass(), like so:
const withSass = require('@zeit/next-sass')
module.exports = withSass({
  serverRuntimeConfig: {
    mySecret: 'secret'
  },
  publicRuntimeConfig: {
    staticFolder: '/static',
    appId: 'XXXXXXXXXX',
    apiKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
  }
})
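If you ever need more than one plugin, the same idea nests: each plugin wraps the config returned by the next. A minimal sketch, assuming @zeit/next-css is also installed (it is not part of the question):

const withSass = require('@zeit/next-sass')
const withCSS = require('@zeit/next-css')

// withCSS receives the plain config object; withSass wraps its result
module.exports = withSass(withCSS({
  serverRuntimeConfig: {
    mySecret: 'secret'
  },
  publicRuntimeConfig: {
    staticFolder: '/static'
  }
}))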
There are several services that process GraphQL requests on relative URLs.
Example: http://service:8080/graphql/a3333333-b111-c111-d111-e00000000011.
I don't understand how to configure ApolloGateway so that part of the URL is a variable.
I tried configuring the URL as:
const test_service: string = 'http://service:8080/graphql/:project';
supergraphSdl: new IntrospectAndCompose({
  subgraphs: [
    { name: 'test_service', url: test_service },
  ],
}),
So the full service looks like:
import { ApolloServer } from 'apollo-server'
import { ApolloGateway, IntrospectAndCompose, RemoteGraphQLDataSource } from '@apollo/gateway'

const test_service: string = 'http://service:8080/graphql/:project';

const gateway_main = new ApolloGateway({
  supergraphSdl: new IntrospectAndCompose({
    subgraphs: [
      { name: 'test_service', url: test_service },
    ],
  }),
});

const server_main = new ApolloServer({
  introspection: true,
  gateway: gateway_main
});

server_main.listen(8060).then(({ url }) => {
  console.log(`Server ready at ${url}`);
});
But if I send a request to the URL http://service:8060/graphql/a3333333-b111-c111-d111-e00000000011, test_service receives it as http://service/graphql/:project, so test_service loses the uuid data.
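One direction worth sketching (unverified): ApolloGateway accepts a buildService option, and RemoteGraphQLDataSource exposes a willSendRequest hook that runs before each subgraph fetch. Assuming the project uuid has been placed on the request context (extracting it from the incoming URL is up to your server setup), the hook could substitute the :project placeholder:

const gateway_main = new ApolloGateway({
  supergraphSdl: new IntrospectAndCompose({
    subgraphs: [
      { name: 'test_service', url: test_service },
    ],
  }),
  buildService({ name, url }) {
    return new RemoteGraphQLDataSource({
      url,
      willSendRequest({ request, context }) {
        // Assumption: context.project was populated from the incoming URL,
        // and request.http.url may be rewritten before the fetch is made
        if (context.project) {
          request.http.url = url.replace(':project', context.project);
        }
      },
    });
  },
});

Note that IntrospectAndCompose would still introspect the literal :project URL at startup, so the subgraph would need to answer introspection there as well.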
I have built a simple Koa framework application. After adding routes, I am trying to hit the /health GET route. It throws the error below:
TypeError: next is not a function
at cookieParser (c:\Userxxs\x\gimt\dp-my-app\node_modules\cookie-parser\index.js:46:14)
at dispatch (c:\Users\B748806a\gimt\dp-my-app\node_modules\koa-compose\index.js:42:32)
at bodyParser (c:\Users\B748806a\gimt\dp-my-app\node_modules\koa-bodyparser\index.js:95:11)
at processTicksAndRejections (internal/process/task_queues.js:93:5)
Below are the files and their order of execution:
server.js
const app = require("./app.js");
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`Listening on port: ${PORT}`));
app.js
"use strict";
const koa = require('koa');
const koaRouter = require('koa-router');
const app = new koa();
const router = new koaRouter();
const middleware = require("./src/main/middlewares/middlewares");
const routes = require("./src/main/middlewares/route-path");
const init = async () => {
try {
/**
* Step 2: load endpoint routes for the application
*/
routes(router)
} catch (err) {
logger.error({
err
});
}
};
/**
* Step 1: load middleware setup - cors,helmet from KP Spring cloud service
*/
middleware(app);
init();
module.exports = app
middleware.js
const koa = require("koa");
const koaRouter = require('koa-router');
const router = new koaRouter();
const cors = require("koa-cors");
const compression = require("koa-compress");
const helmet = require("koa-helmet");
const cookieParser = require("cookie-parser");
const bodyParser = require('koa-bodyparser')
const ActuatorRouter = require('pm-js-actuator').koa //internal library
const ACTUATOR_OPTIONS = {
health: {
enabled: true
},
livenessprobe: {
enabled: true
},
env: {
enabled: false
},
info: {
enabled: true,
secure: false
}
}
function middleware(app) {
// Use the CORS for the time being.
app.use(cors())
// Let's don the security helmet
app.use(helmet())
app.use(helmet.frameguard())
app.use(helmet.ieNoOpen())
app.use(helmet.frameguard({
action: 'sameorigin'
}))
app.use(helmet.noSniff())
app.use(helmet.referrerPolicy({
policy: 'same-origin'
}))
app.use(helmet.xssFilter())
//app.disable('x-powered-by')
app.use(ActuatorRouter.getRouter(ACTUATOR_OPTIONS).routes())
app.use(bodyParser())
app.use(cookieParser());
// Set up compression
app.use(compression());
}
module.exports = middleware;
route-path.js
const RootHeathController = require("../controller/root-health-controller")

const routes = (router) => {
  router.get("/health", RootHeathController.handler)
};

module.exports = routes
root-health-controller.js
const handler = async (ctx, next) => {
  ctx.body = "Hi";
}

module.exports = {
  handler
};
The application starts successfully on port 3000, but when I hit /health from Postman, it throws the error mentioned above. Any solution?
The problem here is that cookie-parser is an Express thing (see the repo URL: https://github.com/expressjs/cookie-parser). To test this, I created a minimal version of your code:
const koa = require('koa');
const koaRouter = require('koa-router');
const bodyParser = require('koa-bodyparser');
const cookieParser = require("cookie-parser");

const app = new koa();
app.use(bodyParser());
app.use(cookieParser()); // <-- comment this line

const router = new koaRouter();
router.get("/health", async (ctx, next) => {
  ctx.body = 'hi';
});
app.use(router.routes());

const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`Listening on port: ${PORT}`));
Calling the localhost:3000/health endpoint throws the same error, but if you comment out the app.use(cookieParser()) line, everything works fine.
The question is why you would need this library at all: you should be able to get and set cookies in Koa with ctx.cookies.get and ctx.cookies.set.
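For example, a minimal sketch of that built-in API (the visits cookie name is just an illustration):

router.get("/health", async (ctx) => {
  // Read a cookie from the request, defaulting to 0 when absent
  const visits = Number(ctx.cookies.get("visits") || 0) + 1;
  // Write the updated value back on the response
  ctx.cookies.set("visits", String(visits), { httpOnly: true });
  ctx.body = `Hi (visit ${visits})`;
});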
The error occurs when I add const replace = require("replace-in-file"); to my code:
const replace = require("replace-in-file");
const options = {
  files: "./config/dashboardData.json",
  configFile: true,
  from: /}\n{/g,
  to: ",\n",
};
I have installed Babel and configured the preset, but I still get the issue.
The package replace-in-file handles files on the OS, so it needs to be called from Node.
You would need to set up a Cypress task.
In Cypress v10+, do this in cypress.config.js:
// cypress.config.js
const { defineConfig } = require("cypress");

module.exports = defineConfig({
  e2e: {
    setupNodeEvents(on, config) {
      on('task', {
        replaceTask(options) {
          const replace = require("replace-in-file");
          try {
            const results = replace.sync(options);
            return results
          } catch (error) {
            return error
          }
        },
      })
    },
    // other e2e configuration here
  },
});
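For Cypress versions before 10, the equivalent registration would live in the plugins file instead. A minimal sketch of that older layout:

// cypress/plugins/index.js (Cypress < 10)
const replace = require("replace-in-file");

module.exports = (on, config) => {
  on('task', {
    // Same task body as above, registered via the legacy plugins file
    replaceTask(options) {
      try {
        return replace.sync(options);
      } catch (error) {
        return error;
      }
    },
  });
};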
// test.spec.cy.js
it('calls replace-in-file', () => {
  const options = {
    files: "./config/dashboardData.json",
    configFile: true,
    from: /}\n{/g,
    to: ",\n",
  };
  cy.task('replaceTask', options).then(resultsOrError => {
    console.log(resultsOrError)
  })
});
I am getting this error in my Heroku logs.
Same question
None of the solutions provided there addressed the issue.
I tried different variations of the get method:
app.use(express.static('build'));
app.get('*', function (req, res) {
  res.sendFile('index.html');
});
What else could I try, or what am I missing here?
App.js
const configuration = require('@feathersjs/configuration');
const feathers = require('@feathersjs/feathers');
const express = require('@feathersjs/express');
const socketio = require('@feathersjs/socketio');
const moment = require('moment');

class IdeaService {
  constructor() {
    this.ideas = [];
  }

  async find() {
    return this.ideas;
  }

  async create(data) {
    const idea = {
      id: this.ideas.length,
      text: data.text,
      tech: data.tech,
      viewer: data.viewer
    };
    idea.time = moment().format('h:mm:ss a');

    this.ideas.push(idea);
    return idea;
  }
}

const app = express(feathers());
app.feathers().configure(configuration());

app.use(express.static('build'));
app.get('*', function (req, res) {
  res.sendFile('index.html');
});

// Parse JSON
app.use(express.json());
// Configure SocketIO realtime API
app.configure(socketio());
// Enable REST services
app.configure(express.rest());
// Register services
app.use('/ideas', new IdeaService());

// Connect new streams
app.on('connection', conn => app.channel('stream').join(conn));
// Publish events to stream
app.publish(data => app.channel('stream'));

const PORT = process.env.PORT || 3030;
app.listen(PORT).on('listening', () => console.log(`Server running on port ${PORT}`));

app.service('ideas').create({
  text: 'Build a cool app',
  tech: 'Node.js',
  viewer: 'John Doe'
});

export default IdeaService;
package.json
I've been trying to fetch images from a remote URL into the Gatsby source filesystem, to take advantage of lazy loading with the gatsby-image plugin. I have a RESTful API which returns JSON with a string containing the image URL. I followed this guide, as I'm quite new to the Gatsby Node API and wasn't sure how to tackle this. Everything worked well until the point of adding additional properties to the image with createNodeField. The properties seem to be added (I can see the object with the fields property when I log the fileNode to the console). However, when trying to query the images, I get an error:
I'm wondering if there's something wrong in my code, or if it's due to changes in Gatsby? I'm using Gatsby version 2.0.2. Is there a better way to add additional properties to the image, in order to be able to query just the needed ones?
Here's what my gatsby-node.js looks like:
const axios = require('axios');
const { createRemoteFileNode } = require(`gatsby-source-filesystem`);

exports.sourceNodes = ({ actions, createNodeId, node, store, cache }) => {
  const { createNode, createNodeField } = actions;

  const processProject = project => {
    project.photos.forEach(async photo => {
      let fileNode;
      try {
        fileNode = await createRemoteFileNode({
          url: photo.photo.url,
          store,
          cache,
          createNode,
          createNodeId: id => `projectPhoto-${photo.id}`,
        });
        await createNodeField({
          node: fileNode,
          name: 'ProjectPhoto',
          value: 'true',
        });
        await createNodeField({
          node: fileNode,
          name: 'created_at',
          value: photo.created_at,
        });
      } catch (error) {
        console.warn('error creating node', error);
      }
    });
  }

  return axios.get(baseApiUrl).then(res => {
    res.data.forEach(project => {
      processProject(project);
    });
  });
}
In the end, it seems that using .forEach with async/await was messing things up for some reason. Doing everything in a for...of loop fixed the problem, although ESLint complained about it a lot. Here's the code:
const axios = require('axios');
const { createRemoteFileNode } = require(`gatsby-source-filesystem`);

exports.sourceNodes = ({ actions, createNodeId, node, store, cache }) => {
  const { createNode, createNodeField } = actions;

  // processProject must be async so the for...of loop can await each photo
  const processProject = async project => {
    for (const photo of project.photos) {
      let fileNode;
      try {
        fileNode = await createRemoteFileNode({
          url: photo.photo.url,
          store,
          cache,
          createNode,
          createNodeId: id => `projectPhoto-${photo.id}`,
        });
        await createNodeField({
          node: fileNode,
          name: 'ProjectPhoto',
          value: 'true',
        });
        await createNodeField({
          node: fileNode,
          name: 'created_at',
          value: photo.created_at,
        });
      } catch (error) {
        console.warn('error creating node', error);
      }
    }
  }

  return axios.get(baseApiUrl).then(res => {
    res.data.forEach(project => {
      processProject(project);
    });
  });
}
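One further refinement worth considering (a sketch): since processProject is now async, sourceNodes can wait for every photo node to finish by collecting the promises, for example with Promise.all:

return axios.get(baseApiUrl).then(res =>
  // Resolve only after every project's photos have been processed
  Promise.all(res.data.map(project => processProject(project)))
);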