I'm having an issue where despite replicating the pattern for using Sequelize in AWS Lambdas as described in the docs here: https://sequelize.org/docs/v6/other-topics/aws-lambda/ - the connections are still kept for 10+ minutes even though they are idle.
Here's my Sequelize class, that lives inside the DB package in a private CodeArtifact repo.
const { Sequelize, DataTypes } = require("sequelize");
const fs = require('fs');
const path = require('path');
const basename = path.basename(__filename);
let modelsMap = new Map();
let sequelize = null;
module.exports = class SequelizeClient {
constructor() {}
static async getSequelize(configuration, options){
if(!sequelize){
console.log('load sequelize');
sequelize = await SequelizeClient.#loadSequelize(configuration, options)
} else {
sequelize.connectionManager.initPools();
// restore `getConnection()` if it has been overwritten by `close()`
if (sequelize.connectionManager.hasOwnProperty("getConnection")) {
delete sequelize.connectionManager.getConnection;
}
}
return sequelize;
}
static #initSequelize(options) {
return new Sequelize(
options.database,
options.user,
options.password,
{
host: options.host,
port: parseInt(options.port) || 5432,
logging: console.log,
dialect: "postgres",
pool: {
max: 2,
min: 0,
idle: 1000,
acquire: 3000,
evict: 1000
}
},
);
}
static async #loadSequelize(configuration) {
sequelize = SequelizeClient.#initSequelize(configuration);
await sequelize.authenticate();
return sequelize;
}
static async getModels(sequelize) {
const databaseName = sequelize.config.database;
let models = modelsMap.get(databaseName);
if(models){
return models;
}
models = {};
console.log('load models');
fs
.readdirSync(`${__dirname}/models`)
.filter((file) => file.indexOf('.') !== 0 && file !== basename && file.slice(-3) === '.js')
.forEach((file) => {
const model = require(path.join(`${__dirname}/models`, file))(sequelize, DataTypes);
const name = model.name.charAt(0).toUpperCase() + model.name.slice(1);
models[name] = model;
});
Object.keys(models).forEach((modelName) => {
if (models[modelName].associate) {
models[modelName].associate(models);
}
});
modelsMap.set(databaseName, models);
return models;
}
async static closeConnections(){
await sequelize.connectionManager.close();
}
}
And here's how I use this code inside a Lambda.
const { SequelizeClient } = require("#myPrivatePackage/database");
exports.handler = async (event) => {
try {
const sequelize = await SequelizeClient.getSequelize(someDbParams);
const { MyModel } = await SequelizeClient.getModels(sequelize);
return await MyModel.findOne(whereObj);
} finally {
SequelizeClient.closeConnections();
}
};
Obviously, I cleaned up the code to remove things that are not directly impacting the issue, such as specific queries, code that I'd like to avoid being public etc.
Based on the info I gathered online I'd assume that the connections would be closed once the query is done, but checking pgsql with this
SELECT pid, now() - query_start as duration, query
FROM pg_stat_activity
WHERE pid <> pg_backend_pid()
AND datname = 'db_name'
order by duration desc;
Shows a lot of queries that have duration in excess of 5-10min. Eventually, after around 15min the connections go away.
Any idea as to what I'm doing wrong? Or missing? Or fundamentally not understanding when it comes to these concepts? I've been scratching my head for a few days now.
Related
I have built a test app using nestjs + Sequelize ORM + docker database (as of now local). As per documentation, I am using umzug library and AWS Lambda SAM template and triggering lambda handler. Below is the code for it. Connection Pooling is implemented to reuse existing sequelize connection. Below is the lambdaEntry.ts file where I trigger umzug.up() function. It is triggering but not migrating files.
When run from the command prompt with `node migrate up`, it works correctly. I am testing it using the `sam invoke` command.
require('ts-node/register');
import { Server } from 'http';
import { NestFactory } from '#nestjs/core';
import { Context } from 'aws-lambda';
import * as serverlessExpress from 'aws-serverless-express';
import * as express from 'express';
import { ExpressAdapter } from '#nestjs/platform-express';
import { eventContext } from 'aws-serverless-express/middleware';
import { AppModule } from './app.module';
import sharedBootstrap from './sharedBootstrap';
const { Sequelize } = require('sequelize');
const { Umzug, SequelizeStorage } = require('umzug');
import configuration from '.././config/config';
const fs = require('fs');
let lambdaProxy: Server;
let sequelize = null;
/**
 * Create the Nest application on top of a fresh Express instance and wrap
 * it in an aws-serverless-express server for Lambda proxying.
 */
async function bootstrap() {
  const expressApp = express();
  const adapter = new ExpressAdapter(expressApp);
  const app = await NestFactory.create(AppModule, adapter);
  app.use(eventContext());
  sharedBootstrap(app);
  await app.init();
  return serverlessExpress.createServer(expressApp);
}
/**
 * Lambda entry point. On cold start, bootstrap the Nest server, proxy the
 * request, then run pending migrations; on warm starts just proxy.
 */
export const handler = (event: any, context: Context) => {
  if (!lambdaProxy) {
    bootstrap().then((server) => {
      lambdaProxy = server;
      serverlessExpress.proxy(lambdaProxy, event, context);
      (async () => {
        if (!sequelize) {
          console.log('New connection::');
          sequelize = await loadSequelize();
        } else {
          // Warm start: restore the pool closed at the end of the last run.
          sequelize.connectionManager.initPools();
          if (sequelize.connectionManager.hasOwnProperty('getConnection')) {
            delete sequelize.connectionManager.getConnection;
          }
        }
        try {
          console.log('MIGRATOR::');
          const umzug = new Umzug({
            migrations: { glob: 'src/migrations/*.ts' },
            context: sequelize.getQueryInterface(),
            storage: new SequelizeStorage({ sequelize }),
            logger: console,
          });
          const migrations = await umzug.pending();
          console.log('pending ? : ', JSON.stringify(migrations));
          //test for file exists.
          for (const migration of migrations) {
            try {
              if (fs.existsSync(migration.path)) {
                console.log('file exists');
              }
            } catch (err) {
              console.log('file does not exists');
              console.error(err);
            }
          }
          // BUG FIX: this call used to be wrapped in an `async () => {...}`
          // arrow that was created but never invoked, so the migrations were
          // listed but never applied ("triggering but not migrating").
          await umzug.up();
        } catch (e: any) {
          console.log('error2 ? ', e);
        } finally {
          await sequelize.connectionManager.close();
        }
      })();
    });
  } else {
    serverlessExpress.proxy(lambdaProxy, event, context);
  }
};
async function loadSequelize() {
const sequelize = new Sequelize(
configuration.database,
configuration.username,
configuration.password,
{
dialect: 'mysql',
host: configuration.host,
port: Number(configuration.port),
pool: {
max: 2,
min: 0,
idle: 0,
acquire: 3000,
evict: 600,
},
},
);
await sequelize.authenticate();
return sequelize;
}
I was able to solve the issue after a lot of tries. I separated out the Sequelize connection code, called it from the app side, and triggered it from lambdaEntry.
lambdaEntry.js file.
/**
 * Bootstrap the Nest app, run migrations through the service layer, and
 * return an aws-serverless-express server wrapping the Express instance.
 * Migration failures propagate to the caller.
 */
async function bootstrap(uuid = null) {
  console.log('Calling bootstrap');
  const expressServer = express();
  const nestApp = await NestFactory.create(
    AppModule,
    new ExpressAdapter(expressServer),
  );
  nestApp.use(eventContext());
  sharedBootstrap(nestApp);
  await nestApp.init();
  // Run migrations via a service (e.g. PurchaseListService) so they execute
  // on the app's own Sequelize connection.
  // CLEANUP: the previous `try { ... } catch (err) { throw err; }` was a
  // no-op wrapper and has been removed; errors still propagate unchanged.
  const migrateResult1 = await nestApp.get(PurchaseListService).migrate('down');
  console.log(migrateResult1);
  const migrateResult2 = await nestApp.get(PurchaseListService).migrate('up');
  console.log(migrateResult2);
  return serverlessExpress.createServer(expressServer);
}
/**
 * Lambda entry point: bootstrap once, then proxy every request.
 */
export const handler = (event: any, context: Context) => {
  if (!lambdaProxy) {
    // BUG FIX: this used to call `bootstrap(uuid)` but `uuid` is not defined
    // in this scope (ReferenceError); bootstrap() already defaults it to null.
    bootstrap()
      .then((server) => {
        lambdaProxy = server;
        serverlessExpress.proxy(lambdaProxy, event, context);
      })
      .catch((err) => {
        // Surface bootstrap failures instead of leaving a floating rejection.
        console.error('bootstrap failed', err);
      });
  } else {
    serverlessExpress.proxy(lambdaProxy, event, context);
  }
};
/code/src/purchaselist/purchaselist.service.ts
// Runs Umzug migrations from inside the Nest service layer, reusing the
// Sequelize connection already attached to the PurchaseListItem model.
// @param id - requested direction; see NOTE on the switch below.
// @returns a log string (HTML-ish) describing which migrations ran.
async migrate(id: string): Promise<any> {
console.log('migrate script triggered', id);
// Reuse the connection Sequelize already opened for this model.
const sequelize = PurchaseListItem.sequelize;
const umzug = new Umzug({
migrations: { glob: 'src/migrations/*.{ts,js}' },
context: sequelize.getQueryInterface(),
storage: new SequelizeStorage({ sequelize }),
logger: console,
});
let consoleDisplay = 'Umzug LOGS:::<br/>';
// NOTE(review): `default:` falls through into 'up', so every id — including
// 'down' — executes umzug.up(). Confirm this is intentional before adding
// more directions.
switch (id) {
default:
case 'up':
await umzug.up().then(function (migrations) {
console.log('Umzug Migrations UP::<br/>', migrations);
consoleDisplay +=
'Umzug Migrations UP::<br/>' + JSON.stringify(migrations);
});
break;
}
return consoleDisplay;
}
I'm new to next js. And I have one user.js file inside of my pages directory in next.js. This is the source code:
// pages/user.js
// NOTE(review): React components should be PascalCase (`User`), otherwise
// Next.js/React tooling may not treat this as a component.
function user(props) {
const [userList, setUserList] = useState([])
const [roleList, setRoleList] = useState([])
// Fetches users and roles and stores them in component state.
async function initialFetch() {
const userList = await fetch('GET', GET_ALL_USERS)
setUserList(userList)
const roleList = await fetch('GET', GET_ALL_ROLES)
setRoleList(roleList)
console.log('userList in async')
console.log(userList)
}
// BUG: this runs in the render body, so it executes on EVERY render.
// Each setState triggers a re-render, which runs this again — an infinite
// fetch/set loop. Move it into useEffect(..., []) so it runs once on mount.
if (!props.userList.status) {
initialFetch()
} else {
setUserList(props.userList)
setRoleList(props.roleList)
}
console.log('userList outside')
console.log(userList)
return (
<>
<TableUserManagement users={userList} roles={roleList}/>
</>
)
};
// Server-side data fetch for the page. The two requests are independent,
// so run them in parallel instead of awaiting them one after the other.
user.getInitialProps = async (ctx) => {
  const [userList, roleList] = await Promise.all([
    fetch('GET', GET_ALL_USERS),
    fetch('GET', GET_ALL_ROLES),
  ]);
  return { userList, roleList };
};
The problem is that the above async initialFetch() function is called repeatedly, in an infinite loop:
So what am I doing wrong here? Thank you
Note: I have tried to use useEffect() but the looping still happening. This the code :
// NOTE(review): no dependency array is passed, so this effect runs after
// EVERY render; combined with the setState calls below it still loops.
// Pass `[]` as the second argument so it runs only on mount.
useEffect(
() => {
if (!props.userList.status) {
initialFetch()
} else {
setUserList(props.userList)
setRoleList(props.roleList)
}
console.log('user list diliuar')
console.log(userList)
}
)
This issue is not related to Next.js but React itself. This is the code that cause unfinite calls:
// (excerpt from the question) — this runs in the render body, so it
// executes again on every re-render caused by its own setState calls:
if (!props.userList.status) {
initialFetch()
} else {
setUserList(props.userList)
setRoleList(props.roleList)
}
After any state update your component re-renders, that part of the code runs again, the fetch sets state again, and so on — it loops forever.
You should move your data-fetching logic inside componentDidMount or useEffect. Remember to provide the dependency array of useEffect. In this case, you only need to fetch the data once, so you should provide an empty dependency array.
useEffect(() => {
  // Fetch users then roles, seeding component state with each result.
  async function initialFetch() {
    const users = await fetch('GET', GET_ALL_USERS);
    setUserList(users);
    const roles = await fetch('GET', GET_ALL_ROLES);
    setRoleList(roles);
  }
  if (props.userList.status) {
    // The server already supplied the data — just copy it into state.
    setUserList(props.userList);
    setRoleList(props.roleList);
  } else {
    initialFetch();
  }
  // Empty dependency array: run exactly once, on mount.
}, []);
P.S.: you should name your React component in PascalCase (e.g., User).
Config makes a call to the parameter store and returns a config object. I need to wait before initialising mysql.
const config = require('./config');
// NOTE(review): if ./config resolves its values asynchronously (parameter
// store call), requiring it synchronously hands serverless-mysql an
// unresolved value — see the answer below for the async wrapper.
const mysql = require('serverless-mysql')(config);
exports.handler = (event, context) => {
// mysql stuff
}
I assume you need to wait for this to happen?
const mysql = require('serverless-mysql')(config)??
If so, then do this:
const config = require('./config');
async function mySQLStuff() {
try{
const mysql = await require('serverless-mysql')(config);
} catch (error) {
//handle error
}
return mysql;
};
exports.handler = (event, context) => {
mySQLStuff()
.then((data) => //mysql stuff)
};
I am trying to implement Firebase authentication with server-side sessions using koa, koa-session, koa-redis.
I just can't grasp it. When reading the koa-session readme, this is particularly cryptic to me (link):
You can store the session content in external stores (Redis, MongoDB or other DBs) by passing options.store with three methods (these need to be async functions):
get(key, maxAge, { rolling }): get session object by key
set(key, sess, maxAge, { rolling, changed }): set session object for key, with a maxAge (in ms)
destroy(key): destroy session for key
After asking around, I did this:
// middleware/installSession.js
const session = require('koa-session');
const RedisStore = require('koa-redis');
// Session lifetime fallback: one day, in milliseconds.
const ONE_DAY = 1000 * 60 * 60 * 24;
// Installs cookie-backed sessions, persisted in Redis, on the Koa app.
module.exports = function installSession(app) {
// Cookie-signing keys for koa-session.
app.keys = [process.env.SECRET];
app.use(session({
store: new RedisStore({
url: process.env.REDIS_URL,
key: process.env.REDIS_STORE_KEY,
// NOTE(review): `redis` is not defined anywhere in this module, so these
// overrides will throw at runtime. koa-redis already implements
// get/set/destroy internally — these custom methods are likely unnecessary.
async get(key) {
const res = await redis.get(key);
if (!res) return null;
return JSON.parse(res);
},
async set(key, value, maxAge) {
// Fall back to ONE_DAY when koa-session passes a non-numeric maxAge.
maxAge = typeof maxAge === 'number' ? maxAge : ONE_DAY;
value = JSON.stringify(value);
// 'PX' sets expiry in milliseconds.
await redis.set(key, value, 'PX', maxAge);
},
async destroy(key) {
await redis.del(key);
},
})
}, app));
};
Then in my main server.js file:
// server.js
...
const middleware = require('./middleware');
const app = new Koa();
// Wrap the Koa callback so the raw http server can be reused (e.g. sockets).
const server = http.createServer(app.callback());
// session middleware — must be installed before anything that reads ctx.session
middleware.installSession(app);
// other middleware, which also get app as a parameter
middleware.installFirebaseAuth(app);
...
// Parse the port defensively, defaulting to 3000.
const PORT = parseInt(process.env.PORT, 10) || 3000;
server.listen(PORT);
console.log(`Listening on port ${PORT}`);
But then how do I access the session and its methods from inside other middlewares? Like in the installFirebaseAuth middleware, I want to finally get/set session values:
// installFirebaseAuth.js
...
// Installs Firebase token verification middleware on the Koa app.
module.exports = function installFirebaseAuth(app) {
...
// Koa middleware: verifies the request's Firebase access token.
const verifyAccessToken = async (ctx, next) => {
...
// trying to access the session, none work
// NOTE(review): with koa-session, ctx.session is a plain object — read and
// assign properties directly (e.g. ctx.session.foo = bar). It has no .get()
// method, and Koa never exposes the session on the raw ctx.req.
console.log('ctx.session', ctx.session);
console.log('ctx.session.get():'
ctx.session.get(process.env.REDIS_STORE_KEY));
console.log('ctx.req.session', ctx.req.session);
const redisValue = await ctx.req.session.get(process.env.REDIS_STORE_KEY);
...
}
}
ctx.session returns {}
ctx.session.get() returns ctx.session.get is not a function
ctx.req.session returns undefined
Any clues?
Thanks!!
This works in my case; I hope it helps you.
// Minimal Koa app demonstrating koa-session backed by koa-redis.
const Koa = require('koa');
const Router = require('koa-router');
const serveStatic = require('koa-static');
const session = require('koa-session');
// const ioredis = require('ioredis')
// const redisStore = new ioredis()
const redisStore = require('koa-redis');
const bodyparser = require('koa-bodyparser');

const app = new Koa();
const router = new Router();

app.use(serveStatic('.'));
app.use(bodyparser());

// Cookie-signing keys for the session middleware.
app.keys = ['ohkeydoekey'];
app.use(session({
  key: 'yokiijay:sess',
  maxAge: 1000 * 20, // 20-second sessions, for demonstration
  store: redisStore(),
}, app));

app.use(router.routes(), router.allowedMethods());

// Log in: only the hard-coded user is accepted; session state is set here.
router.post('/login', async (ctx) => {
  const { username } = ctx.request.body;
  if (username !== 'yokiijay') {
    ctx.body = `sorry, you can't login`;
    return;
  }
  ctx.session.user = username;
  const count = ctx.session.count || 0;
  ctx.session.code = count;
  ctx.body = `wellcome ${username} logged in`;
});

// Report whether the caller currently has a live session.
router.get('/iflogin', async (ctx) => {
  ctx.body = ctx.session.user ? ctx.session : 'you need login';
});

app.listen(3000, () => {
  console.log('app running');
});
I'm having a real hard time understanding how to implement aor-realtime (trying to use it with firebase; reads only, no write).
The first place I get stuck: This library generates a saga, right? How do I connect that with a restClient/resource? I have a few custom sagas that alert me on errors, but there is a main restClient/resource backing those. Those sagas just handles some side-effects. In this case, I just don't understand what the role of the client is, and how it interacts with the generated saga (or visa-versa)
The other question is with persistence: Updates stream in and the initial set of records is not loaded in one go. Should I be calling observer.next() with each update? or cache the updated records and call next() with the entire collection to-date.
Here's my current attempt at doing the later, but I'm still lost with how to connect it to my Admin/Resource.
import realtimeSaga from 'aor-realtime';
import { client, getToken } from '../firebase';
import { union } from 'lodash'
let cachedToken
// Builds the observeRequest function aor-realtime expects: given a fetch
// type/resource/params, return an observable-like object with subscribe().
const observeRequest = path => (type, resource, params) => {
// Filtering so that only chats are updated in real time
if (resource !== 'chat') return;
// Accumulated snapshot values keyed by id, plus the id ordering.
let results = {}
let ids = []
return {
subscribe(observer) {
let databaseRef = client.database().ref(path).orderByChild('at')
let events = [ 'child_added', 'child_changed' ]
events.forEach(e => {
// NOTE(review): destructuring `{ key, val }` detaches val() from the
// snapshot; if the Firebase SDK's val() relies on `this`, call
// snapshot.val() instead — verify against the SDK version in use.
databaseRef.on(e, ({ key, val }) => {
results[key] = val()
ids = union([ key ], ids)
// Emit the entire collection accumulated so far on every child event.
observer.next(ids.map(id => results[id]))
})
})
const subscription = {
unsubscribe() {
// Clean up after ourselves
databaseRef.off()
results = {}
ids = []
// Notify the saga that we cleaned up everything
observer.complete();
}
};
return subscription;
},
};
};
export default path => realtimeSaga(observeRequest(path));
How do I connect that with a restClient/resource?
Just add the created saga to the custom sagas of your Admin component.
About the restClient, if you need it in your observer, then pass it the function which return your observer as you did with path. That's actually how it's done in the readme.
Should I be calling observer.next() with each update? or cache the updated records and call next() with the entire collection to-date.
It depends on the type parameter which is one of the admin-on-rest fetch types:
CRUD_GET_LIST: you should return the entire collection, updated
CRUD_GET_ONE: you should return the resource specified in params (which should contains its id)
Here's the solution I came up with, with guidance from @gildas:
import realtimeSaga from "aor-realtime";
import { client } from "../../../clients/firebase";
// NOTE(review): `union` is imported but no longer used in this version.
import { union } from "lodash";
// observeRequest factory for aor-realtime: watches the Firebase `path`
// and emits paginated chat lists as the data changes.
const observeRequest = path => {
return (type, resource, params) => {
// Filtering so that only chats are updated in real time
if (resource !== "chats") return;
// Snapshot cache keyed by id; ids kept sorted by `at` descending.
let results = {}
let ids = []
const updateItem = res => {
results[res.key] = { ...res.val(), id: res.key }
ids = Object.keys(results).sort((a, b) => results[b].at - results[a].at)
}
return {
subscribe(observer) {
const { page, perPage } = params.pagination
const offset = perPage * (page - 1)
// Fetch just enough records to cover the requested page.
const databaseRef = client
.database()
.ref(path)
.orderByChild("at")
.limitToLast(offset + perPage)
// NOTE(review): `total: ids.length + 1` over-reports by one, presumably to
// force a "next page" control — replace with a real count if one is available.
const notify = () => observer.next({ data: ids.slice(offset, offset + perPage).map(e => results[e]), total: ids.length + 1 })
// Initial load: one full snapshot, then a single notification.
databaseRef.once('value', snapshot => {
snapshot.forEach(updateItem)
notify()
})
// Incremental updates after the initial load.
databaseRef.on('child_changed', res => {
updateItem(res)
notify()
})
const subscription = {
unsubscribe() {
// Clean up after ourselves
databaseRef.off();
// Notify the saga that we cleaned up everything
observer.complete();
}
};
return subscription;
}
};
}
};
export default path => realtimeSaga(observeRequest(path));