Here is my pretty code using await/async
monthlyBuckets(req, res) {
const monthlyBuckets = []
const now = DateTime.local()
let date = config.beginningOfTime
while (date < now) {
monthlyBuckets.push({
epoch: date.toMillis(),
month: date.month,
year: date.year,
actions: await redis.get(`actions_monthly_${date.year}_${date.month}`),
interested: await redis.scard(`sinterested_monthly_${date.year}_${date.month}`),
adventurous: await redis.scard(`sadventurous_monthly_${date.year}_${date.month}`),
active: await redis.scard(`sactive_monthly_${date.year}_${date.month}`),
})
date = date.plus({month: 1})
}
res.status(200).json(monthlyBuckets)
}
I like it, but making so many requests not in parallel leads to a request time close to 3 sec.
So, here is my ugly solution without async/await, just promises:
monthlyBuckets(req, res) {
const monthlyBuckets = []
const actions = []
const interested = []
const adventurous = []
const active = []
const now = DateTime.local()
let date = config.beginningOfTime
let entryCount = 0
while (date < now) {
monthlyBuckets.push({
epoch: date.toMillis(),
month: date.month,
year: date.year,
})
actions.push(redis.get(`actions_monthly_${date.year}_${date.month}`))
interested.push(redis.scard(`sinterested_monthly_${date.year}_${date.month}`))
adventurous.push(redis.scard(`sadventurous_monthly_${date.year}_${date.month}`))
active.push(redis.scard(`sactive_monthly_${date.year}_${date.month}`))
date = date.plus({month: 1})
entryCount++
}
const data = await Promise.all(actions.concat(interested).concat(adventurous).concat(active))
for (let i = 0; i < entryCount; i++) {
monthlyBuckets[i].actions = data[i]
monthlyBuckets[i].interested = data[entryCount + i]
monthlyBuckets[i].adventurous = data[entryCount * 2 + i]
monthlyBuckets[i].active = data[entryCount * 3 + i]
}
res.status(200).json(monthlyBuckets)
}
}
That ain't pretty, but it gets the job done under 200ms
Can I have pretty and efficient?
The issue with the code above is that you are trying to:
use one Promise.all() for all the promises
process the output of all the responses in one callback
Though this is not a mistake, it can lead to hard-to-read code.
The code could be written as:
while (date < now) {
  const dateData = {
    epoch: date.toMillis(),
    month: date.month,
    year: date.year,
  };
  // Group the month's promises: one Promise.all per month, resolving to the
  // finished bucket for that month.
  const promiseData = Promise.all([
    dateData, // a plain value is wrapped in an already-resolved promise automatically
    redis.get(`actions_monthly_${date.year}_${date.month}`),
    redis.scard(`sinterested_monthly_${date.year}_${date.month}`),
    redis.scard(`sadventurous_monthly_${date.year}_${date.month}`),
    redis.scard(`sactive_monthly_${date.year}_${date.month}`)
    // FIX: a destructured arrow parameter must be wrapped in parentheses —
    // the original `[a, b] => {...}` form is a syntax error.
  ]).then(([data, actions, interested, adventurous, active]) => {
    // Merge the resolved counts into the month's bucket.
    data.actions = actions;
    data.interested = interested;
    data.adventurous = adventurous;
    data.active = active;
    return data;
  });
  monthlyBuckets.push(promiseData);
  date = date.plus({month: 1});
}
// Each element is a per-month promise; await them all, then respond.
const data = await Promise.all(monthlyBuckets);
res.status(200).json(data);
What changed is
Grouping the promises for each month
Processing each month's group of promises rather than all the promises together, and returning the data in the desired shape.
There is nothing wrong with grouping promises eg:
Promise.all([
Promise.all([ ...]),
Promise.all([ ...]),
singlePromise,
...
]);
processing promises eg:
promiseProcessed1 = promise1.then(callback1);
promiseProcessed12 = Promise.all([promise1, promise2]).then(callback2);
or reusing promises eg:
promiseProcessed1 = promise1.then(callback1);
promiseProcessed12 = Promise.all([promise1, promise2]).then(callback2);
resultDatapromise = Promise.all([promise1, promise2, promiseProcessed1, promiseProcessed12]).then(callback2);
References
Reuse promises
ES6 Destructuring assignment
Taking apart different steps could help in this situation. Example:
/**
 * Builds one month bucket. The four redis reads start immediately (in
 * parallel); for each one, a promise that patches the resolved count back
 * onto the bucket is pushed into `ops` so the caller can `Promise.all(ops)`.
 *
 * Fixes vs. the original:
 *  - the bucket is now returned (the original returned undefined, so the
 *    caller pushed `undefined` into its result array);
 *  - `ops` receives *invoked* promises instead of un-called async functions
 *    (Promise.all over plain functions resolves immediately without ever
 *    running them, so the counts were never filled in).
 *
 * @param date Luxon-style date (toMillis/month/year).
 * @param ops  Array collecting the pending fill-in promises (mutated).
 * @returns the bucket object; its count fields are promises until the
 *          corresponding entry in `ops` settles.
 */
function createBucket(date, ops){
  const b = {
    epoch: date.toMillis(),
    month: date.month,
    year: date.year,
    actions: redis.get(`actions_monthly_${date.year}_${date.month}`),
    interested: redis.scard(`sinterested_monthly_${date.year}_${date.month}`),
    adventurous: redis.scard(`sadventurous_monthly_${date.year}_${date.month}`),
    active: redis.scard(`sactive_monthly_${date.year}_${date.month}`),
  }
  const promised = ['actions','interested', 'adventurous', 'active'];
  // Replace each pending promise with its resolved value; push the invoked
  // operation so Promise.all actually waits for it.
  promised.forEach(p => ops.push((async () => { b[p] = await b[p] })()));
  return b;
}
/**
 * Express handler: responds with one bucket per month between
 * config.beginningOfTime and now. createBucket kicks off the redis reads
 * and registers its fill-in operations in `ops`, which are all awaited
 * before the JSON response is sent.
 */
async function monthlyBuckets(req, res) {
  const ops = [];
  const buckets = [];
  const end = DateTime.local();
  for (let cursor = config.beginningOfTime; cursor < end; cursor = cursor.plus({month: 1})) {
    buckets.push(createBucket(cursor, ops));
  }
  await Promise.all(ops);
  res.status(200).json(buckets);
}
Related
hopefully, you guys will be able to help me out please.
I can't get the pagination to work properly. It always counts the total documents and ignores the filter data. For example, there are 24 total documents, but when filtering by a particular item it returns one document, yet it still reports the total number of pages (which is 3, as I have pageSize set to 9).
Please find my code below:
// GET / — paginated, filterable, sortable vehicle listing.
// Query params: page (1-based), sort (comma-separated fields), plus
// arbitrary filter fields; gte/gt/lte/lt are mapped to Mongo operators.
router.get('/', async (req, res) => {
  try {
    const pageSize = 9;
    const page = Number(req.query.page) || 1;

    // Build the Mongo filter from the query string: drop the reserved
    // pagination/projection keys and prefix comparison operators with `$`
    // (e.g. price[gte]=100 -> { price: { $gte: 100 } }).
    const queryObject = { ...req.query };
    const excludeFields = ['page', 'sort', 'limit', 'fields'];
    excludeFields.forEach((el) => delete queryObject[el]);
    let queryString = JSON.stringify(queryObject);
    queryString = queryString.replace(
      /\b(gte|gt|lte|lt)\b/g,
      (match) => `$${match}`
    );
    const filter = JSON.parse(queryString);

    let query = Vehicle.find(filter);
    if (req.query.sort) {
      const sortBy = req.query.sort.split(',').join(' ');
      query = query.sort(sortBy);
    } else {
      query = query.sort('-createdAt');
    }

    // FIX: count only the *filtered* documents; the original counted the
    // whole collection, so totalPages ignored the active filter.
    const count = await Vehicle.countDocuments(filter);

    // FIX: paginate the sorted `query` built above; the original issued a
    // second, unsorted find(), which is why sorting never took effect.
    const vehicles = await query
      .limit(pageSize)
      .skip(pageSize * (page - 1));

    if (!vehicles) {
      return res.status(200).json({ success: true, data: [] });
    }
    res
      .status(200)
      .json({ vehicles, page, totalPages: Math.ceil(count / pageSize) });
  } catch (error) {
    console.error(error.message);
    res.status(500).send('Server Error');
  }
});
How would you go about adding functioning sorting into this also please, for some reason sorting doesn't work at all for me?
Thanks very much, G.
To get the count to include the filtered data you need to add the query parameter in the countDocuments:
const count = await Vehicle.countDocuments(JSON.parse(queryString));
To add sorting functionality you can append .sort() onto the end of your find query:
const vehicles = await Vehicle.find(JSON.parse(queryString))
.limit(pageSize)
.skip(pageSize * (page - 1))
.sort(<your sort query>)
https://www.mongodb.com/community/forums/t/sorting-with-mongoose-and-mongodb/122573
If I fire the following function 10 times
createInterval() {
var someInterval = interval(10000).pipe( take(1)).subscribe(
_intervalValue =>
{
console.log(" Interval Fired" + new Date().toISOString());
});
}
I get 10 intervals that will console log 10 times. How do I know that I have 10 intervals?
Where can I access these, it's like they exist mysteriously somewhere.
How do I know that I have 10 intervals?
Let's simplify using numbers instead of Observables:
// Creates a random number and logs it — but returns nothing, so the caller
// keeps no reference to what was created.
function createNumber() {
  const value = Math.random();
  console.log(`Number Created: ${value}`);
}

// Fire ten times; afterwards there is no way to know how many were made.
for (let i = 0; i < 10; i++) {
  createNumber();
}
And re-ask the same question: "How do I know that I have 10 numbers?"
Well... you don't. Not unless you're saving a reference to them!
So let's have the function return a reference to the number:
// Same generator, but now the number is returned so callers can keep a
// reference to it.
function createNumber() {
  const value = Math.random();
  console.log(`Number Created: ${value}`);
  return value;
}

// Saving each returned reference lets us answer "how many do I have?".
let myNumbers = [];
for (let i = 0; i < 10; i++) {
  myNumbers.push(createNumber());
}
console.log(`I know I have ${myNumbers.length} numbers!`);
This behavior is no different for observables. If you want know you have 10 intervals, you need to keep track of them:
// Returns the Subscription produced by subscribe(), so the caller can
// track (and later cancel) each one-shot interval.
function createInterval() {
  const oneShot$ = interval(1000).pipe(take(1));
  return oneShot$.subscribe(() =>
    console.log(`Interval Fired: ${ new Date().toISOString() }`)
  );
}

let mySubscriptions: Subscription[] = [];
for (let i = 0; i < 10; i++) {
  mySubscriptions.push(createInterval());
}

// Having kept the references, all ten can be unsubscribed.
mySubscriptions.forEach((sub) => sub.unsubscribe());
Note: by calling .subscribe() you are returning a Subscription, not an Observable. It is often convenient to have functions return the observable, and let consumers of the function call .subscribe():
// Variant that returns the Observable itself (logging moved into tap) and
// leaves the decision of when to subscribe to the consumer.
function createInterval() {
  return interval(1000).pipe(
    take(1),
    tap(() => console.log(`Interval Fired: ${ new Date().toISOString() }`))
  );
}

let myObservables: Observable<number>[] = [];
for (let i = 0; i < 10; i++) {
  myObservables.push(createInterval());
}

// Subscribing yields one Subscription per Observable...
const mySubscriptions = myObservables.map((obs) => obs.subscribe());

// ...which can all be cancelled later.
mySubscriptions.forEach((sub) => sub.unsubscribe());
I have the following code, which was working properly to execute a request via my method fetchDropdownDataByFederationId, but now I have a requirement to execute the same method x number of times.
// Fetches the in-progress person list, then tries to enrich it with team
// information.
// NOTE(review): `wls` is an IPerson[] here, so `wls.createdBy` is
// undefined — the commented-out line above it shows the intended per-person
// id extraction. getInformation() is therefore called with undefined; the
// question is about issuing one request per createdBy id instead.
fetchInProgress(queryString?): Observable<IPerson[]> {
  let PersonList: IPerson[] = [];
  return this.getItems<IPerson[]>('', queryString).pipe(
    take(1),
    switchMap((wls: IPerson[]) => {
      // Snapshot the list so the later map() can merge team data into it.
      PersonList = [...wls];
      //const createdbyIds = [...new Set(wls.map((f) => f.createdBy))];
      return this.teamPageService.getInformation(wls.createdBy);
    }),
    map((teams:any) => {
      console.log('> teams', teams);
      for (let i = 0; i < PersonList.length; i++) {
        //update information
      }
      //console.log('> Final value: ', PersonList);
      return PersonList;
    })
  );
}
But, I'm not finding a way to execute my SwitchMap x number of times and get the results back to use them in my map method to parse the information.
I just moved my SwitchMap to mergeMap, something like this:
// NOTE(review): fragment as posted — several problems keep this from
// compiling/working:
//  - `drops.push(...)` contains a stray `;` inside the argument list
//    (a syntax error);
//  - `forkJoin(...allIds)` is given plain id values, not Observables, so
//    it cannot emit fetched data;
//  - the intent is presumably one getInformation() call per id, e.g.
//    forkJoin(allIds.map((id) => this.teamPageService.getInformation(id))).
mergeMap((wls: IWalklist[]) => {
  //let allIds = wls.contact.map(id => this.getSingleData(id._id) );
  let drops: Dropdown[] = [];
  walklistList = [...wls];
  const allIds = [...new Set(wls.map((f) => f.createdBy))];
  return forkJoin(...allIds).pipe(
    map((idDataArray) => {
      drops.push(
        this.teamPageService.getInformation('');
      );
      return drops;
    })
  )
}),
But still no luck.
Can someone help me? How can I fix it?
I'm working on a service layer that manages subscriptions.
I provide subject-backed observables to consumers like this:
// Create a fresh Subject per channel, remember it for later bookkeeping,
// and hand consumers a read-only view (asObservable hides next/error/complete).
const subject = new Subject();
_trackedSubjects.push(subject);
return subject.asObservable();
Different consumers may monitor the channel, so there may be several observables attached to each subject.
I'd like to monitor the count of subject.observers and if it ever drops back to 0, do some cleanup in my library.
I have looked at refCount, but this only is available on Observable.
I'd love to find something like:
subject.onObserverCountChange((cur, prev) =>
if(cur === 0 && prev !== 0) { cleanUp(subject) }
)
Is there a way to automatic cleanup like this on a subject?
Instead of using Subject - you should probably describe setup/cleanup logic when creating observable. See the example:
const { Observable } = rxjs; // = require("rxjs")
const { share } = rxjs.operators; // = require("rxjs/operators")

// Cold source with explicit setup/teardown: the subscriber function runs on
// subscribe, and the function it returns runs on unsubscribe.
// FIX: use `new Observable(...)` — `Observable.create(...)` is deprecated
// since RxJS 6 and is a direct alias of the constructor.
const eventSource$ = new Observable(o => {
  console.log('setup');
  let i = 0;
  const interval = setInterval(
    () => o.next(i++),
    1000
  );
  return () => {
    console.log('cleanup');
    clearInterval(interval);
  };
});

// share() multicasts through an internal Subject and reference-counts
// subscribers: setup runs for the first subscriber, cleanup runs when the
// last one unsubscribes — exactly the observer-count behavior asked for.
const events$ = eventSource$.pipe(share());

const first = events$.subscribe(e => console.log('first: ', e));
const second = events$.subscribe(e => console.log('second: ', e));

setTimeout(() => first.unsubscribe(), 3000);
setTimeout(() => second.unsubscribe(), 5000);
<script src="https://unpkg.com/rxjs@6.2.2/bundles/rxjs.umd.min.js"></script>
I have an observable that returns arrays/lists of things: `Observable<T[]>`.
And I have a use case where it is a pretty costly affair for the downstream consumer of this observable to have more items added to this list. So I'd like to slow down the rate of additions to this list, but not lose any.
Something like an operator that takes this observable and returns another observable with the same signature, but whenever a new list gets pushed on it and it has more items than last time, then only one or a few are added at a time.
So if the last push was a list with 3 items and next push has 3 additional items with 6 items in total, and the batch size is 1, then this one list push gets split into 3 individual pushes of lists with lengths: 4, 5, 6
So additions are batched, and this way the consumer can more easily keep up with new additions to the list. Or the consumer doesn't have to stall for so long each time while processing additional items in the array/list, because the additions are split up and spread over a configurable size of batches.
I made an addAdditionalOnIdle operator that you can apply to any rxjs observable using the pipe operator. It takes a batchSize parameter, so you can configure the batch size. It also takes a dontBatchAfterThreshold, which stops batching of the list after a certain list size, which was useful for my purposes. The result also contains a morePending value, which you can use to show a loading indicator while you know more data is incoming.
The implementation uses the new requestIdleCallback function internally to schedule the batched pushes of additional items when there is idle time in the browser. This function is not available in IE or Safari yet, but I found this excellent polyfill for it, so you can use it today anyway: https://github.com/aFarkas/requestIdleCallback :)
See the implementation and example usage of addAdditionalOnIdle below:
const { NEVER, of, Observable } = rxjs;
const { concat } = rxjs.operators;
/**
 * addAdditionalOnIdle
 *
 * Only works on observables that produce values that are of type Array.
 * Adds additional elements on window.requestIdleCallback
 *
 * @param batchSize The amount of values that are added on each idle callback
 * @param dontBatchAfterThreshold Return all items after amount of returned items passes this threshold
 */
function addAdditionalOnIdle(
  batchSize = 1,
  dontBatchAfterThreshold = 22,
) {
  return (source) => {
    return Observable.create((observer) => {
      let idleCallback;                 // pending requestIdleCallback handle, if any
      let currentPushedItems = [];      // what the consumer has been shown so far
      let lastItemsReceived = [];       // newest array emitted by the source
      let sourceSubscription = source
        .subscribe({
          complete: () => {
            observer.complete();
          },
          error: (error) => {
            observer.error(error);
          },
          next: (items) => {
            lastItemsReceived = items;
            // An idle callback is already scheduled; it reads the refreshed
            // lastItemsReceived on its next run, so nothing to do here.
            if (idleCallback) {
              return;
            }
            if (lastItemsReceived.length > currentPushedItems.length) {
              const idleCbFn = () => {
                // Source shrank below what was already pushed: emit as-is.
                if (currentPushedItems.length > lastItemsReceived.length) {
                  observer.next({
                    morePending: false,
                    value: lastItemsReceived,
                  });
                  idleCallback = undefined;
                  return;
                }
                const to = currentPushedItems.length + batchSize;
                const last = lastItemsReceived.length;
                if (currentPushedItems.length < dontBatchAfterThreshold) {
                  // Copy up to `batchSize` additional items into view.
                  for (let i = 0 ; i < to && i < last ; i++) {
                    currentPushedItems[i] = lastItemsReceived[i];
                  }
                } else {
                  // Past the threshold: stop batching, show everything.
                  currentPushedItems = lastItemsReceived;
                }
                // More items remain -> schedule the next idle batch before
                // emitting, so morePending below is consistent.
                if (currentPushedItems.length < lastItemsReceived.length) {
                  idleCallback = window.requestIdleCallback(() => {
                    idleCbFn();
                  });
                } else {
                  idleCallback = undefined;
                }
                observer.next({
                  morePending: currentPushedItems.length < lastItemsReceived.length,
                  value: currentPushedItems,
                });
              };
              idleCallback = window.requestIdleCallback(() => {
                idleCbFn();
              });
            } else {
              // List did not grow: pass it straight through, nothing pending.
              currentPushedItems = lastItemsReceived;
              observer.next({
                morePending: false,
                value: currentPushedItems,
              });
            }
          },
        });
      // Teardown: stop the upstream subscription, drop references, and
      // cancel any still-pending idle work.
      return () => {
        sourceSubscription.unsubscribe();
        sourceSubscription = undefined;
        lastItemsReceived = undefined;
        currentPushedItems = undefined;
        if (idleCallback) {
          window.cancelIdleCallback(idleCallback);
          idleCallback = undefined;
        }
      };
    });
  };
}
// Blocking busy-wait "sleep": spins until `milliseconds` have elapsed or a
// 1e7-iteration safety cap is reached. Deliberately synchronous — it
// simulates a slow consumer that hogs the main thread.
function sleep(milliseconds) {
  const start = Date.now();
  let spins = 0;
  while (spins < 1e7 && Date.now() - start <= milliseconds) {
    spins++;
  }
}
// Demo: emit [1,2,3] then [1,2,3,4,5,6] and never complete, so the batched
// pushes can be observed.
let testSource = of(
  [1,2,3],
  [1,2,3,4,5,6],
).pipe(
  concat(NEVER)
);

// With batchSize=2 the jump from 3 to 6 items arrives in idle-time steps.
testSource
  .pipe(addAdditionalOnIdle(2))
  .subscribe((list) => {
    // Simulate a slow synchronous consumer with a busy loop sleep implementation
    sleep(1000);
    document.body.innerHTML += "<p>" + list.value + "</p>";
  });
<script src="https://unpkg.com/rxjs@6.5.3/bundles/rxjs.umd.js"></script>