Send partial list with errors in GraphQL

With a query that returns a list:
{
  users(first: 10) {
    messages(first: 10) {
      foo
    }
  }
}
and a resolver for messages -
const errors = [];

async function messagesResolver(user, { first }, ctx, info) {
  const messages = await Promise.all(
    user.messages.map(
      messageId => fetch(messageId).catch(e => {
        // Collect the error
        errors.push(e);
        // Ignore this failed message id
        return null;
      })
    )
  );
  // TODO:
  // How do I add errors to the list of errors
  // sent to the client?
  return messages.filter(m => m != null);
}
Is it possible to return only a partial list of the messages whose fetch succeeded? Is it possible to send the errors along and let the client decide how to handle them?
Replacing a message with an Error does this to an extent, but leaves null values in the resolved list -
messages = [ { 1 }, { 2 }, new Error(3), { 4 }, new Error(5) ]
resolves to
data = [ { 1 }, { 2 }, null, { 4 }, null ]
errors = [ Error(3), Error(5) ]
But is it possible to send the errors via another API (say, info.addError(new Error(3))) to achieve the following result?
data = [ { 1 }, { 2 }, { 4 } ]
errors = [ Error(3), Error(5) ]
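
For reference, a minimal sketch of the Error-in-list behavior described above, assuming an executor (such as graphql-js) that turns a resolved Error instance into a null list entry plus an entry in the response's errors array:

async function messagesResolver(user, { first }, ctx, info) {
  // A rejected fetch resolves to its Error; the executor nulls that
  // list entry and appends the error to the response's `errors` array.
  return Promise.all(
    user.messages.map(messageId => fetch(messageId).catch(e => e))
  );
}

This yields the data = [ { 1 }, { 2 }, null, { 4 }, null ] shape, not the compacted list the question asks about.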

Related

MongoDB aggregation: remove null values from object with nested properties

Is there a way to remove literally all null or empty-string values from an object? We have an aggregation which creates an object with empty fields, and empty objects wherever the value is null.
What we want to do is remove all null properties and empty objects and recreate the object, in order to keep the data as small as possible.
e.g. in the following object, only 'test' and 'more-nested-data' should be kept; the rest can be removed:
{
  "test": "some",
  "test2": {
  },
  "test3": {
    "some-key": {
    },
    "some-other-key": {
      "more-nested-data": true,
      "more-nested-emtpy": null
    }
  }
}
which should become:
{
  "test": "some",
  "test3": {
    "some-other-key": {
      "more-nested-data": true
    }
  }
}
I have tried a lot; I think something could be done using $objectToArray, but I have not found the solution yet. The required aggregation would need to recursively (or down to a defined level) remove null properties and empty objects.
Use the $function operator, available since MongoDB 4.4 (released 2020), to do this recursively as you note. Given this input, which is a slightly expanded version of the one supplied in the question:
var dd = {
  "test": "some",
  "test2": { },
  "test3": {
    "some-key": { },
    "some-other-key": {
      "more-nested-data": true,
      "more-nested-emtpy": null,
      "emptyArr": [],
      "notEmptyArr": [
        "XXX",
        null,
        {"corn": "dog"},
        {"bad": null},
        {"other": {zip: null, empty: [], zap: "notNull"}}
      ]
    }
  }
};
db.foo.insert(dd);
then this pipeline:
db.foo.aggregate([
  {$replaceRoot: {newRoot: {$function: {
    body: function(obj) {
      var process = function(holder, spot, value) {
        var remove_it = false;
        // test arrays FIRST since [] instanceof Object is true!
        if(Array.isArray(value)) {
          // walk BACKWARDS due to the potential splice() later,
          // which will change the length...
          for(var jj = value.length - 1; jj >= 0; jj--) {
            process(value, jj, value[jj]);
          }
          if(0 == value.length) {
            remove_it = true;
          }
        } else if(value instanceof Object) {
          walkObj(value);
          if(0 == Object.keys(value).length) {
            remove_it = true;
          }
        } else {
          if(null == value) {
            remove_it = true;
          }
        }
        if(remove_it) {
          if(Array.isArray(holder)) {
            holder.splice(spot, 1); // snip out the value
          } else if(holder instanceof Object) {
            delete holder[spot];
          }
        }
      };
      var walkObj = function(obj) {
        Object.keys(obj).forEach(function(k) {
          process(obj, k, obj[k]);
        });
      };
      walkObj(obj); // entry point!
      return obj;
    },
    args: [ "$$CURRENT" ],
    lang: "js"
  }}}}
]);
produces this result:
{
  "_id" : 0,
  "test" : "some",
  "test3" : {
    "some-other-key" : {
      "more-nested-data" : true,
      "notEmptyArr" : [
        "XXX",
        {
          "corn" : "dog"
        },
        {
          "other" : {
            "zap" : "notNull"
          }
        }
      ]
    }
  }
}
A convenient way to debug such complex functions is to declare them as variables outside of the pipeline and run data through them to simulate the documents (objects) coming out of the database, e.g.:
ff = function(obj) {
  var process = function(holder, spot, value) {
    var remove_it = false;
    // test arrays FIRST since [] instanceof Object is true!
    if(Array.isArray(value)) {
      ...

printjson(ff(dd)); // use the same doc as above
You can put print and other debugging aids into the code; when you are done, remove them and call the pipeline to process the real data as follows:
db.foo.aggregate([
  {$replaceRoot: {newRoot: {$function: {
    body: ff, // substitute the debugged function here!
    args: [ "$$CURRENT" ],
    lang: "js"
  }}}}
]);
Sounds like the $unwind operator would help. Check out the $unwind operator at https://docs.mongodb.com/manual/reference/operator/aggregation/unwind/
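
For illustration, a hypothetical $unwind pipeline over the sample document's array field; it flattens the array into one document per element and can drop null elements, but it does not by itself recursively strip null properties:

db.foo.aggregate([
  // one output document per element of notEmptyArr
  { $unwind: "$test3.some-other-key.notEmptyArr" },
  // drop the documents whose unwound element is null
  { $match: { "test3.some-other-key.notEmptyArr": { $ne: null } } }
]);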

Remove null results from an array that can contain nullable values in GraphQL

I have a query in my app that works, but the response is a little ugly. There are probably two ways to solve this:
Write the resolver differently
Clean the response of null values
Here is the resolver:
t.list.field('manyDevices', {
  type: 'Device',
  description: 'Get list of devices belonging to user',
  args: {
    input: nonNull(deviceIdentifierInput.asArg()),
  },
  resolve: async (_, { input: { id } }, { prisma }) => {
    return await prisma.device.findMany({ where: { userId: id } });
  },
});
This resolver looks up all devices for the provided id. The id can be mine or some other user's. Devices can be public or private, and I don't want to receive private devices unless they are mine.
const isDevicePublic = rule({ cache: 'strict' })(
  async ({ isPublic }: Device) => {
    if (!isPublic) {
      return permissionErrors.noPermission;
    }
    return true;
  },
);

const isDeviceOwner = rule({ cache: 'strict' })(
  async ({ userId }: Device, _, { user }: Context) => {
    assertValue(user, permissionErrors.noAuthentication);
    if (userId !== user.id) {
      return permissionErrors.noPermission;
    }
    return true;
  },
);
These are rules that I place on my schema with the graphql-shield library, and they work. There is just one problem: if a user has a private device, it will be listed in the response array as null and graphql-shield will throw an error, so the response can look like this:
{
  "errors": [
    {
      "message": "You have no permission to access this resource",
      "locations": [
        {
          "line": 3,
          "column": 5
        }
      ],
      "path": [
        "manyDevices",
        0,
        "name"
      ],
      "extensions": {
        "code": "INTERNAL_SERVER_ERROR",
        "exception": {
          "stacktrace": [
            "Error: You have no permission to access this resource",
            "    at Rule.resolve (/workspace/node_modules/graphql-shield/dist/rules.js:33:24)",
            "    at runMicrotasks (<anonymous>)",
            "    at processTicksAndRejections (internal/process/task_queues.js:93:5)",
            "    at async Promise.all (index 0)"
          ]
        }
      }
    }
  ],
  "data": {
    "manyDevices": [
      null,
      {
        "name": "device-01"
      }
    ]
  }
}
So there is one fetched device and another, private, one that throws this error. Can I somehow remove the null and the error from the response, or should I filter them out in the resolver?
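
One sketch of the first option ("write the resolver differently") is to push the visibility check into the Prisma query, so other users' private devices never enter the list. This is a hypothetical variant of the resolver above; it assumes the Device model exposes the isPublic and userId columns used by the rules, and that the authenticated user is on the context:

resolve: async (_, { input: { id } }, { prisma, user }) => {
  // Owners see all of their devices; everyone else sees only public ones
  const isOwner = user != null && user.id === id;
  return prisma.device.findMany({
    where: isOwner ? { userId: id } : { userId: id, isPublic: true },
  });
},

With this, the shield rules never reject a listed device, so no null entries or permission errors appear in the response.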

Is there any cost advantage of Parse.Object.saveAll vs. saving individually?

The Parse JS SDK provides a Parse.Object.saveAll() method to save many objects with one command.
From looking at ParseServerRESTController.js it seems that each object is saved individually:
if (path === '/batch') {
  let initialPromise = Promise.resolve();
  if (data.transaction === true) {
    initialPromise = config.database.createTransactionalSession();
  }
  return initialPromise.then(() => {
    const promises = data.requests.map(request => {
      return handleRequest(
        request.method,
        request.path,
        request.body,
        options,
        config
      ).then(
        response => {
          return {
            success: response
          };
        },
        error => {
          return {
            error: {
              code: error.code,
              error: error.message
            },
          };
        }
      );
    });
    return Promise.all(promises).then(result => {
      if (data.transaction === true) {
        if (
          result.find(resultItem => typeof resultItem.error === 'object')
        ) {
          return config.database.abortTransactionalSession().then(() => {
            return Promise.reject(result);
          });
        } else {
          return config.database.commitTransactionalSession().then(() => {
            return result;
          });
        }
      } else {
        return result;
      }
    });
  });
}
It seems that saveAll is merely a convenience wrapper around saving each object individually, so it still seems to make n database requests for n objects.
Is it correct that saveAll has no cost advantage (performance, network traffic, etc.) vs. saving each object individually in Cloud Code?
I can tell you that the answer is that Parse.Object.saveAll and Parse.Object.destroyAll batch requests by default in batches of 20 objects. But why take my word for it? Let's test it out!
Turn verbose logging on and then run the following:
const run = async function run() {
  const objects = [...Array(10).keys()].map(i => new Parse.Object('Test').set({ i }));
  await Parse.Object.saveAll(objects);
  const promises = objects.map(o => o.increment('i').save());
  return Promise.all(promises);
};

run()
  .then(console.log)
  .catch(console.error);
And here's the output from the parse-server logs (I've truncated it, but it should be enough to make apparent what is going on):
verbose: REQUEST for [POST] /parse/batch: { // <--- note the path
  "requests": [ // <--- an array of requests!!!
    {
      "method": "POST",
      "body": {
        "i": 0
      },
      "path": "/parse/classes/Test"
    },
    ... skip the next 8, you get the idea ...
    {
      "method": "POST",
      "body": {
        "i": 9
      },
      "path": "/parse/classes/Test"
    }
  ]
}
.... // <-- some irrelevant output removed for brevity.
verbose: RESPONSE from [POST] /parse/batch: {
  "response": [
    {
      "success": {
        "objectId": "szVkuqURVq",
        "createdAt": "2020-03-05T21:25:44.487Z"
      }
    },
    ...
    {
      "success": {
        "objectId": "D18WB4Nsra",
        "createdAt": "2020-03-05T21:25:44.491Z"
      }
    }
  ]
}
...
// now we iterate through and there's a request per object.
verbose: REQUEST for [PUT] /parse/classes/Test/szVkuqURVq: {
  "i": {
    "__op": "Increment",
    "amount": 1
  }
}
...
verbose: REQUEST for [PUT] /parse/classes/Test/HtIqDIsrX3: {
  "i": {
    "__op": "Increment",
    "amount": 1
  }
}
// and the responses...
verbose: RESPONSE from [PUT] /parse/classes/Test/szVkuqURVq: {
  "response": {
    "i": 1,
    "updatedAt": "2020-03-05T21:25:44.714Z"
  }
}
...
In the core manager code, you do correctly identify that we are making a request to the data store (i.e. MongoDB) for each object. This is necessary because an object may have relations or pointers that have to be handled, and those may require additional calls to the data store.
BUT! Calls between the parse server and the data store usually travel over very fast networks using a binary format, whereas calls between the client and the parse server are JSON and go over longer distances with ordinarily much slower connections.
There is one other potential advantage that you can see in the core manager code: the batch can be done in a transaction.
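
For completeness, a small sketch of steering the batching from the client side. batchSize is an option saveAll accepts in recent versions of the JS SDK (treat the exact option support as an assumption to verify against your SDK version):

async function saveMany() {
  // Save 100 objects in a single /batch request instead of the
  // default batches of 20 (assumes an initialized Parse client).
  const objects = [...Array(100).keys()].map(i => new Parse.Object('Test').set({ i }));
  return Parse.Object.saveAll(objects, { batchSize: 100 });
}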

vue-rx: how to watch for when a value of an object in an array stops changing?

"vue-rx": "^6.1.0",
"rxjs": "^6.4.0",
"vue": "^2.5.17",
I'm new to vue-rx and rxjs, but after seeing several rx demos I became quite interested in them. I want to use them in my project, which posts a request when the attribute num stops changing:
[
  {
    id: 0,
    name: 'giftA',
    num: 0 // will turn to 1, 2, 3, 4, 5, ... after running the send({id: 0}) function 1, 2, 3, 4, 5, ... times
  },
  {
    id: 1,
    name: 'giftB',
    num: 0
  },
  ...
]
And here is my solution:
I use $watchAsObservable to watch changes of sendCalledTimes, and then mergeMap to post the request.
The variable sendCalledTimes is a number that is incremented (sendCalledTimes++) whenever the send function is called; after posting the request, it is reset with sendCalledTimes = 0.
So $watchAsObservable('sendCalledTimes') (vue-rx) executes at most every three seconds, which reduces the number of requests in my project. But I think it's still not good, because it acts just like a timer and can't watch whether num of each object in the array changes. A good example would be like this search example.
data() {
  return {
    sendCalledTimes: 0,
    giftArr: []
  }
},
created() {
  this.$watchAsObservable('sendCalledTimes').pipe(
    pluck('newValue'),
    filter(val => val > 0),
    debounceTime(3000),
    // if `sendCalledTimes` were the same number as before,
    // the steps below would not execute:
    // distinctUntilChanged(),
    mergeMap(
      (val) => this.requestSendGift()
    ),
  ).subscribe(
    (val) => { }
  )
},
methods: {
  send (obj) {
    let pushFlag = true
    // `for...of` iterates the elements; `for...in` would yield the indices
    for (const gift of this.giftArr) {
      if (gift.id === obj.id) {
        gift.num++
        pushFlag = false
        break
      }
    }
    if (pushFlag) {
      this.giftArr.push(obj)
    }
    // observable
    this.sendCalledTimes++
  },
  async requestSendGift () {
    for (const gift of this.giftArr) {
      // example of posting a request to store each gift
      await axios({
        data: gift,
        type: 'post',
        url: '...'
      }).then(res => { ... })
    }
    // reset `this.sendCalledTimes`
    this.sendCalledTimes = 0
  }
}
Also, since vue-rx doesn't have many examples on GitHub, I need help creating a good subscription for this situation.
I have tried this, but it failed:
data () {
  return {
    giftArr: []
  }
},
subscriptions: {
  // console.log(this.$observables.test) throws:
  // TypeError: Cannot read property 'giftArr' of undefined
  test: from(this.giftArr)
},
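
(For what it's worth, the TypeError happens because in the object form, this.giftArr is evaluated while the options object is being built, before the component instance exists. vue-rx also accepts a function form of subscriptions in which this is the component, sketched hypothetically below; note that from() on a plain array emits its current elements once and completes, so it still won't track later changes.)

subscriptions() {
  return {
    // `this` is the component instance here, so data() has already run
    test: from(this.giftArr)
  };
},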
It would be greatly appreciated if anyone could help me solve this.
It's a little unclear from your question exactly what you're trying to do, but I've created an example based on what I believe to be your intent.
I made some assumptions:
You have a 'gifts' array that represents all of the gifts that will ever exist.
You want to make updates to that array.
Every time you make an update to the array, you want to see the update in the form of an Observable emitting an event.
Use a Subject
I think what you want is a Subject.
const gift$ = new Subject();
Make it Emit on Updates
And you would set it up to emit every time you increment num or add a new gift.
function addGift(gift) {
gifts.push(gift);
gift$.next(gift);
}
function incrementGift(gift) {
gift.num++;
gift$.next(gift);
}
All together it could look something like this:
import { Subject } from 'rxjs';

const gift$ = new Subject();
const gifts = [{ id: 0, name: 'giftA', num: 0 }, { id: 1, name: 'giftB', num: 0 }];

function addGift(gift) {
  gifts.push(gift);
  gift$.next(gift);
}

function incrementGift(gift) {
  gift.num++;
  gift$.next(gift);
}

function sendGift(newGift) {
  const currentGift = gifts.find(g => g.id === newGift.id);
  currentGift ? incrementGift(currentGift) : addGift(newGift);
}

gift$.subscribe(update => {
  console.log(gifts);
  console.log(update);
});

// A plain Subject has no initial emission, so nothing is logged until the
// first 'sendGift' call; after that you'll see a log for every update.
sendGift({ id: 0 });
sendGift({ id: 3, name: 'giftC', num: 0 });
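
To get closer to the original goal of reacting when num stops changing, one possible extension (reusing the debounceTime(3000) idea from the question's own attempt) is to debounce the Subject:

import { debounceTime } from 'rxjs/operators';

// Fires only after no sendGift() call has occurred for 3 seconds,
// i.e. the gifts have stopped changing for the moment.
gift$.pipe(debounceTime(3000)).subscribe(lastUpdate => {
  console.log('quiet for 3s; last updated gift:', lastUpdate);
  // a good place to post the batched request, e.g. requestSendGift()
});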

GraphQL returning "Cannot return null for non-nullable field Query.getDate" - is my approach wrong, or my code?

I have created a resolver, schema and handler which fetch a record from DynamoDB. Now when I run a query, I get a "Cannot return null for non-nullable field Query.getDate" error. I would like to know whether my approach is wrong or whether my code needs a change.
My code: https://gist.github.com/vivek-chavan/95e7450ff73c8382a48fb5e6a5b96025
Input to the lambda:
{
  "query": "query getDate {\r\n getDate(id: \"0f92fa40-8036-11e8-b106-952d7c9eb822#eu-west-1:ba1c96e7-92ff-4d63-879a-93d5e397b18a\") {\r\n id\r\n transaction_date\r\n }\r\n }"
}
Response:
{
  "errors": [
    {
      "message": "Cannot return null for non-nullable field Query.getDate.",
      "locations": [
        {
          "line": 2,
          "column": 7
        }
      ],
      "path": [
        "getDate"
      ]
    }
  ],
  "data": null
}
Logs of the lambda function:
[ { Error: Cannot return null for non-nullable field Query.getDate.
at completeValue (/var/task/node_modules/graphql/execution/execute.js:568:13)
at completeValueCatchingError (/var/task/node_modules/graphql/execution/execute.js:503:19)
at resolveField (/var/task/node_modules/graphql/execution/execute.js:447:10)
at executeFields (/var/task/node_modules/graphql/execution/execute.js:293:18)
at executeOperation (/var/task/node_modules/graphql/execution/execute.js:237:122)
at executeImpl (/var/task/node_modules/graphql/execution/execute.js:85:14)
at execute (/var/task/node_modules/graphql/execution/execute.js:62:229)
at graphqlImpl (/var/task/node_modules/graphql/graphql.js:86:31)
at /var/task/node_modules/graphql/graphql.js:32:223
at graphql (/var/task/node_modules/graphql/graphql.js:30:10)
message: 'Cannot return null for non-nullable field Query.getDate.',
locations: [Object],
path: [Object] } ],
data: null }
2019-02-25T10:07:16.340Z 9f75d1ea-2659-490b-ba59-5289a5d18d73 { Item:
{ model: 'g5',
transaction_date: '2018-07-05T09:30:31.391Z',
id: '0f92fa40-8036-11e8-b106-952d7c9eb822#eu-west-1:ba1c96e7-92ff-4d63-879a-93d5e397b18a',
make: 'moto' } }
Thanks in advance!
This is your code:
const data = {
  getDate(args) {
    var params = {
      TableName: 'delete_this',
      Key: {
        "id": args.id
      }
    };
    client.get(params, function(err, data) {
      if (err) {
        console.log('error occurred ' + err)
      } else {
        console.log(data)
      }
    });
  },
};

const resolvers = {
  Query: {
    getDate: (root, args) => data.getDate(args),
  },
};
const resolvers = {
Query: {
getDate: (root, args) => data.getDate(args),
},
};
You're seeing that error because getDate is a Non-Null field in your schema, but it is resolving to null. Your resolver needs to return either a value of the appropriate type or a Promise that will resolve to that value. If you change data like this:
const data = {
  getDate(args) {
    return {
      id: 'someString',
      transaction_date: 'someString',
    }
  }
}
you'll see the error go away. Of course, your goal is to return data from your database, so we need to add that code back in. However, your existing code uses a callback. Anything you do inside the callback is irrelevant, because it runs after your resolver function has already returned. So we need to use a Promise instead.
While you can wrap a callback in a Promise, that shouldn't be necessary with aws-sdk, since newer versions support Promises. Something like this should be sufficient:
const data = {
  getDate(args) {
    const params = // ...
    // must return the resulting Promise here
    return client.get(params).promise().then(result => {
      return {
        // id and transaction_date based on result
      }
    })
  }
}
Or using async/await syntax:
const data = {
  async getDate(args) {
    const params = // ...
    const result = await client.get(params).promise()
    return {
      // id and transaction_date based on result
    }
  }
}
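
Putting it together, a minimal self-contained sketch (assumptions: the AWS SDK v2 DocumentClient, plus the table name and fields taken from the question's code and logs):

const AWS = require('aws-sdk');
const client = new AWS.DynamoDB.DocumentClient();

const data = {
  async getDate(args) {
    const params = {
      TableName: 'delete_this', // table name from the question's code
      Key: { id: args.id },
    };
    const result = await client.get(params).promise();
    // `Item` is absent when nothing matches; returning undefined would
    // reproduce the same non-null error, so fail loudly instead.
    if (!result.Item) {
      throw new Error(`No record found for id ${args.id}`);
    }
    return {
      id: result.Item.id,
      transaction_date: result.Item.transaction_date,
    };
  },
};

const resolvers = {
  Query: {
    getDate: (root, args) => data.getDate(args),
  },
};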
