QueryParsingException failed to find geo_point field - elasticsearch

I am getting QueryParsingException[[listings] failed to find geo_point field [location.coords]] and can't quite figure out why.
My query:
esClient.search({
  index: 'listings',
  body: {
    query: {
      filtered: {
        query: {
          match_all: {}
        },
        filter: {
          geo_distance: {
            distance: '10km',
            'location.coords': {
              lat: parseFloat(query.point.lat),
              lon: parseFloat(query.point.lon)
            }
          }
        }
      }
    }
  }
}, function(err, response) {
  if (err) return console.log(err);
  console.log(response);
});
My mapping (as you can see, yes, I did use the geo_point type):
body: {
  properties: {
    location: {
      type: 'object',
      properties: {
        country: {
          type: 'integer'
        },
        address: {
          type: 'string'
        },
        suburb: {
          type: 'string'
        },
        coords: {
          type: 'geo_point',
          precision: '3m'
        }
      }
    }
  }
}
EDIT:
After looking up http://localhost:9200/listings/_mapping/my-mapping, I noticed the coords field has lat and lon set to double. Could this have something to do with it?

OK, so it turns out this was happening because of how I was defining the geo_point's precision (it needs to be wrapped in a fielddata property). Oddly, the JS API I'm using didn't throw any kind of error that I recall.
Correct:
coords: {
  type: 'geo_point',
  fielddata: {
    format: 'compressed',
    precision: '3m'
  }
}
Incorrect:
coords: {
  type: 'geo_point',
  precision: '3m'
}
And voila...
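For reference, here is a minimal sketch of applying the corrected mapping with the legacy elasticsearch JS client's indices.putMapping call (the 'listing' type name is an assumption, not taken from the original post):
esClient.indices.putMapping({
  index: 'listings',
  type: 'listing', // assumed document type name
  body: {
    properties: {
      location: {
        type: 'object',
        properties: {
          coords: {
            type: 'geo_point',
            fielddata: {
              format: 'compressed',
              precision: '3m'
            }
          }
        }
      }
    }
  }
}, function(err, resp) {
  if (err) return console.log(err);
  console.log('mapping updated', resp);
});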

Related

How do I build a custom graphql query with support for Mongodb aggregations in Strapi?

I tried building a custom GraphQL query with Strapi as per below:
module.exports = {
  definition: `
    type flatOnts {
      site_name: String
      unit_no: String
      firstname: String
      lastName: String
      description: String
      isp_name: String
      serial_number: String
      status: Boolean
    }
  `,
  query: `
    flattenOntObj: [flatOnts]
  `,
  type: {},
  resolver: {
    Query: {
      flattenOntObj: {
        description: "Return a flat ont object",
        resolverOf: "application::onts.onts.aggregate",
        resolver: async (obj, options, ctx) => {
          const res = await strapi.api.onts.services.onts.aggregate([
            {
              $lookup: {
                from: "onts",
                localField: "ont",
                foreignField: "_id",
                as: "ont_details",
              },
            },
            {
              $replaceRoot: {
                newRoot: {
                  $mergeObjects: [
                    {
                      $arrayElemAt: ["$ont_details", 0],
                    },
                    "$$ROOT",
                  ],
                },
              },
            },
          ]);
          console.log(res);
        },
      },
    },
  },
};
However, upon running this in the GraphQL playground, I am presented with a "Forbidden" error.
Any ideas or pointers?
Appreciate any assistance.
Never mind, I got it right. I missed a section of the Strapi documentation that explains it: https://strapi.io/documentation/3.0.0-beta.x/concepts/queries.html#custom-queries
In the file api/model/services/model.js (in my case api/onts/services/ont.js):
module.exports = {
  aggregate: async (aggArray) => {
    const res = await strapi.query("ont").model.aggregate(aggArray);
    return res;
  },
};
and then in api/onts/config/schema.graphql.js:
module.exports = {
  definition: `
    type flatOnts {
      site_name: String
      unit_no: String
      firstname: String
      lastName: String
      description: String
      isp_name: String
      serial_number: String
      status: Boolean
    }
  `,
  query: `
    flattenOntObj: [flatOnts]
  `,
  type: {},
  resolver: {
    Query: {
      flattenOntObj: {
        description: "Return a flat ont object",
        // policies: ["plugins::users-permissions.isAuthenticated"],
        resolverOf: "application::onts.onts.find",
        resolver: async (obj, options, ctx) => {
          const aggregationArray = [
            {
              $lookup: {
                from: "onts",
                localField: "ont",
                foreignField: "_id",
                as: "ont_details",
              },
            },
            {
              $replaceRoot: {
                newRoot: {
                  $mergeObjects: [
                    {
                      $arrayElemAt: ["$ont_details", 0],
                    },
                    "$$ROOT",
                  ],
                },
              },
            },
          ];
          const res = await strapi.api.onts.services.onts.aggregate(
            aggregationArray
          );
          return res;
        },
      },
    },
  },
};
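With both files in place, a query along these lines in the GraphQL playground (field names taken from the flatOnts definition above) should return the flattened documents:
query {
  flattenOntObj {
    site_name
    unit_no
    firstname
    lastName
    serial_number
    status
  }
}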

schema type for an unknown object in graphql

I have the following object coming back from the db:
{
  "total_rows": 200,
  "bookmark": "g1AAAABteJzLYWBgYMpgTmEQTM4vTc5ISXIwNDLXMwBCwxyQVCJDUv3___-zMpjc7D8wgEEiCx71eSwgJQ1A6j-GtiwA6MscCg",
  "rows": [
    {
      "id": "51a1ff51b3b4719d05e40ac4bb0d0566",
      "objects": {
        "0": {
          "type": "ipv4-addr",
          "value": "192.168.1.10",
          "resolves_to_refs": "2"
        },
        "1": {
          "type": "network-traffic"
        }
      }
    }
  ],
  "counts": {
    "created_by_ref": {
      "0203a7e6-b174-4af9-812d-ab889816e868": 1,
      "0250789a-14c3-4751-b4a0-c017af82b8f1": 1,
      "03c63db6-2a84-4627-88be-a83208d524e6": 1,
      "05cba3da-11ff-4a7a-aae9-0b1614cd5300": 1,
      "fc825d33-26ea-4563-9478-2e1887b87112": 1
    },
    "file.hashes.MD5": {
      "UNDEFINED": 200
    },
    "file.name": {
      "UNDEFINED": 200
    },
    "ipv4_addr.value": {
      "127.0.0.1": 200,
      "192.168.1.10": 200
    },
    "last_observed": {
      "1583503380000": 5,
      "1583589780000": 9,
      "1585749840000": 12
    }
  },
  "num_of_rows": 10
}
I am trying to fit a GraphQL schema to the above. I have the following, which works partially:
const graphql = require("graphql");
const { GraphQLObjectType, GraphQLString, GraphQLSchema, GraphQLInt, GraphQLList } = graphql;

const SearchResultType = new GraphQLObjectType({
  name: "SearchResult",
  fields: () => ({
    total_rows: { type: GraphQLInt },
    bookmark: { type: GraphQLString },
    //rows: { type: new GraphQLList(GraphQLInt) },
    num_of_rows: { type: GraphQLInt }
  })
});

const RootQuery = new GraphQLObjectType({
  name: "RootQueryType",
  fields: {
    searchResult: {
      type: SearchResultType,
      args: { id: { type: GraphQLString } },
      resolve(parentValue: any, args: any) {
        console.log(args)
        return resultMock;
      }
    }
  }
});

module.exports = new GraphQLSchema({
  query: RootQuery,
});
The above works for those data types which are defined. However, there are some objects in the resultMock above like:
"objects": {
"0": {
"type": "ipv4-addr",
"value": "192.168.1.10",
"resolves_to_refs": "2"
},
"1": {
"type": "network-traffic"
}
or
"counts": {
"created_by_ref": {
"0203a7e6-b174-4af9-812d-ab889816e868": 1,
"0250789a-14c3-4751-b4a0-c017af82b8f1": 1,
"03c63db6-2a84-4627-88be-a83208d524e6": 1,
"05cba3da-11ff-4a7a-aae9-0b1614cd5300": 1,
"fc825d33-26ea-4563-9478-2e1887b87112": 1
So as you can see, these objects' keys are random, or at least not guessable until we receive them. Is there any way I can define something like rows: { type: new GraphQLList(any random object we do not know) } as a type in the schema below:
const SearchResultType = new GraphQLObjectType({
  name: "SearchResult",
  fields: () => ({
    total_rows: { type: GraphQLInt },
    bookmark: { type: GraphQLString },
    rows: { type: new GraphQLList(any random object we do not know) },
    num_of_rows: { type: GraphQLInt }
  })
});
You can use the GraphQL JSON Scalar (for example from this implementation). I would not recommend doing this though (in fact years ago I did a talk "GraphQL JSON Scalar considered harmful"). Instead, you might want to transform map-like objects into lists of key-value pairs.
So for example for your counts object you could do the following:
type CreatedByRef {
  key: ID
  count: Int
}

Object.keys(counts.created_by_ref).map(key => ({
  key,
  count: counts.created_by_ref[key],
}));
This will change the shape of the result but preserve all the properties of GraphQL.
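Since the question builds its schema with graphql-js rather than SDL, the same key-value idea could look roughly like this (a sketch only; the CreatedByRef and SearchResultCounts type names are illustrative, not from the original answer):
const { GraphQLObjectType, GraphQLID, GraphQLInt, GraphQLList } = require("graphql");

const CreatedByRefType = new GraphQLObjectType({
  name: "CreatedByRef",
  fields: () => ({
    key: { type: GraphQLID },
    count: { type: GraphQLInt }
  })
});

const SearchResultCountsType = new GraphQLObjectType({
  name: "SearchResultCounts",
  fields: () => ({
    created_by_ref: {
      type: new GraphQLList(CreatedByRefType),
      // turn the map-like object into a list of { key, count } pairs
      resolve: (counts) =>
        Object.keys(counts.created_by_ref).map(key => ({
          key,
          count: counts.created_by_ref[key],
        }))
    }
  })
});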

Static values in GraphQL

Is there a way to produce static values in a GraphQL query?
For example, let's say that I have a user object with a name and email field. For some reason, I always want the status of a user to be "ACCEPTED". How can I write a query that accomplishes this?
What I want to do:
query {
  user(id: 1) {
    email
    name
    status: "ACCEPTED"
  }
}
The result I want:
{
  "data": {
    "user": {
      "email": "me@myapp.com",
      "name": "me",
      "status": "ACCEPTED"
    }
  }
}
You can make your resolve function return a static value, e.g. like this in JavaScript:
const HomeWorldType = new GraphQLObjectType({
  name: 'HomeWorld',
  fields: () => {
    return {
      id: {
        type: GraphQLInt,
        resolve: () => 7,
      },
      name: { type: GraphQLString },
      climate: { type: GraphQLString },
      population: { type: GraphQLString },
    }
  }
})
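Applied to the user example from the question, a minimal sketch (assuming a UserType with the fields shown in the question) would be:
const UserType = new GraphQLObjectType({
  name: 'User',
  fields: () => ({
    email: { type: GraphQLString },
    name: { type: GraphQLString },
    status: {
      type: GraphQLString,
      // always resolve to the static value
      resolve: () => 'ACCEPTED',
    },
  }),
})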

Mongoose model configuration for elasticsearch

I'm having an issue with mongoosastic in combination with KeystoneJS. I want to search for all the posts of a specific user with a match on the title. I have the following model:
var keystone = require('keystone'),
    Types = keystone.Field.Types,
    mongoosastic = require('mongoosastic');

User = keystone.list('User');

var Post = new keystone.List('Post');

Post.add({
  title: {
    type: String,
    required: true,
    es_indexed: true
  },
  state: {
    type: Types.Select,
    options: 'draft, published, archived',
    default: 'draft',
    es_indexed: true
  },
  author: {
    type: Types.Relationship,
    ref: 'User',
    es_schema: User,
    es_include_in_parent: true,
    es_indexed: true
  },
  publishedDate: {
    type: Types.Date,
    index: true,
    dependsOn: {
      state: 'published'
    }
  },
  content: {
    extended: {
      type: Types.Html,
      wysiwyg: true,
      height: 400,
      es_indexed: true
    }
  },
});

Post.schema.plugin(mongoosastic, {
  hosts: [
    process.env.SEARCHBOX_SSL_URL
  ]
});

Post.defaultColumns = 'title, state|20%, author|20%, publishedDate|20%';
Post.register();

Post.model.createMapping(function(err, mapping) {
  if (err) {
    console.log('error creating mapping', err);
  } else {
    console.log('mapping created for Post');
  }
});

User.schema.plugin(mongoosastic, {
  populate: [
    {path: 'author'}
  ]
});
I tried the approach of "Indexing Mongoose references" (https://github.com/mongoosastic/mongoosastic#indexing-mongoose-references) but I can't get it to work. Elasticsearch is not returning any hits. My search query:
{
  "query": {
    "filtered": {
      "query": { "match": { "title": "req.query.query" }},
      "filter": { "term": { "author": "req.query.userId" }} // or author._id
    }
  }
}
Can someone provide me the correct Post model configuration & search query? Thanks!

GeoCode filter in mongoose elasticsearch

I am trying to do a geo point filter in MongoDB in MEAN.JS. I have used the mongoosastic module, but I am not able to perform the geo point filter.
Below are the Mongoose schema and the controller code for the filter.
Mongoose schema
'use strict';

/**
 * Module dependencies.
 */
var mongoose = require('mongoose'),
    Schema = mongoose.Schema,
    mongoosastic = require('mongoosastic');

var BusinessSchema = new Schema({
  name: {type: String, unique: 'Name already exists', trim: true, required: 'Name is required.', es_indexed: true},
  searchTags: {type: [String], es_indexed: true},
  alias: {type: Array, es_indexed: true},
  // geoLocation: { type: [Number], /* [<longitude>, <latitude>] */ index: '2d', /* create the geospatial index */ required: 'GeoLocation is required.', es_indexed: true, es_type: 'geo_point'},
  geo_cords: {
    type: Array
  },
  address: {
    address1: {type: String, required: 'Address is required', trim: true},
    address2: String,
    city: {type: String, required: 'City is required', trim: true},
    // state: {type: String, required: 'State is required', trim: true},
    country: {type: String, required: 'Country is required', trim: true},
    postalCode: {type: String, required: 'Postal code is required', trim: true},
    neighbourhood: String
  },
  isActive: {
    type: Boolean,
    default: true,
    es_indexed: true
  },
  dateUpdated: {
    type: Date,
    es_indexed: true
  },
  dateCreated: {
    type: Date,
    default: Date.now,
    es_indexed: true
  }
});
Controller code for the filter and query:
var mongoose = require('mongoose'),
    Business = mongoose.model('Businesses');

var query = {
  "query_string": {
    "multi_match": {
      "query": categoryIds.join(' OR '),
      "fields": ["categoryIds", "relatedCategoryIds"]
    }
  },
  "filter": {
    "bool": {
      "should": [
        {"term": {"address.postalCode": "110016"}},
        {"geo_distance": {
          "distance": "50km",
          "geo_cords": [-122.3050, 37.9174]
        }}
      ]
    }
  }
};

Business.search(query, function (err, results) {
  // sendResponse(req, res, err, results)
  if (!err) {
    res.json(results);
  } else {
    res.status(400).send({message: 'Business Not Found'});
  }
});
While doing this I am getting a long error saying:
QueryParsingException[[businessess] failed to find geo_point field [geo_cords]
According to the documentation of mongoosastic:
Geo mapping
Prior to indexing any geo mapped data (or calling synchronize), the mapping must be manually created with createMapping (see above).
First, in your schema, define 'geo_cords' this way:
geo_cords: {
  geo_point: {
    type: String,
    es_type: 'geo_point',
    es_lat_lon: true
  },
  lat: { type: Number },
  lon: { type: Number }
}
Add an es_type: 'object' to each Array or embedded type:
alias: {type: Array, es_indexed: true, es_type: 'object'}
Then call .createMapping() on the model just after you've created it.
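For example, with the Businesses model shown above, a call along these lines (a sketch, mirroring the createMapping usage shown earlier on this page) would create the geo_point mapping before any documents are indexed or synchronize is called:
Business.createMapping(function (err, mapping) {
  if (err) {
    console.log('error creating mapping', err);
  } else {
    console.log('mapping created for Business', mapping);
  }
});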

Resources