GeoCode filter in mongoose elasticsearch - elasticsearch

I am trying to do geo point filter in mongodb in meanjs. i have used mongoosastic modules but i am not able to perform geo point filter.
here below are the mongoose schema and controller code for filter.
Mongoose schema
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
Schema = mongoose.Schema,
mongoosastic = require('mongoosastic');
var BusinessSchema = new Schema({
name: {type: String, unique: 'Name already exists', trim: true, required: 'Name is required.', es_indexed: true},
searchTags: {type: [String], es_indexed: true},
alias: {type: Array, es_indexed: true},
// geoLocation: { type: [Number], /*/ [<longitude>, <latitude>]*/ index: '2d', /*/ create the geospatial index,*/ required: 'GeoLocation is required.', es_indexed:true,es_type:'geo_point'},
geo_cords: {
type: Array
},
address: {
address1: {type: String, required: 'Address is required', trim: true},
address2: String,
city: {type: String, required: 'City is required', trim: true},
// state: {type: String, required: 'State is required', trim: true},
country: {type: String, required: 'Country is required', trim: true},
postalCode: {type: String, required: 'Postal code is required', trim: true},
neighbourhood: String
},
isActive: {
type: Boolean,
default: true,
es_indexed: true
},
dateUpdated: {
type: Date
, es_indexed: true
},
dateCreated: {
type: Date,
default: Date.now
, es_indexed: true
}
});
controller code for filter and query
var mongoose = require('mongoose'),
Business = mongoose.model('Businesses');
var query = {
"query_string": {
"multi_match": {
"query": categoryIds.join(' OR '),
"fields": ["categoryIds", "relatedCategoryIds"]
}
},
"filter": {
"bool": {
"should": [
{"term": {"address.postalCode": "110016"}},
{"geo_distance": {
"distance": "50km",
"geo_cords": [-122.3050, 37.9174]
}
}
],
}
}
}
Business.search(query, function (err, results) {
// sendResponse(req, res, err, results)
if (!err) {
res.json(results);
} else {
res.status(400).send({message: 'Business Not Found'})
}
});
while doing this i am getting a long error saying
QueryParsingException[[businessess] failed to find geo_point field [geo_cords]

According to the documentation of mongoosastic
Geo mapping
Prior to index any geo mapped data (or calling the synchronize), the mapping must be manualy created with the createMapping (see above).
First, in your schema, define 'geo_cords' this way:
geo_cords: : {
geo_point: {
type: String,
es_type: 'geo_point',
es_lat_lon: true
},
lat: { type: Number },
lon: { type: Number }
}
Add an es_type: 'object' to each Array or embbeded type
alias: {type: Array, es_indexed: true, es_type: 'object'}
Then call .createMapping() on the model just after you've created it.

Related

model.hasMany called with something that's not a subclass of Sequelize.Model

I am getting the below error whenever I tried to call any call from serverless framework lambda
[offline] _____ HANDLER RESOLVED _____
offline: Failure: product.hasMany called with something that's not a subclass of Sequelize.Model
Error: product.hasMany called with something that's not a subclass of Sequelize.Model
at Function.hasMany (C:\Users\Kiran\Documents\Projects\Rentals-Backend\node_modules\sequelize\lib\associations\mixin.js:18:13)
at Function.Product.associate (C:\Users\Kiran\Documents\Projects\Rentals-Backend\entity\product.js:21:17)
IMPORTANT
Below code is the answer for the above error. You might missed any steps. So you can refer and fix. Thanks Anatoly who helped me to solve the problem.
Product model:
const { STRING, BOOLEAN, INTEGER } = require("sequelize");
module.exports = (sequelize, DataTypes) => {
const Product = sequelize.define("product", {
id: { type: INTEGER, primaryKey: true, autoIncrement: true },
name: { type: STRING },
description: { type: STRING, allowNull: true },
purchase_price: { type: STRING },
tax: { type: STRING },
sale_price: { type: STRING },
categoryId: { type: STRING },
status: { type: BOOLEAN, defaultValue: 0 },
created_on: { type: INTEGER, allowNull: true },
updated_on: { type: INTEGER, allowNull: true },
}, {
timestamps: false,
freezeTableName: true,
})
Product.associate = function (models) {
Product.hasMany(models.product_image, { as: "images" });
Product.belongsTo(models.product_category, { as: "category", foreignKey: 'categoryId' });
};
return Product;
}
Image model:
const { STRING, BOOLEAN, INTEGER } = require("sequelize");
module.exports = (sequelize, DataTypes) => {
const ProductImage = sequelize.define("product_image", {
id: { type: INTEGER, primaryKey: true, autoIncrement: true },
productId: { type: INTEGER },
fileName: { type: STRING },
url: { type: STRING },
position: { type: INTEGER },
isDefault: { type: BOOLEAN, defaultValue: 0 },
shopId: { type: STRING },
status: { type: BOOLEAN, defaultValue: 0 },
created_on: { type: INTEGER, allowNull: true },
updated_on: { type: INTEGER, allowNull: true },
}, {
timestamps: false,
freezeTableName: true,
})
return ProductImage;
}
Category model:
const { STRING, BOOLEAN, INTEGER } = require("sequelize");
module.exports = (sequelize, DataTypes) => {
const ProductCategory = sequelize.define("product_category", {
id: { type: INTEGER, primaryKey: true, autoIncrement: true },
name: { type: STRING },
description: { type: STRING, allowNull: true },
status: { type: BOOLEAN, defaultValue: 0 },
created_on: { type: INTEGER, allowNull: true },
updated_on: { type: INTEGER, allowNull: true },
}, {
timestamps: false,
freezeTableName: true,
});
return ProductCategory;
}
This is the config file where we initialize sequelize
Config file
const Sequelize = require('sequelize')
const fs = require('fs')
const path = require('path')
const db = {}
const models = path.join(__dirname, '..', 'entity')
var basename = path.basename(module.filename)
const sequelize = new Sequelize(
process.env.DB_NAME,
process.env.DB_USER,
process.env.DB_PASSWORD,
{
dialect: 'mysql',
host: process.env.DB_HOST,
port: process.env.DB_PORT,
logging: false
}
)
fs
.readdirSync(models)
.filter(function (file) {
return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js')
})
.forEach(function (file) {
var model = require(path.join(models, file))(
sequelize,
Sequelize.DataTypes
);
db[model.name] = model;
})
Object.keys(db).forEach(function (modelName) {
if (db[modelName].associate) {
db[modelName].associate(db)
}
})
db.Sequelize = Sequelize
db.sequelize = sequelize
module.exports = db
Here we are calling product details.
Calling function
const db = require('../config/sequelize-config');
exports.getProductById = (query, username, shopId) => {
return new Promise((resolve, reject) => {
db.product.findOne({
where: {
id: query.id
},
attributes: ['id', 'name', 'description', ['purchase_price', 'purchasePrice'], 'tax', ['sale_price', 'salePrice']],
include: [{
model: db.product_image,
as: 'images',
where: {
status: 1
},
required: false,
attributes: ['id', 'fileName', 'position', 'url']
},
{
model: db.product_category,
as: 'category',
required: false,
attributes: ['id', 'name']
}]
}).then(product => {
if (product) {
resolve({ [KEY_STATUS]: 1, [KEY_MESSAGE]: "Product details fetched successfully", [KEY_DATA]: product });
} else {
reject({ [KEY_STATUS]: 0, [KEY_MESSAGE]: "Product details fetch failed" });
}
}).catch(error => {
reject({ [KEY_STATUS]: 0, [KEY_MESSAGE]: "Product details fetch failed", [KEY_ERROR]: error.message });
});
})
}
To avoid cross-reference errors and similar ones I recommend converting model definitions to functions and registering models and associations in the same one module, see this answer and the question

how to represent required fields in index mapping in elastic search

mapping: {
dynamic: 'strict',
properties: {
name: { type: 'text', index: false },
email: { type: 'keyword' },
phoneNumber: { type: 'text', index: false },
},
},
How to add the required field for name, email and phone number?

Document contains at least one immense term in field=“errormsg.keyword” (whose UTF8 encoding is longer than the max length 32766

i get this error when logstash try to write in elasticsearch.it create the index but there is no data available on elasticsearch.
Document contains at least one immense term in field="errormsg.keyword" (whose UTF8 encoding is longer than the max length 32766
this is my pipeline.conf.
input {
file {
path => "c:/logstash.log"
start_position => "beginning"
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601}"
negate => true
what => "previous"
}
}
}
filter {
grok{
match => { "message" => "%{TIME:timestamp} %{LOGLEVEL:LEVEL} %{GREEDYDATA:errormsg}" }
}
}
output {
if "ERROR" in [LEVEL]
{
elasticsearch {
hosts=>"localhost:9200"
}
}
stdout { codec => rubydebug }
}
---Output of curl -XGET localhost:9200/logstash/_mapping
{
logstash-2017.06.16: {
mappings: {
_default_: {
_all: {
enabled: true,
norms: false
},
dynamic_templates: [
{
message_field: {
path_match: "message",
match_mapping_type: "string",
mapping: {
norms: false,
type: "text"}}},
{
string_fields: {
match: "*",
match_mapping_type: "string",
mapping: {
fields: {
keyword: {
type: "keyword"}},
norms: false,
type: "text"}}}],
properties: {
#timestamp: {
type: "date",
include_in_all: false},
#version: {
type: "keyword",
include_in_all: false},
geoip: {
dynamic: "true",
properties: {
ip: {type: "ip"},
latitude: {
type: "half_float"},
location: {
type: "geo_point"},
longitude: {type: "half_float"}}}}},
logs: {
_all: {
enabled: true,
norms: false},
dynamic_templates: [
{message_field: {
path_match: "message",
match_mapping_type: "string",
mapping: {norms: false,type: "text"}}},
{string_fields: {
match: "*",match_mapping_type: "string",
mapping: {
fields: {
keyword: {
type: "keyword"}},
norms: false,
type: "text"}}}],
properties: {
#timestamp: {
type: "date",
include_in_all: false},
#version: {
type: "keyword",
include_in_all: false},
LEVEL: {
type: "text",
norms: false,
fields: {
keyword: {
type: "keyword"}}},
errormsg: {
type: "text",norms: false,
fields: {
keyword: {
type: "keyword"}}},
geoip: {dynamic: "true",
properties: {
ip: {type: "ip"},
latitude: {type: "half_float"},
location: {type: "geo_point"},
longitude: {type: "half_float"}}},
host: {type: "text",norms: false,
fields: {
keyword: {type: "keyword"}}},
message: {type: "text",norms: false},
path: {type: "text",norms: false,
fields: {
keyword: {
type: "keyword"}}},
tags: {type: "text",norms: false,
fields: {
keyword: {
type: "keyword"}}},
timestamp: {type: "text",norms: false,
fields: {
keyword: {
type: "keyword"}}}}}}}}
and this is for example the error to parse:
17:37:17,103 ERROR [org.apache.catalina.core.ContainerBase.[jboss.web] Servlet.service()java.io.FileNotFoundException:
at org.thymeleaf.templateresource.ServletContextTemplateResource
at org.thymeleaf.templateparser.markup.AbstractMarkupTemplateParser.
17:37:17,104 ERROR.....
thank you so much for your help #xeraa

Mongoosastic: error sorting by distance

I am getting the following Elastic Search error when I try to sort search results by distance with Mongoosastic:
{ message: 'SearchPhaseExecutionException[Failed to execute phase
[query_fetch], all shards failed; shardFailures
{[rQFD7Be9QbWIfTqTkrTL7A][users][0]: SearchParseException[[users][0]:
query[filtered(+keywords:cafe)->GeoDistanceFilter(location,
SLOPPY_ARC, 25000.0, -70.0264952, 41.2708115)],from[-1],size[-1]:
Parse Failure [Failed to parse source
[{"timeout":60000,"sort":[{"[object Object]":{}}]}]]]; nested:
SearchParseException[[users][0]:
query[filtered(+keywords:cafe)->GeoDistanceFilter(location,
SLOPPY_ARC, 25000.0, -70.0264952, 41.2708115)],from[-1],size[-1]:
Parse Failure [No mapping found for [[object Object]] in order to sort
on]]; }]' }
See bellow for code sample:
var query = {
"filtered": {
"query": {
"bool": {
"must": [
{
"term": {
"keywords": "cafe"
}
}
]
}
},
"filter": {
"geo_distance": {
"distance": "25km",
"location": [
41.2708115,
-70.0264952
]
}
}
}
};
var opts = {
"sort": [
{
"_geo_distance": {
"location": [
41.2708115,
-70.0264952
],
"order": "asc",
"unit": "km",
"distance_type": "plane"
}
}
],
"script_fields": {
"distance": "doc[\u0027location\u0027].distanceInMiles(41.2708115, -70.0264952)"
}
};
User.search(query, opts, function (err, data) {
if (err || !data || !data.hits || !data.hits.hits || !data.hits.hits.length) {
return callback(err);
}
var total = data.hits.total,
//page = params.page || 1,
per_page = query.size,
from = query.from,
//to = from +
page = query.from / query.size,
rows = data.hits.hits || [];
for (var i = 0; i < rows.length; i++) {
rows[i].rowsTotal = total;
}
callback(err, toUser(rows, params));
});
Here is the User schema:
var schema = new Schema({
name: {type: String, default: '', index: true, es_type: 'string', es_indexed: true},
location: {type: [Number], es_type: 'geo_point', es_indexed: true, index: true},
shareLocation: {type: Boolean, default: false, es_type: 'boolean', es_indexed: true},
lastLocationSharedAt: {type: Date},
email: {type: String, default: '', index: true, es_type: 'string', es_indexed: true},
birthday: {type: String, default: ''},
first_name: {type: String, default: ''},
last_name: {type: String, default: ''},
gender: {type: String, default: ''},
website: {type: String, default: '', index: true, es_indexed: true},
verified: {type: Boolean, default: false},
});
I am also getting an error, I think the upgrade of Mongoosastic is double wrapping the code. It def seems to be based on 'sort' rather than on search but still reviewing. Val seems to have a better idea of what is going on as perhaps it has something to do with user schema rather than function.
I am using a similar schema and just upgraded and encountered issues.

Mongoosastic indexes the wrong fields

I have set up mongoosastic to index my documents to elastic search. The trouble is, despite the schema set, all fields are indexed.
My Schema:
id: { type: String, index: true },
title: String,
title_lower: { type: String, es_indexed: true, es_boost: 2.0 },
slug: String,
city: { type: String, es_indexed: true },
country: { type: String, es_indexed: true },
genre: [{ type: String, es_indexed: true }],
description: { type: String, es_indexed: true },
gallery: [{ title: String, images: { thumbnail: String, medium: String, full: String }}],
date: { start: String, end: String },
price: { min: Number, max: Number },
temperature: Number,
weather: String,
comments: [CommentSchema],
lineup: { type: [LineupSchema], es_indexed: true }
Here I want only title_lower, city, genre, description and lineup indexed. But checking elastic like: http://localhost:9200/events/_search I can see that all fields are added under _source.

Resources