I'm trying to use analyzers to return alphabetically sorted data, but my results always come back in lexicographical order. I've tried multiple implementations from here and other sources to no avail. Is the issue in my tokenizer? Or is my use of custom analyzers wrong? Thanks in advance.
await client.indices.create({
index: esIndexReport,
body: {
settings: {
analysis: {
filter: {
min_term_length: {
type: 'length',
min: 2,
},
},
analyzer: {
name_analyzer: {
tokenizer: 'whitespace',
filter: [
'lowercase',
'min_term_length',
],
},
min_term_analyzer: {
tokenizer: 'standard',
filter: [
'lowercase',
'min_term_length',
],
},
},
},
},
mappings: {
report: {
properties: {
reportId: {
type: 'text',
analyzer: 'min_term_analyzer',
},
reportName: {
type: 'text',
analyzer: 'name_analyzer',
},
description: {
type: 'text',
analyzer: 'name_analyzer',
},
author: {
type: 'text',
analyzer: 'min_term_analyzer',
},
icType: {
type: 'text',
analyzer: 'min_term_analyzer',
},
status: {
type: 'text',
analyzer: 'min_term_analyzer',
},
lastUpdatedAt: {
type: 'text',
analyzer: 'min_term_analyzer',
},
'sort.reportName': {
type: 'text',
fielddata: true,
},
'sort.description': {
type: 'text',
fielddata: true,
},
'sort.author': {
type: 'text',
fielddata: true,
},
'sort.status': {
type: 'text',
fielddata: true,
},
'sort.lastUpdatedAt': {
type: 'text',
fielddata: true,
},
},
},
},
},
});
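For reference, one common pattern for case-insensitive alphabetical sorting (a sketch, not necessarily the only fix; the sort_analyzer name and the search call are illustrative): an analyzed text field is split into many terms, and sorting picks one term per document (the minimum, for ascending order), so multi-word values sort by whichever token happens to sort first. Analyzing the dedicated sort fields with the keyword tokenizer plus a lowercase filter yields exactly one lowercased token per value, which then sorts alphabetically:

// Sketch: emit one lowercased token per value so the whole string sorts
// alphabetically instead of per-term. Reuses the client, index name, and
// field names from the question; everything else is illustrative.
await client.indices.create({
  index: esIndexReport,
  body: {
    settings: {
      analysis: {
        analyzer: {
          sort_analyzer: {
            tokenizer: 'keyword',   // keep the full value as a single token
            filter: ['lowercase'],  // case-insensitive ordering
          },
        },
      },
    },
    mappings: {
      report: {
        properties: {
          // ...the other analyzers and fields from the question stay as-is...
          'sort.reportName': {
            type: 'text',
            analyzer: 'sort_analyzer',
            fielddata: true,
          },
        },
      },
    },
  },
});

// Then sort on the single-token field:
await client.search({
  index: esIndexReport,
  body: {
    query: { match_all: {} },
    sort: [{ 'sort.reportName': { order: 'asc' } }],
  },
});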
I get this error when Logstash tries to write to Elasticsearch. It creates the index, but no data is available in Elasticsearch.
Document contains at least one immense term in field="errormsg.keyword" (whose UTF8 encoding is longer than the max length 32766
This is my pipeline.conf:
input {
file {
path => "c:/logstash.log"
start_position => "beginning"
codec => multiline {
pattern => "^%{TIMESTAMP_ISO8601}"
negate => true
what => "previous"
}
}
}
filter {
grok {
match => { "message" => "%{TIME:timestamp} %{LOGLEVEL:LEVEL} %{GREEDYDATA:errormsg}" }
}
}
output {
if "ERROR" in [LEVEL]
{
elasticsearch {
hosts => "localhost:9200"
}
}
stdout { codec => rubydebug }
}
Output of curl -XGET localhost:9200/logstash/_mapping:
{
  "logstash-2017.06.16": {
    "mappings": {
      "_default_": {
        "_all": { "enabled": true, "norms": false },
        "dynamic_templates": [
          { "message_field": {
              "path_match": "message",
              "match_mapping_type": "string",
              "mapping": { "norms": false, "type": "text" } } },
          { "string_fields": {
              "match": "*",
              "match_mapping_type": "string",
              "mapping": {
                "fields": { "keyword": { "type": "keyword" } },
                "norms": false,
                "type": "text" } } }
        ],
        "properties": {
          "@timestamp": { "type": "date", "include_in_all": false },
          "@version": { "type": "keyword", "include_in_all": false },
          "geoip": {
            "dynamic": "true",
            "properties": {
              "ip": { "type": "ip" },
              "latitude": { "type": "half_float" },
              "location": { "type": "geo_point" },
              "longitude": { "type": "half_float" } } }
        }
      },
      "logs": {
        "_all": { "enabled": true, "norms": false },
        "dynamic_templates": [
          { "message_field": {
              "path_match": "message",
              "match_mapping_type": "string",
              "mapping": { "norms": false, "type": "text" } } },
          { "string_fields": {
              "match": "*",
              "match_mapping_type": "string",
              "mapping": {
                "fields": { "keyword": { "type": "keyword" } },
                "norms": false,
                "type": "text" } } }
        ],
        "properties": {
          "@timestamp": { "type": "date", "include_in_all": false },
          "@version": { "type": "keyword", "include_in_all": false },
          "LEVEL": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } },
          "errormsg": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } },
          "geoip": {
            "dynamic": "true",
            "properties": {
              "ip": { "type": "ip" },
              "latitude": { "type": "half_float" },
              "location": { "type": "geo_point" },
              "longitude": { "type": "half_float" } } },
          "host": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } },
          "message": { "type": "text", "norms": false },
          "path": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } },
          "tags": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } },
          "timestamp": { "type": "text", "norms": false,
            "fields": { "keyword": { "type": "keyword" } } }
        }
      }
    }
  }
}
And this is an example of the error to parse:
17:37:17,103 ERROR [org.apache.catalina.core.ContainerBase.[jboss.web] Servlet.service()java.io.FileNotFoundException:
at org.thymeleaf.templateresource.ServletContextTemplateResource
at org.thymeleaf.templateparser.markup.AbstractMarkupTemplateParser.
17:37:17,104 ERROR.....
Thank you so much for your help @xeraa
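One common mitigation, sketched below with the Elasticsearch Node.js client (a curl call with the same JSON body works equally well; the template name and the ignore_above value are assumptions, not something from the original post): Lucene refuses any single term longer than 32766 bytes, and the default string_fields dynamic template maps every string, including the huge multiline errormsg, to an untruncated keyword sub-field. Adding ignore_above caps that sub-field while keeping the value in _source and in the analyzed text field.

// Sketch: overlay the logstash-* template so dynamically mapped string
// fields (like errormsg) get a capped keyword sub-field. Values longer
// than ignore_above are skipped for the single keyword term but remain
// in _source and in the analyzed text field. 8191 chars is a conservative
// cap, since a UTF-8 character is at most 4 bytes (4 * 8191 < 32766).
const elasticsearch = require('elasticsearch');
const client = new elasticsearch.Client({ host: 'localhost:9200' });

await client.indices.putTemplate({
  name: 'logstash-ignore-above',
  body: {
    template: 'logstash-*',
    order: 1, // merged on top of the default logstash template
    mappings: {
      _default_: {
        dynamic_templates: [
          {
            string_fields: { // same name as the default, so it overrides it
              match: '*',
              match_mapping_type: 'string',
              mapping: {
                type: 'text',
                norms: false,
                fields: {
                  keyword: { type: 'keyword', ignore_above: 8191 },
                },
              },
            },
          },
        ],
      },
    },
  },
});

Note that a template change only affects indices created afterwards (e.g. the next daily logstash-YYYY.MM.DD index); existing indices keep their old mapping.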
I have set up mongoosastic to index my documents into Elasticsearch. The trouble is that, despite the schema settings, all fields are indexed.
My Schema:
id: { type: String, index: true },
title: String,
title_lower: { type: String, es_indexed: true, es_boost: 2.0 },
slug: String,
city: { type: String, es_indexed: true },
country: { type: String, es_indexed: true },
genre: [{ type: String, es_indexed: true }],
description: { type: String, es_indexed: true },
gallery: [{ title: String, images: { thumbnail: String, medium: String, full: String }}],
date: { start: String, end: String },
price: { min: Number, max: Number },
temperature: Number,
weather: String,
comments: [CommentSchema],
lineup: { type: [LineupSchema], es_indexed: true }
Here I want only title_lower, city, genre, description and lineup indexed. But checking Elasticsearch at http://localhost:9200/events/_search I can see that all fields are added under _source.
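One thing worth checking first (a sketch; the EventSchema and Event names are assumed, since the question doesn't show that part): mongoosastic only turns the es_indexed flags into an Elasticsearch mapping when createMapping() is called. If documents are indexed without an explicit mapping, Elasticsearch dynamically maps every field it receives, and _source always stores the full document that was sent, regardless of what the mapping marks as indexed.

// Sketch, assuming the schema fields from the question are wrapped like this:
var mongoose = require('mongoose'),
    mongoosastic = require('mongoosastic');

var EventSchema = new mongoose.Schema({
  // ...the fields from the question, es_indexed flags included, e.g.:
  title_lower: { type: String, es_indexed: true, es_boost: 2.0 },
  city: { type: String, es_indexed: true },
});

EventSchema.plugin(mongoosastic);
var Event = mongoose.model('Event', EventSchema);

// Create the mapping from the es_indexed flags BEFORE indexing documents;
// otherwise Elasticsearch dynamic mapping will index every field it sees.
Event.createMapping(function (err, mapping) {
  if (err) {
    console.error('error creating mapping', err);
  } else {
    console.log('mapping created:', mapping);
  }
});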
I am trying to do a geo point filter in MongoDB in MEAN.js. I have used the mongoosastic module, but I am not able to perform the geo point filter.
Below are the Mongoose schema and the controller code for the filter.
Mongoose schema:
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
Schema = mongoose.Schema,
mongoosastic = require('mongoosastic');
var BusinessSchema = new Schema({
name: {type: String, unique: 'Name already exists', trim: true, required: 'Name is required.', es_indexed: true},
searchTags: {type: [String], es_indexed: true},
alias: {type: Array, es_indexed: true},
// geoLocation: { type: [Number], /*/ [<longitude>, <latitude>]*/ index: '2d', /*/ create the geospatial index,*/ required: 'GeoLocation is required.', es_indexed:true,es_type:'geo_point'},
geo_cords: {
type: Array
},
address: {
address1: {type: String, required: 'Address is required', trim: true},
address2: String,
city: {type: String, required: 'City is required', trim: true},
// state: {type: String, required: 'State is required', trim: true},
country: {type: String, required: 'Country is required', trim: true},
postalCode: {type: String, required: 'Postal code is required', trim: true},
neighbourhood: String
},
isActive: {
type: Boolean,
default: true,
es_indexed: true
},
dateUpdated: {
type: Date
, es_indexed: true
},
dateCreated: {
type: Date,
default: Date.now
, es_indexed: true
}
});
Controller code for the filter and query:
var mongoose = require('mongoose'),
Business = mongoose.model('Businesses');
var query = {
"query_string": {
"multi_match": {
"query": categoryIds.join(' OR '),
"fields": ["categoryIds", "relatedCategoryIds"]
}
},
"filter": {
"bool": {
"should": [
{"term": {"address.postalCode": "110016"}},
{"geo_distance": {
"distance": "50km",
"geo_cords": [-122.3050, 37.9174]
}
}
],
}
}
}
Business.search(query, function (err, results) {
// sendResponse(req, res, err, results)
if (!err) {
res.json(results);
} else {
res.status(400).send({message: 'Business Not Found'})
}
});
While doing this I am getting a long error saying:
QueryParsingException[[businessess] failed to find geo_point field [geo_cords]
According to the mongoosastic documentation:
Geo mapping
Prior to indexing any geo-mapped data (or calling synchronize), the mapping must be manually created with createMapping (see above).
First, in your schema, define 'geo_cords' this way:
geo_cords: {
geo_point: {
type: String,
es_type: 'geo_point',
es_lat_lon: true
},
lat: { type: Number },
lon: { type: Number }
}
Add an es_type: 'object' to each Array or embedded type:
alias: {type: Array, es_indexed: true, es_type: 'object'}
Then call .createMapping() on the model just after you've created it.
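Assembled in one place, a minimal sketch of what the documentation describes (only geo_cords is shown; the other fields stay as in the original schema):

var mongoose = require('mongoose'),
    Schema = mongoose.Schema,
    mongoosastic = require('mongoosastic');

var BusinessSchema = new Schema({
  // ...other fields as in the original schema...
  geo_cords: {
    geo_point: {
      type: String,
      es_type: 'geo_point',
      es_lat_lon: true
    },
    lat: { type: Number },
    lon: { type: Number }
  }
});

BusinessSchema.plugin(mongoosastic);
var Business = mongoose.model('Businesses', BusinessSchema);

// The mapping must exist before any documents are indexed; otherwise
// Elasticsearch guesses a type for geo_cords and geo_distance fails
// with "failed to find geo_point field [geo_cords]".
Business.createMapping(function (err, mapping) {
  if (err) {
    console.error('createMapping failed', err);
  } else {
    console.log('mapping created:', mapping);
  }
});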
I have a model:
Ext.define('CrudTest.model.User', {
extend: 'Ext.data.Model',
idProperty: 'Id',
fields: [
{ name: 'Id', type: 'int' },
{ name: 'Name', type: 'string' },
{ name: 'PhoneNumber', type: 'int' },
{ name: 'Address', type: 'string' },
{ name: 'StateId', type: 'int', reference: 'State' },
],
validators: [
{ type: 'presence', field: 'Name', message: 'define name, please' },
{ type: 'length', field: 'PhoneNumber', max: 8, message: 'no more than 8 digits' },
],
proxy: {
type: 'ajax',
api: {
create: 'home/new',
read: 'home/users',
update: 'home/Edit',
destroy: 'home/Delete'
},
},
});
and a form that loads its data via loadRecord(). My handler code for the submit button is:
var form = this.up('form').getForm();
if (form.isValid()) {
form.getRecord().save();
}
It makes a POST request through my model's proxy fine, but the request body only contains the dirty (edited) fields. Why are the other fields missing? I know updateRecord() uses getFieldValues([onlyDirty=false]); how can I send all field values?
I use ExtJS 5.
Finally found the problem: Ext.data.writer.Writer has a config property called writeAllFields.
So I changed the proxy to this:
proxy: {
    writer: { writeAllFields: true },
    type: 'ajax', // also works with type: 'direct'
    api: {
        create: 'home/new',
        read: 'home/users',
        update: 'home/Edit',
        destroy: 'home/Delete'
    }
}
You can set critical: true on the Model for any fields you always want written, whether changed or not, e.g.
{ name: 'title', type: 'string', critical: true }
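For completeness, a minimal sketch combining both options (model name and URLs reused from the question; which fields to mark critical is illustrative):

Ext.define('CrudTest.model.User', {
    extend: 'Ext.data.Model',
    idProperty: 'Id',
    fields: [
        { name: 'Id', type: 'int' },
        // critical: always included in update payloads, changed or not
        { name: 'Name', type: 'string', critical: true },
        { name: 'PhoneNumber', type: 'int' }
    ],
    proxy: {
        type: 'ajax',
        api: { update: 'home/Edit' },
        // writeAllFields: serialize every field, not just dirty ones
        writer: { type: 'json', writeAllFields: true }
    }
});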