Elasticsearch query using more_like_this renders a "failed to parse search source. expected field name but got [START_OBJECT]" error

We're using Elasticsearch 2.4.5 and have an application that can generate fairly complicated queries. I'm trying to add a more_like_this clause to the query like so:
{
"query": {
"more_like_this": {
"fields": [
"title"
],
"ids": [
1234
],
"min_term_freq": 1,
"max_query_terms": 25
},
"function_score": {
"query": {
"bool": {
"must": [
{
"query_string": {
"default_operator": "AND",
"fields": [
"title",
"author"
],
"query": "((title:(\"Tale of Two Cities\"^2)))",
"lenient": true
}
}
],
"filter": {
"bool": {
"must": [
{
"bool": {
"should": [
{
"geo_distance": {
"distance": "50mi",
"location": {
"lat": 49.32,
"lon": -45.67
},
"distance_type": "plane",
"_cache": true
}
}
]
}
},
{
"term": {
"merged": 0
}
},
{
"bool": {
"must_not": {
"exists": {
"field": "title_type"
}
}
}
}
]
}
}
}
},
"functions": [
{
"field_value_factor": {
"field": "quality_score",
"factor": 1,
"missing": 0
}
}
]
}
},
"filter": {
"bool": {
"must": []
}
},
"sort": "_score",
"size": 20,
"from": 0
}
I'm getting a "failed to parse search source. expected field name but got [START_OBJECT]" error when I run the query above. When I remove the more_like_this block, the query executes correctly. I've looked at the documentation and at other examples of more_like_this usage and can't determine what's wrong with my query. I'm assuming it has something to do with the way the rest of the query is formed.
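A likely cause, though no accepted answer is reproduced here, is that the query object contains two sibling clauses, more_like_this and function_score, whereas the search body expects exactly one query clause at that level; the parser reads the first clause and then trips over the second object. A minimal sketch of one way to combine them, assuming the intent is that hits must match both the more_like_this and the query_string clauses (use should instead of must if the similarity match should be optional; the filter block and functions are unchanged and elided):
{
  "query": {
    "function_score": {
      "query": {
        "bool": {
          "must": [
            {
              "more_like_this": {
                "fields": ["title"],
                "ids": [1234],
                "min_term_freq": 1,
                "max_query_terms": 25
              }
            },
            {
              "query_string": {
                "default_operator": "AND",
                "fields": ["title", "author"],
                "query": "((title:(\"Tale of Two Cities\"^2)))",
                "lenient": true
              }
            }
          ],
          "filter": {
            ... same filter block as above ...
          }
        }
      },
      "functions": [
        ... same functions as above ...
      ]
    }
  },
  "sort": "_score",
  "size": 20,
  "from": 0
}
Separately, the ids and docs parameters of more_like_this are deprecated in the 2.x line in favor of like, so switching to like may be worth considering while restructuring.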

Related

Elasticsearch query not working with synonym_analyzer

I have a query that looks like this:
GET my-index/_search
{
"_source": [
"product.ref",
"product.urls.*",
"product.i18ns.*.title",
"product_sale_elements.quantity",
"product_sale_elements.prices.*.price",
"product_sale_elements.listen_price.*",
"product.images.image_url",
"product.image_count",
"product.images.visible",
"product.images.position"
],
"min_score": 0.1,
"query": {
"bool": {
"filter": [
{
"term": {
"product.is_visible": true
}
}
],
"must": [
{
"query_string": {
"query": "13114140",
"fields": [
"product.i18ns.en_US.title.analyzed",
"product.ref",
"categories.i18ns.en_US.title.analyzed"
]
}
}
]
}
},
"aggs": {
"by_categories": {
"terms": {
"field": "categories.i18ns.en_US.title.raw"
}
}
}
}
This works fairly well for me and I'm happy with it, but I would like to add a synonym_analyzer. The problem is that when I add it like this:
...
{
"query_string": {
"query": "13114140",
"fields": [
"product.i18ns.de_AT.title.analyzed",
"product.ref",
"categories.i18ns.de_AT.title.analyzed"
],
"analyzer": "synonym_analyzer"
}
}
...
The whole query seems to crumble and I don't get anything back in the response. Why is this happening?
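One possible cause, assuming the analyzer name is the problem (the actual error response isn't shown), is that synonym_analyzer is not defined in the analysis settings of my-index, so query_string cannot resolve it and the request fails; checking the response body or the Elasticsearch logs would confirm this rather than the empty result alone. A minimal sketch of what such a definition could look like at index creation (the filter name and the synonym entries are placeholders; adding analysis settings to an existing index requires closing it first):
PUT my-index
{
  "settings": {
    "analysis": {
      "filter": {
        "my_synonym_filter": {
          "type": "synonym",
          "synonyms": [
            "notebook, laptop"
          ]
        }
      },
      "analyzer": {
        "synonym_analyzer": {
          "tokenizer": "standard",
          "filter": [
            "lowercase",
            "my_synonym_filter"
          ]
        }
      }
    }
  }
}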

Limit the size per index when searching multiple indices in Elasticsearch

I have been following the guidelines from this post. I can get the desired output, but within the same DSL, how can I limit the number of results for each index?
Full text Search with Multiple index in Elastic Search using NEST C#
POST http://localhost:9200/componenttypeindex%2Cprojecttypeindex/Componenttype%2CProjecttype/_search?pretty=true&typed_keys=true
{
"query": {
"bool": {
"should": [
{
"bool": {
"filter": [
{
"term": {
"_index": {
"value": "componenttypeindex"
}
}
}
],
"must": [
{
"multi_match": {
"fields": [
"Componentname",
"Summary^1.1"
],
"operator": "or",
"query": "test"
}
}
]
}
},
{
"bool": {
"filter": [
{
"term": {
"_index": {
"value": "projecttypeindex"
}
}
}
],
"must": [
{
"multi_match": {
"fields": [
"Projectname",
"Summary^0.3"
],
"operator": "or",
"query": "test"
}
}
]
}
}
]
}
}
}
With your given query, you could use aggregations to group the hits by index and limit the number of hits per index (in this case, to 5):
{
"size": 0,
"query": {
... Same query as above ...
},
"aggs": {
"index_agg": {
"terms": {
"field": "_index",
"size": 20
},
"aggs": {
"hits_per_index": {
"top_hits": {
"size": 5
}
}
}
}
}
}
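If the per-index sizes need to differ, or aggregations are not desirable, another option (a sketch, not part of the original answer) is the multi search API: one sub-search per index, each with its own size, sent as newline-delimited JSON:
POST _msearch
{"index": "componenttypeindex"}
{"size": 5, "query": {"multi_match": {"fields": ["Componentname", "Summary^1.1"], "operator": "or", "query": "test"}}}
{"index": "projecttypeindex"}
{"size": 5, "query": {"multi_match": {"fields": ["Projectname", "Summary^0.3"], "operator": "or", "query": "test"}}}
The trade-off is that the responses come back as separate result sets rather than a single merged ranking.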

Elasticsearch multiple fields wildcard bool query

I'm currently using a bool query that searches the "Name" field for either both input words or one of them. How can I search on multiple fields using wildcards?
POST inventory_dev/_search
{"from":0,"query":{"bool":{"must":[{"bool":{"should":[{"term":{"Name":{"value":"dove"}}},{"term":{"Name":{"value":"3.75oz"}}},{"bool":{"must":[{"wildcard":{"Name":{"value":"*dove*"}}},{"wildcard":{"Name":{"value":"*3.75oz*"}}}]}}]}}]}},"size":10,"sort":[{"_score":{"order":"desc"}}]}
You can use query_string in place of the wildcard query to search on multiple fields:
{
"from": 0,
"query": {
"bool": {
"must": [
{
"bool": {
"should": [
{
"term": {
"Name": {
"value": "dove"
}
}
},
{
"term": {
"Name": {
"value": "3.75oz"
}
}
},
{
"bool": {
"must": [
{
"query_string": {
"query": "*dove*",
"fields": [
"field1",
"Name"
]
}
},
{
"query_string": {
"query": "*3.75oz*",
"fields": [
"field1",
"Name"
]
}
}
]
}
}
]
}
}
]
}
},
"size": 10,
"sort": [
{
"_score": {
"order": "desc"
}
}
]
}
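If you would rather stay with the wildcard query itself, an equivalent sketch is one wildcard clause per field inside a bool/should (field1 is the same placeholder field used above):
{
  "bool": {
    "should": [
      { "wildcard": { "Name": { "value": "*dove*" } } },
      { "wildcard": { "field1": { "value": "*dove*" } } }
    ],
    "minimum_should_match": 1
  }
}
Either way, leading wildcards such as *dove* have to scan a large part of the term dictionary and can be slow on big indices.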

How to add filters to an Elasticsearch query when using function_score?

Here is my current Elasticsearch query:
{
"from": 0,
"size": 10,
"query": {
"function_score": {
"query": {
"bool": {
"must": [{
"multi_match": {
"query": "ocean",
"fields": [],
"fuzziness": "AUTO"
}}],
"must_not": [{
"exists": {
"field": "parentId"
}
}]
}
},
"functions" : [
{
"gauss": {
"createTime": {
"origin": "2020-07-09T23:50:00",
"scale": "365d",
"decay": 0.3
}
}
}
]
}
}
}
How do I properly add filters to this? I think the fact that I'm using function_score may make this different. I would like to add a hard filter, for example, only show me results with uploadUser: 'Mr. Bean' ... but still keep the scoring in place for the results that pass this filter.
I tried using filter in various places, as well as must, but I either get no results or all the results.
I'm using Elasticsearch 7. Thanks for your help.
You can try the search query below. Refer to the official Elasticsearch documentation to learn more about the function_score query:
{
"from": 0,
"size": 10,
"query": {
"function_score": {
"query": {
"bool": {
"filter": {
"term": {
"uploadUser": "Mr. Bean"
}
},
"must": [
{
"multi_match": {
"query": "ocean",
"fields": [
],
"fuzziness": "AUTO"
}
}
],
"must_not": [
{
"exists": {
"field": "parentId"
}
}
]
}
},
"functions": [
{
"gauss": {
"createTime": {
"origin": "2020-07-09T23:50:00",
"scale": "365d",
"decay": 0.3
}
}
}
]
}
}
}
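One caveat, assuming a default dynamic mapping (the actual mapping isn't shown): if uploadUser is a text field, a term query against the analyzed field will not match the full value 'Mr. Bean', and the filter would need to target the keyword subfield instead:
"filter": {
  "term": {
    "uploadUser.keyword": "Mr. Bean"
  }
}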

Use partial_fields in an Elasticsearch Kibana query

I am trying to add the partial_fields directive to an Elasticsearch query (generated from Kibana's table widget).
Where exactly would I have to place this statement in the ES query below?
I already tried adding it right after the first "query" node, which produces valid JSON but still doesn't exclude xmz_Data:
"partial_fields": {
"partial1": {
"exclude": "xmz_Data"
}
},
ES Query
{
"query": {
"filtered": {
"query": {
"bool": {
"should": [
{
"query_string": {
"query": "*"
}
}
]
}
},
"filter": {
"bool": {
"must": [
{
"match_all": {}
},
{
"bool": {
"must": [
{
"match_all": {}
}
]
}
}
]
}
}
}
},
"highlight": {
"fields": {},
"fragment_size": 2147483647,
"pre_tags": [
"#start-highlight#"
],
"post_tags": [
"#end-highlight#"
]
},
"size": 250,
"sort": [
{
"timestamp": {
"order": "desc"
}
}
]
}
You can place the partial_fields directive anywhere in your query; I tested it successfully both before and after the query node. However, the formatting of your excluded fields value is incorrect: the exclude value needs to be an array. Try this instead:
"partial_fields": {
"partial1": {
"exclude": ["xmz_Data"]
}
},
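As a side note, later Elasticsearch releases drop partial_fields in favor of _source filtering, which achieves the same exclusion; a minimal sketch (older releases also accept the singular exclude key):
"_source": {
  "excludes": ["xmz_Data"]
}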
