I am trying to simulate a watch to check whether its actions trigger correctly, but my problem is that the search returns no results.
My query:
Checks for a particular index.
Checks for a time range.
Checks that the servicename field has a particular value.
This is my watch definition:
{
"trigger": {
"schedule": {
"interval": "10m"
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"body": {
"query": {
"bool": {
"filter": [
{
"term": {
"level": {
"value": "ERROR"
}
}
},
{
"term": {
"servicename": [
"Iit.Det.Urm.MepsSubscriber"
]
}
},
{
"range": {
"#timestamp": {
"gte": "now-60m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gt": 0
}
}
},
"actions": {
"notify-slack": {
"slack": {
"account": "elastic_watcher_alerts",
"proxy": {
"host": "proxy.dom",
"port": 80
},
"message": {
"from": "Error Monitor",
"to": [
"#det-errors"
],
"text": "The following error(s) have been logged",
"dynamic_attachments": {
"list_path": "ctx.payload.items",
"attachment_template": {
"color": "#f00",
"title": "{{msg}}",
"title_link": "https://elastic.mid.dom:port/{{index}}/doc/{{id}}?pretty",
"text": "{{msg}}",
"fields": [
{
"title": "Server",
"value": "{{host}}",
"short": true
},
{
"title": "Servicename",
"value": "{{service}}",
"short": true
}
]
}
}
}
}
}
},
"transform": {
"script": {
"source": "['items': ctx.payload.hits.hits.collect(hit -> ['msg': hit._source.message, 'service': hit._source.servicename, 'index': hit._index, 'id' : hit._id, 'host': hit._source.agent.hostname ])]",
"lang": "painless"
}
}
}
I am now trying to test it using the simulate option and giving it an alternative input. This input is copied from actual data in the index: I copied a JSON document from Kibana (in the Discover section), so the alternative input JSON should be fine.
Here's the alternative input:
{
"_index": "datasolutions-svc-live-7.7.0-2021.01",
"_type": "doc",
"_id": "Hre9SHcB1QIqYEnyxSCw",
"_version": 1,
"_score": null,
"_source": {
"exception": "System.Data.SqlClient.SqlException (0x80131904): blabla",
"agent": {
"hostname": "SATSVC3-DK1",
"name": "datasolutions-svc-live",
"id": "8c826ae1-e411-4257-a31f-08824dd58b5a",
"type": "filebeat",
"ephemeral_id": "e355bf8a-be67-4ed1-85f4-b9043674700e",
"version": "7.7.0"
},
"log": {
"file": {
"path": "D:\\logs\\7DaysRetention\\Iit.Det.Urm.MepsSubscriber\\Iit.Det.Urm.MepsSubscriber.log.20210128.log"
},
"offset": 17754757
},
"level": "ERROR",
"message": "Error while starting service.",
"#timestamp": "2021-02-17T10:00:28.343Z",
"ecs": {
"version": "1.5.0"
},
"host": {
"name": "datasolutions-svc-live"
},
"servicename": "Iit.Det.Urm.MepsSubscriber",
"codelocation": "Iit.Det.Urm.MepsSubscriber.MepsSubscriberService.OnStart:29"
},
"fields": {
"#timestamp": [
"2021-02-17T10:00:28.343Z"
]
},
"highlight": {
"servicename": [
"#kibana-highlighted-field#Iit.Det.Urm.MepsSubscriber#/kibana-highlighted-field#"
]
},
"sort": [
1611833128343
]
}
But when I run "simulate", ctx.payload.hits.total comes back as null because apparently it does not find any results. Result of the simulate:
{
"watch_id": "_inlined_",
"node": "eMS-E34eT4-zZhGwtPNSmw",
"state": "execution_not_needed",
"user": "sum",
"status": {
"state": {
"active": true,
"timestamp": "2021-02-17T10:57:04.077Z"
},
"last_checked": "2021-02-17T10:57:04.077Z",
"actions": {
"notify-slack": {
"ack": {
"timestamp": "2021-02-17T10:57:04.077Z",
"state": "awaits_successful_execution"
}
}
},
"execution_state": "execution_not_needed",
"version": -1
},
"trigger_event": {
"type": "manual",
"triggered_time": "2021-02-17T10:57:04.077Z",
"manual": {
"schedule": {
"scheduled_time": "2021-02-17T10:57:04.077Z"
}
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"filter": [
{
"term": {
"level": {
"value": "ERROR"
}
}
},
{
"term": {
"servicename": [
"Iit.Det.Urm.MepsSubscriber"
]
}
},
{
"range": {
"#timestamp": {
"gte": "now-60m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gt": 0
}
}
},
"metadata": {
"name": "datasolutions-svc-mepssubscriber",
"xpack": {
"type": "json"
}
},
"result": {
"execution_time": "2021-02-17T10:57:04.077Z",
"execution_duration": 0,
"input": {
"type": "simple",
"status": "success",
"payload": {
"highlight": {
"servicename": [
"#kibana-highlighted-field#Iit.Det.Urm.MepsSubscriber#/kibana-highlighted-field#"
]
},
"_index": "datasolutions-svc-live-7.7.0-2021.01",
"_type": "doc",
"_source": {
"exception": "System.Data.SqlClient.SqlException (0x80131904): blabla",
"agent": {
"hostname": "SATSVC3-DK1",
"name": "datasolutions-svc-live",
"id": "8c826ae1-e411-4257-a31f-08824dd58b5a",
"type": "filebeat",
"ephemeral_id": "e355bf8a-be67-4ed1-85f4-b9043674700e",
"version": "7.7.0"
},
"#timestamp": "2021-02-17T10:00:28.343Z",
"ecs": {
"version": "1.5.0"
},
"log": {
"file": {
"path": "D:\\logs\\7DaysRetention\\Iit.Det.Urm.MepsSubscriber\\Iit.Det.Urm.MepsSubscriber.log.20210128.log"
},
"offset": 17754757
},
"level": "ERROR",
"host": {
"name": "datasolutions-svc-live"
},
"servicename": "Iit.Det.Urm.MepsSubscriber",
"message": "Error while starting service.",
"codelocation": "Iit.Det.Urm.MepsSubscriber.MepsSubscriberService.OnStart:29"
},
"_id": "Hre9SHcB1QIqYEnyxSCw",
"sort": [
1611833128343
],
"_score": null,
"fields": {
"#timestamp": [
"2021-02-17T10:00:28.343Z"
]
},
"_version": 1
}
},
"condition": {
"type": "compare",
"status": "success",
"met": false,
"compare": {
"resolved_values": {
"ctx.payload.hits.total": null
}
}
},
"actions": []
},
"messages": []
}
I am not sure why it can't find the results. Can someone tell me what I am doing wrong?
I was able to solve it using the "Inspect" section of the Discover page for the index.
In the end, my input for the watcher query had to be changed to:
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"must": [],
"filter": [
{
"bool": {
"should": [
{
"match_phrase": {
"servicename": "Iit.Det.Urm.MepsSubscriber"
}
}
],
"minimum_should_match": 1
}
},
{
"match_phrase": {
"level": "ERROR"
}
},
{
"range": {
"#timestamp": {
"gte": "now-10m",
"format": "strict_date_optional_time"
}
}
}
],
"should": [],
"must_not": []
}
}
}
}
}
}
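A side note on the simulate test above: the alternative input replaces the entire search payload, so a compare condition on ctx.payload.hits.total can only resolve if the supplied JSON mimics the shape of a search response rather than a single Discover document. A minimal sketch of such an alternative input, reusing values from the document above (the hits wrapper itself is an assumption for illustration, not copied from the thread):
{
  "hits": {
    "total": 1,
    "hits": [
      {
        "_index": "datasolutions-svc-live-7.7.0-2021.01",
        "_id": "Hre9SHcB1QIqYEnyxSCw",
        "_source": {
          "level": "ERROR",
          "servicename": "Iit.Det.Urm.MepsSubscriber",
          "message": "Error while starting service.",
          "@timestamp": "2021-02-17T10:00:28.343Z",
          "agent": {
            "hostname": "SATSVC3-DK1"
          }
        }
      }
    ]
  }
}
With a payload shaped like this, the condition evaluates to true and the transform can pick out message, servicename, _index, _id and agent.hostname as the watch expects.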
Related
I have the following webhook watcher, which is working perfectly, creating an OTRS ticket when the word "Error" appears in the logs.
{
"trigger": {
"schedule": {
"interval": "1m"
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "Error"
}
},
{
"range": {
"#timestamp": {
"gte": "now-1m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gte": 1
}
}
},
"actions": {
"create_otrs": {
"transform": {
"script": {
"source": "return ['Ticket':['Queue':'Engineering Team','Priority':'P3','CustomerUser':'root','Title':'RESTCreateTest','State':'new','Type':'Incident'],'Article':['ContentType':'text/plain;charset=utf8','Subject':'RestCreateTest','Body':'This is only a test']]",
"lang": "painless"
}
},
"webhook": {
"scheme": "http",
"host": "myotrs.com",
"port": 80,
"method": "post",
"path": "/otrs/GenericTicketConnectorREST/User=<User>&Pass=<Password>",
},
"headers": {},
"body": "{{#toJson}}ctx.payload{{/toJson}}"
}
}
}
}
However, right now the functionality is limited, as it creates a ticket with the fixed body "This is only a test". I want to pass the exact error message from the log into the ticket body. How can I pass variables into the script body?
I have included one of the hits below; it shows the document I am trying to access. I need the error message from the document hit in "ctx.payload" to be the body of the ticket (see the sketch after the hit).
{
"_index": ".ds-logs-elastic_agent.filebeat-default",
"_source": {
"input_source": "https://ser.example.com:80/export",
"agent": {
"name": "syslog01",
"id": "5836558b-b17d-445e",
"type": "filebeat",
"ephemeral_id": "36bdfeca-3c60",
"version": "8.3.3"
},
"service.name": "filebeat",
"log": {
"file": {
"path": "/opt/Elastic/Agent/data/elastic-agent-0ffbed/logs/default/filebeat-20230127-12.ndjson"
},
"offset": 248078415
},
"elastic_agent": {
"id": "5836558b-b17d",
"version": "8.3.3",
"snapshot": false
},
"message": """Error while processing http request: failed to execute rf.collectResponse: failed to execute http client.Do: failed to execute http client.Do: Post "https://ser.example.com:80/export": POST https://ser.example.com:80/export giving up after 6 attempts""",
"log.logger": "input.httpjson-cursor",
"input": {
"type": "filestream"
},
"log.origin": {
"file.line": 128,
"file.name": "httpjson/input.go"
},
"#timestamp": "2023-01-27T14:44:42.790Z",
"ecs": {
"version": "8.0.0"
},
"data_stream": {
"namespace": "default",
"type": "logs",
"dataset": "elastic_agent.filebeat"
},
"host": {
"hostname": "syslog01",
"os": {
"kernel": "3.10.25-gentoo",
"name": "Gentoo",
"type": "linux",
"family": "",
"version": "",
"platform": "gentoo"
},
"containerized": false,
"log.level": "error",
"input_url": "https://ser.example.com:8089/export",
"id": "httpjson-system.security-ba2ec41b-457b-442a",
"event": {
"agent_id_status": "verified",
"ingested": "2023-01-27T14:44:58Z",
"dataset": "elastic_agent.filebeat"
}
},
"_id": "pCWw84UB8FDLddfs",
"_score": 2.2840834
}
}
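A minimal sketch of how the transform could pass the log message into the ticket body, assuming the first hit's message should be used (the OTRS ticket fields are copied from the watcher above; only the Body value changes):
"transform": {
  "script": {
    "lang": "painless",
    "source": "def msg = ctx.payload.hits.hits[0]._source.message; return ['Ticket':['Queue':'Engineering Team','Priority':'P3','CustomerUser':'root','Title':'RESTCreateTest','State':'new','Type':'Incident'],'Article':['ContentType':'text/plain;charset=utf8','Subject':'RestCreateTest','Body':msg]]"
  }
}
The webhook body ("{{#toJson}}ctx.payload{{/toJson}}") then serializes the transformed payload, so the Article.Body sent to OTRS contains the actual log message.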
I'm trying out Elastic APM. I have successfully created a service with data flowing in. I wanted to see if I can have multiple services. Somehow, I ran into problems, so I wanted to delete some services. However, I couldn't find a way to delete a service.
Question: How can I delete a service in APM?
Further information
Indices related to APM:
{
"_index": "apm-7.3.2-metric-000001",
"_type": "_doc",
"_id": "XgEhYm0BiAdOXLlDGc-r",
"_version": 1,
"_score": null,
"_source": {
"jvm": {
"memory": {
"non_heap": {
"committed": 87449600,
"max": -1,
"used": 66599704
},
"heap": {
"committed": 232783872,
"max": 2025848832,
"used": 170023936
}
},
"thread": {
"count": 63
},
"gc": {
"alloc": 632406344
}
},
"observer": {
"hostname": "localhost.localdomain",
"id": "d1aec10a-cc4e-44f4-9aed-acf57d107ab7",
"ephemeral_id": "ae48b040-f9f6-4144-a600-d402defaa44a",
"type": "apm-server",
"version": "7.3.2",
"version_major": 7
},
"agent": {
"name": "java",
"ephemeral_id": "66d5c439-271c-483d-a426-d0e569bede4a",
"version": "1.9.0"
},
"process": {
"pid": 16154,
"title": "/usr/lib/jvm/java-11-openjdk-11.0.1.13-3.el7_6.x86_64/bin/java",
"ppid": 1
},
"#timestamp": "2019-09-24T07:16:28.461Z",
"system": {
"process": {
"memory": {
"size": 6070763520
},
"cpu": {
"total": {
"norm": {
"pct": 0.001925814284518128
}
}
}
},
"memory": {
"actual": {
"free": 749580288
},
"total": 8102449152
},
"cpu": {
"total": {
"norm": {
"pct": 0.033324960227748474
}
}
}
},
"ecs": {
"version": "1.0.1"
},
"service": {
"name": "ldap1",
"runtime": {
"name": "Java",
"version": "11.0.1"
},
"language": {
"name": "Java",
"version": "11.0.1"
}
},
"host": {
"hostname": "localhost.localdomain",
"os": {
"platform": "Linux"
},
"ip": "127.0.0.1",
"architecture": "amd64"
},
"processor": {
"name": "metric",
"event": "metric"
}
},
"fields": {
"#timestamp": [
"2019-09-24T07:16:28.461Z"
]
},
"sort": [
1569309388461
]
}
The document above contains the service that I want to remove:
"service": {
"name": "ldap1",
"runtime": {
"name": "Java",
"version": "11.0.1"
},
You simply need to run a delete-by-query like this:
POST apm*/_delete_by_query
{
"query": {
"term": {
"service.name": "ldap1"
}
}
}
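To verify the documents are gone afterwards, a count query like the following can be used (a sketch, reusing the same service name):
GET apm*/_count
{
  "query": {
    "term": {
      "service.name": "ldap1"
    }
  }
}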
The accepted answer no longer works; you can use the following instead:
POST /apm-*/_delete_by_query
{
"query": {
"bool": {
"must": [
{
"term": {
"service.name": {
"value": "my-application"
}
}
}
]
}
}
}
Please try this out:
POST .*apm*/_delete_by_query
{
"query": {
"term": {
"service.name": {
"value": "web-test"
}
}
}
}
Refer to https://discuss.elastic.co/t/how-to-delete-amp-service/301908/3
Hope this helps!
I'm new to Elasticsearch and I have a few questions regarding nested object retrieval when a specific condition is matched.
I have a tree-like structure as follows:
{
"id": 4,
"sora": [
{
"pContext": {
"context": {
"sT": "D3",
"uT": "ST"
},
"entities": [
{
"name": "premium",
"bName": "premium",
"fT": "site",
"eT": "F_P",
"children": [
{
"name": "capa",
"bName": "capa",
"fT": "site",
"eT": "FFT",
"children": []
},
{
"name": "code",
"bName": "Codes",
"fT": "site",
"eT": "FFT",
"children": []
},
{
"name": "selection A",
"fT": "site",
"eT": "SELECTION_A",
"children": [
{
"name": "A1",
"fT": "site",
"eT": "ADD",
"children": []
},
{
"name": "A2",
"fT": "site",
"eT": "ADD",
"children": []
}
]
}
]
}
]
}
},
{
"pContext": {
"context": {
"sT": "D2",
"uT": "ST"
},
"entities": [
{
"name": "112",
"bName": "112",
"eT": "D_TYPE",
"children": []
}
]
}
}
]
}
My structure can have more levels.
I have many documents like the one described above. In order to filter my documents I can use this simple query syntax:
{
"_source": {
"excludes": [
"*.context"
]
},
"query": {
"bool": {
"must": [
{
"match": {
"sora.pContext.context.sT": "D3"
}
},
{
"match": {
"sora.pContext.entities.name": "premium"
}
},
{
"match": {
"sora.pContext.entities.fT": "site"
}
}
]
}
}
}
What I would like to know is: how can I get the nested object that matches my query, along with its children? I need the object that matched the must (inclusive) filter. Is that possible?
How can I search for a field without specifying the path?
Thanks
# EDIT
My mapping:
{
"mappings": {
"abc": {
"properties": {
"id": {
"type": "integer"
},
"sora": {
"type": "nested",
"properties": {
"pContext": {
"type": "nested",
"properties": {
"context": {
"type": "nested",
"properties": {
"sT": {
"type": "text"
},
"uT": {
"type": "text"
}
}
},
"entities": {
"type": "nested",
"properties": {
"name": {
"type": "text"
},
"bName": {
"type": "text"
},
"fT": {
"type": "text"
},
"eT": {
"type": "text"
},
"children": {
"type": "object"
}
}
}
}
}
}
}
}
}
}
}
Yes, you can get the matching objects by using inner_hits along with a nested query, not the query you added to the question.
Your query will look like this:
{
"_source": {
"excludes": [
"*.context"
]
},
"query": {
"bool": {
"filter": [
{
"nested": {
"inner_hits": {},
"path": "sora.pContext",
"query": {
"bool": {
"must": [
{
"nested": {
"path": "sora.pContext.context",
"query": {
"bool": {
"must": [
{
"match": {
"sora.pContext.context.sT": "D3"
}
}
]
}
}
}
},
{
"nested": {
"path": "sora.pContext.entities",
"query": {
"bool": {
"must": [
{
"match": {
"sora.pContext.entities.name": "premium"
}
},
{
"match": {
"sora.pContext.entities.fT": "site"
}
}
]
}
}
}
}
]
}
}
}
}
]
}
}
}
I have added a link to the inner_hits documentation, where you can see what the results will look like.
Well, if someone else is facing the same issue: my solution was to add all children at the same path/level as the parent, while keeping the mapping with the parent and its children. With that, I'm able to search for and retrieve the desired parts of the parent.
How can I get documents by IDs and filter the notes to keep only a given category?
Data:
POST c1_2/Blog/1
{
"post": {
"notes": {
"main": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
]
}
}
}
POST c1_2/Blog/2
{
"post": {
"notes": {
"main": [
{
"message": "blablabla",
"category": "second"
},
{
"message": "blablabla",
"category": "third"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
]
}
}
}
Search:
POST c1_2/Blog/_search
{
"query": {
"bool": {
"must": [
{
"ids": {
"values": [
1,
2,
3
]
}
},
{
"terms": {
"post.notes.main.category": [
"test"
]
}
}
]
}
}
}
Current results; the objects in notes main/cart aren't filtered by category:
{
"took": 9,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 1,
"max_score": 1.0122644,
"hits": [
{
"_index": "c1_2",
"_type": "Blog",
"_id": "1",
"_score": 1.0122644,
"_source": {
"post": {
"notes": {
"main": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
]
}
}
}
}
]
}
}
Desired effect:
"notes": {
"main": [
{
"message": "blablabla",
"category": "test"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
}
]
}
In my real app the query is embedded in "filtered" and "filter". If I put the query above in place of the "ids" filter, as in the example below, will it return the same data?
POST c1_2/Blog/_search
{
"query": {
"filtered": {
"query": {
"match_all": {}
},
"filter": {
"ids": {
"values": [
"1",
"2"
]
}
}
}
}
}
Mapping:
{
"posts": {
"mappings": {
"posts": {
"dynamic_templates": [{
"blog": {
"mapping": {
"index": "analyzed"
},
"path_match": "blog.*",
"path_unmatch": "*.medias.*"
}
}, {
"ids": {
"mapping": {
"index": "not_analyzed",
"type": "string"
},
"match": "_id|base_id",
"match_pattern": "regex"
}
}],
"_all": {
"enabled": false
},
"properties": {
"query": {
"properties": {
"filtered": {
"properties": {
"filter": {
"properties": {
"ids": {
"properties": {
"values": {
"type": "string"
}
}
}
}
},
"query": {
"properties": {
"match_all": {
"type": "object"
}
}
}
}
},
"match_all": {
"type": "object"
}
}
},
"source": {
"dynamic": "true",
"properties": {
"post": {
"dynamic": "true",
"properties": {
"_id": {
"type": "string",
"index": "not_analyzed"
},
"base_id": {
"type": "string",
"index": "not_analyzed"
}
}
}
}
},
"blog": {
"properties": {
"post": {
"properties": {
"_id": {
"type": "string"
},
"notes": {
"properties": {
"main": {
"properties": {
"id": {
"type": "string"
},
"message": {
"type": "string"
},
"category": {
"type": "string"
}
}
},
"cart": {
"properties": {
"id": {
"type": "string"
},
"message": {
"type": "string"
},
"category": {
"type": "string"
}
}
}
}
}
}
}
}
}
}
}
}
}
}
{
"query": {
"filtered": {
"filter": {
"bool": {
"must": {
"terms": {
"_id": [1, 2]
}
},
"must_not": {
"terms": {
"post.notes.main.category": [
"other"
]
}
}
}
}
}
}
}
How can I add an additional filter to match category values across all fields in blog.post.notes? First I want to filter by IDs, then filter the notes by category. Is that possible?
So far I can only filter by IDs:
GET posts/posts/_search?fields=_id&_source=blog.post.notes
{
"query": {
"filtered": {
"query": {
"match_all": {}
},
"filter": {
"ids": {
"values": [
"100000000001234"
]
}
}
}
}
}
How can I filter the current results to keep only, e.g., the "test" category:
{
"took": 58,
"timed_out": false,
"_shards": {
"total": 5,
"successful": 5,
"failed": 0
},
"hits": {
"total": 1,
"max_score": 1,
"hits": [{
"_index": "posts",
"_type": "posts",
"_id": "100000000001234",
"_score": 1,
"_source": {
"blog": {
"post": {
"notes": {
"main": [{
"message": "blablabla",
"category": "test"
}, {
"message": "blablabla",
"category": "other"
}],
"cart": [{
"message": "blablabla",
"category": "test"
}, {
"message": "blablabla",
"category": "other"
}]
}
}
}
}
}]
}
}
curl -XGET localhost:9200/posts/_mapping/posts
{
"posts": {
"mappings": {
"posts": {
"dynamic_templates": [{
"blog": {
"mapping": {
"index": "analyzed"
},
"path_match": "blog.*",
"path_unmatch": "*.medias.*"
}
}, {
"ids": {
"mapping": {
"index": "not_analyzed",
"type": "string"
},
"match": "_id|base_id",
"match_pattern": "regex"
}
}],
"_all": {
"enabled": false
},
"properties": {
"query": {
"properties": {
"filtered": {
"properties": {
"filter": {
"properties": {
"ids": {
"properties": {
"values": {
"type": "string"
}
}
}
}
},
"query": {
"properties": {
"match_all": {
"type": "object"
}
}
}
}
},
"match_all": {
"type": "object"
}
}
},
"source": {
"dynamic": "true",
"properties": {
"post": {
"dynamic": "true",
"properties": {
"_id": {
"type": "string",
"index": "not_analyzed"
},
"base_id": {
"type": "string",
"index": "not_analyzed"
}
}
}
}
},
"blog": {
"properties": {
"post": {
"properties": {
"_id": {
"type": "string"
},
"notes": {
"properties": {
"main": {
"properties": {
"id": {
"type": "string"
},
"message": {
"type": "string"
},
"category": {
"type": "string"
}
}
},
"cart": {
"properties": {
"id": {
"type": "string"
},
"message": {
"type": "string"
},
"category": {
"type": "string"
}
}
}
}
}
}
}
}
}
}
}
}
}
}
You can use a bool query with must clauses on ids and terms:
POST c1_2/Test/_search
{
"query": {
"bool": {
"must": [
{
"ids": {
"values": [
1,
2,
3
]
}
},
{
"terms": {
"blog.post.notes.main.category": [
"categoryfilter"
]
}
}
]
}
}
}
But since you have both main and cart categories, you must apply a filter on each of them. In my example I filter on the main categories; if you need to filter on both, you need one more or filter that matches on either the main or the cart categories (a sketch of this is included after the example below).
Also, you should know that category should be not_analyzed in order to filter on something like "my super category"; otherwise the query will not work properly.
Example
POST c1_2/Blog/1
{
"post": {
"notes": {
"main": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
]
}
}
}
POST c1_2/Blog/2
{
"post": {
"notes": {
"main": [
{
"message": "blablabla",
"category": "second"
},
{
"message": "blablabla",
"category": "third"
}
],
"cart": [
{
"message": "blablabla",
"category": "test"
},
{
"message": "blablabla",
"category": "other"
}
]
}
}
}
POST c1_2/Blog/_search
{
"query": {
"bool": {
"must": [
{
"ids": {
"values": [
1,
2,
3
]
}
},
{
"terms": {
"post.notes.main.category": [
"test"
]
}
}
]
}
}
}
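A minimal sketch of the extra or filter mentioned above, matching the category in either main or cart (the field names come from the mapping in the question; the combined shape is an assumption, not part of the original answer):
POST c1_2/Blog/_search
{
  "query": {
    "bool": {
      "must": [
        {
          "ids": {
            "values": [1, 2, 3]
          }
        },
        {
          "bool": {
            "should": [
              {
                "terms": {
                  "post.notes.main.category": ["test"]
                }
              },
              {
                "terms": {
                  "post.notes.cart.category": ["test"]
                }
              }
            ],
            "minimum_should_match": 1
          }
        }
      ]
    }
  }
}
A document matches as long as the "test" category appears in either notes list; the returned _source is still the whole document, since returning only the matching array entries would require a nested mapping and inner_hits.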