I have this query, which runs against the index below. How can I add a _source option to it so that it returns specific fields from the _source of this index?
GET mktg_percolate_postmetrics_mib/_search
{
"query": {
"bool": {
"must": [
{
"range": {
"end_date": {
"gte": "now-30d",
"lte": "now",
"format": "epoch_millis"
}
}
}
]
}
},
"size": 0,
"aggs": {
"2": {
"terms": {
"field": "post_id",
"size": 25,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"sum": {
"script": "doc['facebook_likes'].value + doc['facebook_shares'].value + doc['facebook_comments'].value + doc['facebook_post_consumptions_by_type_link_clicks'].value",
"lang": "expression"
}
}
}
}
}
}
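For context, with "size": 0 the response contains no hits at all, so a top-level "_source" option has nothing to apply to. One possible approach is a top_hits sub-aggregation placed alongside the existing sum agg "1", using its _source filter to pick the fields you want per post_id bucket. A minimal sketch only; the agg name "sample_docs" and the included field list are illustrative, not taken from the mapping:
"sample_docs": {
"top_hits": {
"size": 1,
"_source": {
"includes": ["post_id", "end_date"]
}
}
}
Alternatively, if "size" is set above 0, a top-level "_source": ["post_id", "end_date"] on the request body limits which fields come back for each hit.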
I can use an aggregation to compute some stats between two timestamps, as follows:
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"term": {
"status": "ok"
}
},
{
"term": {
"deviceId": "123456789"
}
},
{
"range": {
"time": {
"gte": 1669852800,
"lt": 1671062400
}
}
}
]
}
},
"aggs": {
"results": {
"date_histogram": {
"field": "time",
"fixed_interval": "60",
}
}
}
}
Is it possible to query only results that fall within a specific time range each day? For example, 7 am to 9 am daily between Dec. 1 and Dec. 15. How can I achieve that?
I found the following solution on Elasticsearch v7.15.2:
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"term": {
"status": "ok"
}
},
{
"term": {
"deviceId": "123456789"
}
},
{
"range": {
"time": {
"gte": 1669852800,
"lt": 1671062400
}
}
},
{
"script": {
"script": {
"source": "doc.time.value.getHourOfDay() >= params.min && doc.time.value.getHourOfDay() < params.max",
"params": {
"min": 8,
"max": 10
}
}
}
}
]
}
},
"aggs": {
"results": {
"date_histogram": {
"field": "time",
"fixed_interval": "60"
}
}
}
}
The syntax is slightly different from the comment above, but it works.
I have built a query based on the serial_diff aggregation. I am trying to sort the results based on the output of the serial_diff agg, but I am struggling to get the results in order. The query is below.
GET db/_search
{
"size": 0,
"query": {
"bool": {
"must": [
{
"terms": {
"Name": [
"q"
]
}
}
],
"filter": [
{
"range": {
"ts": {
"gte": "2020-03-09T09:00:00.000Z",
"lte": "2020-03-09T12:40:00.000Z",
"format": "date_optional_time"
}
}
}
]
}
},
"aggs": {
"sourceNameCount": {
"cardinality": {
"field": "sourceName"
}
},
"sourceName": {
"terms": {
"size": 100,
"field": "sourceName"
},
"aggs": {
"timeseries": {
"date_histogram": {
"field": "ts",
"min_doc_count": 1,
"interval": "15m",
"order": {
"_key": "asc"
}
},
"aggs": {
"the_sum":{
"avg":{
"field": "libVal"
}
},
"ts_diff":{
"serial_diff": {
"buckets_path": "the_sum",
"lag": 1
}
}
}
}
}
}
}
}
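One direction that might help (a sketch, not something from the original post) is a bucket_sort pipeline aggregation placed as a sibling of ts_diff, which reorders the buckets of its parent date_histogram by the serial_diff value. The agg name "diff_sort" is illustrative:
"diff_sort": {
"bucket_sort": {
"sort": [
{ "ts_diff": { "order": "desc" } }
]
}
}
Note that bucket_sort only reorders its parent's buckets (the timeseries date_histogram here); the terms "order" option cannot reference a pipeline aggregation such as serial_diff, so the outer sourceName buckets cannot be ordered by it directly.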
I have a visualization on an hourly basis. Data from 1 to 2 is displayed at 1 o'clock; I want it to be displayed at 2 o'clock. How can I shift the graph by 1 hour?
This is the query that I'm using:
{
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "*",
"analyze_wildcard": true
}
},
{
"match": {
"server-status.name.keyword": {
"query": "https-x509",
"type": "phrase"
}
}
},
{
"range": {
"server-status.meta.current-time": {
"gte": 1550660541174,
"lte": 1550674941175,
"format": "epoch_millis"
}
}
}
],
"must_not": []
}
},
"size": 0,
"_source": {
"excludes": []
},
"aggs": {
"2": {
"date_histogram": {
"field": "server-status.meta.current-time",
"interval": "1h",
"time_zone": "CST6CDT",
"min_doc_count": 1
},
"aggs": {
"4": {
"terms": {
"field": "server-status.type.keyword",
"include": "http-server",
"size": 500,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"sum": {
"field": "server-status.status-properties.request-rate.value",
"script": "_value/60"
}
},
"3": {
"terms": {
"field": "server-status.name.keyword",
"size": 5,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"sum": {
"field": "server-status.status-properties.request-rate.value",
"script": "_value/60"
}
}
}
}
}
}
}
}
}
}
I would like to shift the values by 1 hour. For example, if the value is 2.0 at 2019-02-20T05:00:00.000-06:00, I want it to be displayed for 2019-02-20T06:00:00.000-06:00.
Just a possible workaround:
Kibana displays time based on the browser timezone. You could set the timezone in the Kibana configuration to a timezone of your interest.
Update:
You could use a date_range aggregation and choose a key for each of those buckets. You will need to generate the aggregation ranges based on your time range and interval.
For example:
"aggs": {
"range": {
"date_range": {
"field": "date",
"ranges": [
{
"key": "bucket1",
"to": "2016/02/01"
},
{
"key": "bucket2",
"from": "2016/02/01",
"to" : "now/d"
}
]
}
}
}
Reference: https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-daterange-aggregation.html
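Applied to the 1-hour shift asked about above, that would mean generating one range per hour in the window and keying each bucket by the end of the hour it covers. A rough sketch using the timestamps from the question (the agg name "shifted" is illustrative, the existing sum sub-aggregations would be nested under it in place of the date_histogram, and depending on the field's date format a "format" parameter may also be needed):
"aggs": {
"shifted": {
"date_range": {
"field": "server-status.meta.current-time",
"ranges": [
{
"key": "2019-02-20T06:00:00.000-06:00",
"from": "2019-02-20T05:00:00.000-06:00",
"to": "2019-02-20T06:00:00.000-06:00"
},
{
"key": "2019-02-20T07:00:00.000-06:00",
"from": "2019-02-20T06:00:00.000-06:00",
"to": "2019-02-20T07:00:00.000-06:00"
}
]
}
}
}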
I would like to average out per-minute data to hourly values after a certain time period. What would the query for that be?
The query structure is:
GET ml_test_meters-2019_6/_search
{
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "*",
"analyze_wildcard": true
}
},
{
"range": {
"host-status.meta.current-time": {
"gte": 1549611907552,
"lte": 1549654551498,
"format": "epoch_millis"
}
}
}
],
"must_not": []
}
},
"size": 0,
"_source": {
"excludes": []
},
"aggs": {
"2": {
"date_histogram": {
"field": "host-status.meta.current-time",
"interval": "1h",
"time_zone": "US/Central",
"min_doc_count": 1
},
"aggs": {
"3": {
"terms": {
"field": "host-status.name.keyword",
"size": 500,
"order": {
"1": "desc"
}
},
"aggs": {
"1": {
"avg": {
"field": "host-status.status-properties.status-detail.total-cpu-stat-iowait"
}
}
}
}
}
}
}
}
What could be a possible solution? I would like to insert the new hourly data into the same index later on and delete the per-minute data.
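A search alone won't rewrite any documents, so one common pattern (a sketch of the idea, not something from the original post) is to have a client read the hourly averages from the aggregation above, index them back with the bulk API, and then remove the per-minute documents with _delete_by_query. This assumes the new hourly documents can be told apart from the per-minute ones (for example by a marker field or a separate index), since a plain time-range delete would otherwise remove them too. The delete step might look roughly like this, with a placeholder cutoff:
POST ml_test_meters-2019_6/_delete_by_query
{
"query": {
"range": {
"host-status.meta.current-time": {
"lte": 1549654551498,
"format": "epoch_millis"
}
}
}
}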
I have a filtered Elasticsearch query that works, but I want to use minimum_should_match to instruct ES to return only results that match at least 3 of the should clauses. I can't seem to figure out where to put minimum_should_match. Where should it go?
{
"size": 100,
"sort": {
"price_monthly": "asc"
},
"query": {
"filtered": {
"query": {
"match_all": []
},
"filter": {
"bool": {
"must": [],
"should": [
[
{
"range": {
"mb.untouched": {
"gte": "0",
"lt": "500"
}
}
},
{
"range": {
"mb.untouched": {
"gte": "500",
"lt": "1000"
}
}
}
],
[
{
"range": {
"minutes.untouched": {
"gte": "0",
"lt": "100"
}
}
},
{
"range": {
"minutes.untouched": {
"gte": "200",
"lt": "300"
}
}
}
],
[
{
"range": {
"sms.untouched": {
"gte": "750",
"lt": "1000"
}
}
}
]
],
"must_not": {
"missing": {
"field": "provider.untouched"
}
}
}
},
"strategy": "query_first"
}
},
"aggs": {
"provider.untouched": {
"terms": {
"field": "provider.untouched"
}
},
"prolong.untouched": {
"terms": {
"field": "prolong.untouched"
}
},
"duration.untouched": {
"terms": {
"field": "duration.untouched"
}
},
"mb.untouched": {
"histogram": {
"field": "mb.untouched",
"interval": 500,
"min_doc_count": 1
}
},
"sms.untouched": {
"histogram": {
"field": "sms.untouched",
"interval": 250,
"min_doc_count": 1
}
},
"minutes.untouched": {
"histogram": {
"field": "minutes.untouched",
"interval": 100,
"min_doc_count": 1
}
},
"price_monthly.untouched": {
"histogram": {
"field": "price_monthly.untouched",
"interval": 5,
"min_doc_count": 1
}
}
}
}
In order to use minimum_should_match, you need to rewrite your filtered query a little bit, i.e. you need to move your should clause to the query part of the filtered query and just keep must_not in the filter part (because missing is a filter). Then you can add minimum_should_match: 3 in the bool query part as shown below:
{
"size": 100,
"sort": {
"price_monthly": "asc"
},
"query": {
"filtered": {
"query": {
"bool": {
"minimum_should_match": 3,
"must": [],
"should": [
[
{
"range": {
"mb.untouched": {
"gte": "0",
"lt": "500"
}
}
},
{
"range": {
"mb.untouched": {
"gte": "500",
"lt": "1000"
}
}
}
],
[
{
"range": {
"minutes.untouched": {
"gte": "0",
"lt": "100"
}
}
},
{
"range": {
"minutes.untouched": {
"gte": "200",
"lt": "300"
}
}
}
],
[
{
"range": {
"sms.untouched": {
"gte": "750",
"lt": "1000"
}
}
}
]
]
}
},
"filter": {
"bool": {
"must_not": {
"missing": {
"field": "provider.untouched"
}
}
}
},
"strategy": "query_first"
}
},
"aggs": {
"provider.untouched": {
"terms": {
"field": "provider.untouched"
}
},
"prolong.untouched": {
"terms": {
"field": "prolong.untouched"
}
},
"duration.untouched": {
"terms": {
"field": "duration.untouched"
}
},
"mb.untouched": {
"histogram": {
"field": "mb.untouched",
"interval": 500,
"min_doc_count": 1
}
},
"sms.untouched": {
"histogram": {
"field": "sms.untouched",
"interval": 250,
"min_doc_count": 1
}
},
"minutes.untouched": {
"histogram": {
"field": "minutes.untouched",
"interval": 100,
"min_doc_count": 1
}
},
"price_monthly.untouched": {
"histogram": {
"field": "price_monthly.untouched",
"interval": 5,
"min_doc_count": 1
}
}
}
}
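As a side note, the filtered query used above is the old pre-2.x syntax and was removed in later Elasticsearch versions. On newer versions the same idea is expressed with a single bool query: the should clauses sit next to minimum_should_match (each group of alternative ranges would become its own nested bool), and the must_not + missing combination becomes a filter on an exists query, since excluding documents where the field is missing is the same as requiring it to exist. A rough sketch with one representative clause per group (the sort and aggs sections carry over unchanged):
{
"query": {
"bool": {
"minimum_should_match": 3,
"should": [
{ "range": { "mb.untouched": { "gte": 0, "lt": 500 } } },
{ "range": { "minutes.untouched": { "gte": 0, "lt": 100 } } },
{ "range": { "sms.untouched": { "gte": 750, "lt": 1000 } } }
],
"filter": {
"exists": { "field": "provider.untouched" }
}
}
}
}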