Sorting large array in power automate - power-automate

I have around 700 resources, each with a metrics array containing 1300+ float values.
I am trying to compute 80th percentile of this large array of values in Power automate.
My issue is that it's taking 10-15 min per case; I also have to construct a separate array, because my source array sometimes has a non-existent metric property.
my source array looks like this (having 1392 elements)
{
"data":[
{
"timestamp":"2022-11-11T07:00:00Z",
"average":34.4
},
{
"timestamp": "2022-12-10T06:00:00Z"
}
]}
I tried various ways
I made another array first: wherever average was not present, I added a zero to the result array,
then sorted this array ( via sort expression)
applied formula for 80th percentile to get right index
extracted that index value from array.
Step 1 is unfortunately taking a lot of time; Apply to each is being used with max concurrency.
Please advise how we can make this faster.
Thanks

I think you should look at using the Advanced Data Operations connector.
There's an expert function that allows you to run an SQL statement over your dataset. You could filter this data further to everything above the 80th percentile by simply adding a WHERE clause at the end.
This is the flow I tested with ...
... so in that single step, it will do everything you want it to.
This is the SQL statement supplied in the screenshot above ...
SELECT *, PERCENT_RANK() OVER(ORDER BY [Average]) AS [Percentile]
FROM (
SELECT * FROM [Data] WHERE [Average] IS NOT NULL
) AS [NewData]
ORDER BY [Percentile] DESC
Note: To make sure it's producing the correct result, I've removed all items that have a null average value.
This is the resulting flow which took a mere 6 seconds to run ...
... and here is the resulting JSON array with the data ...
[
{
"average": 22.674375,
"timeStamp": "2022-11-11T07:30:00",
"Percentile": 1
},
{
"average": 22.1976666666667,
"timeStamp": "2022-11-11T19:30:00",
"Percentile": 0.985915492957746
},
{
"average": 22.1189166666667,
"timeStamp": "2022-11-11T07:00:00",
"Percentile": 0.971830985915493
},
{
"average": 21.85075,
"timeStamp": "2022-11-11T08:30:00",
"Percentile": 0.957746478873239
},
{
"average": 21.2718020833333,
"timeStamp": "2022-11-12T00:00:00",
"Percentile": 0.943661971830986
},
{
"average": 21.2456145833333,
"timeStamp": "2022-11-11T16:30:00",
"Percentile": 0.929577464788732
},
{
"average": 21.1545520833333,
"timeStamp": "2022-11-11T09:30:00",
"Percentile": 0.915492957746479
},
{
"average": 21.09884375,
"timeStamp": "2022-11-11T15:00:00",
"Percentile": 0.901408450704225
},
{
"average": 21.0786145833333,
"timeStamp": "2022-11-12T09:30:00",
"Percentile": 0.887323943661972
},
{
"average": 20.9709270833333,
"timeStamp": "2022-11-11T19:00:00",
"Percentile": 0.873239436619718
},
{
"average": 20.7876979166667,
"timeStamp": "2022-11-12T06:30:00",
"Percentile": 0.859154929577465
},
{
"average": 20.7539270833333,
"timeStamp": "2022-11-12T05:00:00",
"Percentile": 0.845070422535211
},
{
"average": 20.724375,
"timeStamp": "2022-11-12T16:30:00",
"Percentile": 0.830985915492958
},
{
"average": 20.7124479166667,
"timeStamp": "2022-11-11T15:30:00",
"Percentile": 0.816901408450704
},
{
"average": 20.7032708333333,
"timeStamp": "2022-11-12T13:30:00",
"Percentile": 0.802816901408451
},
{
"average": 20.6885104166667,
"timeStamp": "2022-11-11T08:00:00",
"Percentile": 0.788732394366197
},
{
"average": 20.6832083333333,
"timeStamp": "2022-11-11T13:30:00",
"Percentile": 0.774647887323944
},
{
"average": 20.6540208333333,
"timeStamp": "2022-11-11T10:30:00",
"Percentile": 0.76056338028169
},
{
"average": 20.6439166666667,
"timeStamp": "2022-11-12T04:30:00",
"Percentile": 0.746478873239437
},
{
"average": 20.59803125,
"timeStamp": "2022-11-12T07:00:00",
"Percentile": 0.732394366197183
},
{
"average": 20.5738125,
"timeStamp": "2022-11-11T06:30:00",
"Percentile": 0.71830985915493
},
{
"average": 20.5018645833333,
"timeStamp": "2022-11-12T00:30:00",
"Percentile": 0.704225352112676
},
{
"average": 20.500875,
"timeStamp": "2022-11-11T14:30:00",
"Percentile": 0.690140845070423
},
{
"average": 20.46678125,
"timeStamp": "2022-11-11T16:00:00",
"Percentile": 0.676056338028169
},
{
"average": 20.43015625,
"timeStamp": "2022-11-12T07:30:00",
"Percentile": 0.661971830985916
},
{
"average": 20.4273020833333,
"timeStamp": "2022-11-11T11:00:00",
"Percentile": 0.647887323943662
},
{
"average": 20.4161041666667,
"timeStamp": "2022-11-11T21:30:00",
"Percentile": 0.633802816901409
},
{
"average": 20.3894791666667,
"timeStamp": "2022-11-11T10:00:00",
"Percentile": 0.619718309859155
},
{
"average": 20.3787604166667,
"timeStamp": "2022-11-11T17:30:00",
"Percentile": 0.605633802816901
},
{
"average": 20.3724270833333,
"timeStamp": "2022-11-11T09:00:00",
"Percentile": 0.591549295774648
},
{
"average": 20.3398333333333,
"timeStamp": "2022-11-12T12:30:00",
"Percentile": 0.577464788732394
},
{
"average": 20.2841145833333,
"timeStamp": "2022-11-12T08:30:00",
"Percentile": 0.563380281690141
},
{
"average": 20.2529449152542,
"timeStamp": "2022-11-11T13:00:00",
"Percentile": 0.549295774647887
},
{
"average": 20.2286875,
"timeStamp": "2022-11-11T17:00:00",
"Percentile": 0.535211267605634
},
{
"average": 20.22403125,
"timeStamp": "2022-11-11T14:00:00",
"Percentile": 0.52112676056338
},
{
"average": 20.2209375,
"timeStamp": "2022-11-11T11:30:00",
"Percentile": 0.507042253521127
},
{
"average": 20.1904895833333,
"timeStamp": "2022-11-12T06:00:00",
"Percentile": 0.492957746478873
},
{
"average": 20.1900729166667,
"timeStamp": "2022-11-11T12:30:00",
"Percentile": 0.47887323943662
},
{
"average": 20.148125,
"timeStamp": "2022-11-12T01:30:00",
"Percentile": 0.464788732394366
},
{
"average": 20.129625,
"timeStamp": "2022-11-11T23:30:00",
"Percentile": 0.450704225352113
},
{
"average": 20.0616666666667,
"timeStamp": "2022-11-12T14:30:00",
"Percentile": 0.436619718309859
},
{
"average": 20.0449895833333,
"timeStamp": "2022-11-12T10:00:00",
"Percentile": 0.422535211267606
},
{
"average": 20.0333645833333,
"timeStamp": "2022-11-11T12:00:00",
"Percentile": 0.408450704225352
},
{
"average": 19.9994583333333,
"timeStamp": "2022-11-11T21:00:00",
"Percentile": 0.394366197183099
},
{
"average": 19.9905,
"timeStamp": "2022-11-12T11:00:00",
"Percentile": 0.380281690140845
},
{
"average": 19.9578125,
"timeStamp": "2022-11-12T09:00:00",
"Percentile": 0.366197183098592
},
{
"average": 19.94271875,
"timeStamp": "2022-11-12T05:30:00",
"Percentile": 0.352112676056338
},
{
"average": 19.9379895833333,
"timeStamp": "2022-11-12T10:30:00",
"Percentile": 0.338028169014084
},
{
"average": 19.91659375,
"timeStamp": "2022-11-11T18:00:00",
"Percentile": 0.323943661971831
},
{
"average": 19.8985104166667,
"timeStamp": "2022-11-12T01:00:00",
"Percentile": 0.309859154929577
},
{
"average": 19.8849479166667,
"timeStamp": "2022-11-12T14:00:00",
"Percentile": 0.295774647887324
},
{
"average": 19.8658229166667,
"timeStamp": "2022-11-12T15:00:00",
"Percentile": 0.28169014084507
},
{
"average": 19.85403125,
"timeStamp": "2022-11-11T18:30:00",
"Percentile": 0.267605633802817
},
{
"average": 19.8325,
"timeStamp": "2022-11-11T22:30:00",
"Percentile": 0.253521126760563
},
{
"average": 19.7983958333333,
"timeStamp": "2022-11-12T04:00:00",
"Percentile": 0.23943661971831
},
{
"average": 19.7926145833333,
"timeStamp": "2022-11-12T15:30:00",
"Percentile": 0.225352112676056
},
{
"average": 19.780875,
"timeStamp": "2022-11-11T20:30:00",
"Percentile": 0.211267605633803
},
{
"average": 19.7798333333333,
"timeStamp": "2022-11-11T23:00:00",
"Percentile": 0.197183098591549
},
{
"average": 19.76359375,
"timeStamp": "2022-11-12T11:30:00",
"Percentile": 0.183098591549296
},
{
"average": 19.7552604166667,
"timeStamp": "2022-11-12T08:00:00",
"Percentile": 0.169014084507042
},
{
"average": 19.695125,
"timeStamp": "2022-11-12T13:00:00",
"Percentile": 0.154929577464789
},
{
"average": 19.50509375,
"timeStamp": "2022-11-11T20:00:00",
"Percentile": 0.140845070422535
},
{
"average": 19.4972291666667,
"timeStamp": "2022-11-12T16:00:00",
"Percentile": 0.126760563380282
},
{
"average": 19.4948541666667,
"timeStamp": "2022-11-11T22:00:00",
"Percentile": 0.112676056338028
},
{
"average": 19.4910104166667,
"timeStamp": "2022-11-12T12:00:00",
"Percentile": 0.0985915492957746
},
{
"average": 19.4601979166667,
"timeStamp": "2022-11-12T03:00:00",
"Percentile": 0.0845070422535211
},
{
"average": 19.2794375,
"timeStamp": "2022-11-12T03:30:00",
"Percentile": 0.0704225352112676
},
{
"average": 15.9129791666667,
"timeStamp": "2022-11-12T02:30:00",
"Percentile": 0.0563380281690141
},
{
"average": 15.20909375,
"timeStamp": "2022-11-12T02:00:00",
"Percentile": 0.0422535211267606
},
{
"average": 11.3621875,
"timeStamp": "2022-11-12T17:00:00",
"Percentile": 0.028169014084507
},
{
"average": 9.31830208333333,
"timeStamp": "2022-11-12T17:30:00",
"Percentile": 0.0140845070422535
},
{
"average": 9.08600961538462,
"timeStamp": "2022-11-12T18:00:00",
"Percentile": 0
}
]
It should be noted that the Expert function is a part of the Standard pricing model. It's very affordable but whether you deem that so or not is very much relative.

Related

The Vega-lite x-axis displays the specified number of data

Here is my generated pic. My x-axis is too crowded, so if I want to display only a specified number of values, how can I fix my code?
I tried to cut my data source from ES DSL as my breakthrough point, but cumulative_sum needs complete data source.
{
"$schema": "https://vega.github.io/schema/vega-lite/v2.json",
"data": {
"url": {
"index": "x-*",
"body": {
"query": {"match_all": {}},
"size": 0,
"aggs": {
"group_by_date": {
"date_histogram": {
"field": "timestamp",
"interval": "day"
},
"aggs": {
"cumulative_docs": {
"cumulative_sum": {"buckets_path": "_count"}
}
}
}
}
}
},
"format": {
"property": "aggregations.group_by_date.buckets"
}
},
"width": "container",
"height": 1200,
"layer": [
{
"mark": {
"type": "line",
"point": {"filled": false, "fill": "black"}
}
},
{
"mark": {
"type": "text",
"align": "left",
"baseline": "middle",
"fontSize": 15
},
"encoding": {
"text": {"field": "cumulative_docs.value", "type": "quantitative"}
}
}
],
"encoding": {
"x": {
"axis": {"title": "date"},
"field": "key_as_string",
"type": "nominal"
},
"y": {
"aggregate": "y",
"axis": {"title": "project_num"},
"field": "cumulative_docs.value",
"type": "quantitative",
"stack": "normalize"
}
}
}
It is difficult to debug without actual data but what happens if you change this to temporal?
"x": {
"axis": {"title": "date"},
"field": "key_as_string",
"type": "temporal"
},
EDIT
Remove this section.
{
"mark": {
"type": "text",
"align": "left",
"baseline": "middle",
"fontSize": 15
},
"encoding": {
"text": {"field": "cumulative_docs.value", "type": "quantitative"}
}
}

How to pass variable `ctx.payload.hits.total` of the 'hit' log into transform-script of webhook watcher?

I have following webhook watcher which is working perfectly creating OTRS ticket when word "Error" appears in logs.
{
"trigger": {
"schedule": {
"interval": "1m"
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"must": [
{
"query_string": {
"query": "Error"
}
},
{
"range": {
"#timestamp": {
"gte": "now-1m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gte": 1
}
}
},
"actions": {
"create_otrs": {
"transform": {
"script": {
"source": "return ['Ticket':['Queue':'Engineering Team','Priority':'P3','CustomerUser':'root','Title':'RESTCreateTest','State':'new','Type':'Incident'],'Article':['ContentType':'text/plain;charset=utf8','Subject':'RestCreateTest','Body':'This is only a test']]",
"lang": "painless"
}
},
"webhook": {
"scheme": "http",
"host": "myotrs.com",
"port": 80,
"method": "post",
"path": "/otrs/GenericTicketConnectorREST/User=<User>&Pass=<Password>",
},
"headers": {},
"body": "{{#toJson}}ctx.payload{{/toJson}}"
}
}
}
}
However, right now the functionality is limited as it creates a ticket with the fixed body This is only a test. I want to pass the exact error message body of the log into the ticket body. How do I pass variables into the script body?
I have included one of the hits, it also shows the document I am trying to access. I need the "ctx.payload.hits.total" part of the document-hit to be the body of the ticket.
{
"_index": ".ds-logs-elastic_agent.filebeat-default",
"_source": {
"input_source": "https://ser.example.com:80/export",
"agent": {
"name": "syslog01",
"id": "5836558b-b17d-445e",
"type": "filebeat",
"ephemeral_id": "36bdfeca-3c60",
"version": "8.3.3"
},
"service.name": "filebeat",
"log": {
"file": {
"path": "/opt/Elastic/Agent/data/elastic-agent-0ffbed/logs/default/filebeat-20230127-12.ndjson"
},
"offset": 248078415
},
"elastic_agent": {
"id": "5836558b-b17d",
"version": "8.3.3",
"snapshot": false
},
"message": """Error while processing http request: failed to execute rf.collectResponse: failed to execute http client.Do: failed to execute http client.Do: Post "https://ser.example.com:80/export": POST https://ser.example.com:80/export giving up after 6 attempts""",
"log.logger": "input.httpjson-cursor",
"input": {
"type": "filestream"
},
"log.origin": {
"file.line": 128,
"file.name": "httpjson/input.go"
},
"#timestamp": "2023-01-27T14:44:42.790Z",
"ecs": {
"version": "8.0.0"
},
"data_stream": {
"namespace": "default",
"type": "logs",
"dataset": "elastic_agent.filebeat"
},
"host": {
"hostname": "syslog01",
"os": {
"kernel": "3.10.25-gentoo",
"name": "Gentoo",
"type": "linux",
"family": "",
"version": "",
"platform": "gentoo"
},
"containerized": false,
"log.level": "error",
"input_url": "https://ser.example.com:8089/export",
"id": "httpjson-system.security-ba2ec41b-457b-442a",
"event": {
"agent_id_status": "verified",
"ingested": "2023-01-27T14:44:58Z",
"dataset": "elastic_agent.filebeat"
}
},
"_id": "pCWw84UB8FDLddfs",
"_score": 2.2840834
}
}

ElasticSearch multilayer nested properties

I have an index mapping like this
"mappings": {
"properties": {
"filter": {
"type": "nested",
"properties": {
"Hersteller": {
"type": "nested",
"properties": {
"id": {
"type": "text",
"analyzer": "analyzerFilter",
"fielddata": true
},
"value": {
"type": "text",
"analyzer": "analyzerFilter",
"fielddata": true
}
}
},
"Modell": {
"type": "nested",
"properties": {
"id": {
"type": "text",
"analyzer": "analyzerFilter",
"fielddata": true
},
"value": {
"type": "text",
"analyzer": "analyzerFilter",
"fielddata": true
}
}
}
}
},
"id": {
"type": "text",
"analyzer": "analyzerFilter"
}
}
}
}
There are 2 nested layers, filter.Modell. I need a query to get all unique filter.Modell.value where filter.Hersteller.value equals some predefined value.
I am trying first without any condition
{
"size": 4,
"aggs": {
"distinct_filter": {
"nested": { "path": "filter" },
"aggs": {
"distinct_filter_modell": {
"nested": {
"path": "filter.Modell",
"aggs": {
"distinct_filter_modell_value": {
"terms": { "field": "filter.Modell.value" }
}
}
}
}
}
}
}
}
And I get issue like
{
"error": {
"root_cause": [
{
"type": "parsing_exception",
"reason": "Unexpected token START_OBJECT in [distinct_filter_modell].",
"line": 1,
"col": 144
}
],
"type": "parsing_exception",
"reason": "Unexpected token START_OBJECT in [distinct_filter_modell].",
"line": 1,
"col": 144
},
"status": 400
}
Thanks in advance

Elastic Watcher not returning results

I am trying to simulate a watch and see if the actions are triggering fine. But my problem is the search returns no results.
My query
Checks for a particular index.
Checks for a range
Check for the servicename field to be a particular value.
This is my watch definition
{
"trigger": {
"schedule": {
"interval": "10m"
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"body": {
"query": {
"bool": {
"filter": [
{
"term": {
"level": {
"value": "ERROR"
}
}
},
{
"term": {
"servicename": [
"Iit.Det.Urm.MepsSubscriber"
]
}
},
{
"range": {
"#timestamp": {
"gte": "now-60m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gt": 0
}
}
},
"actions": {
"notify-slack": {
"slack": {
"account": "elastic_watcher_alerts",
"proxy": {
"host": "proxy.dom",
"port": 80
},
"message": {
"from": "Error Monitor",
"to": [
"#det-errors"
],
"text": "The following error(s) have been logged",
"dynamic_attachments": {
"list_path": "ctx.payload.items",
"attachment_template": {
"color": "#f00",
"title": "{{msg}}",
"title_link": "https://elastic.mid.dom:port/{{index}}/doc/{{id}}?pretty",
"text": "{{msg}}",
"fields": [
{
"title": "Server",
"value": "{{host}}",
"short": true
},
{
"title": "Servicename",
"value": "{{service}}",
"short": true
}
]
}
}
}
}
}
},
"transform": {
"script": {
"source": "['items': ctx.payload.hits.hits.collect(hit -> ['msg': hit._source.message, 'service': hit._source.servicename, 'index': hit._index, 'id' : hit._id, 'host': hit._source.agent.hostname ])]",
"lang": "painless"
}
}
}
I am trying to now test it by using the simulate option and giving it an input. This input is copied from actual data that is in the index. I copied a json document from kibana (in the discover section), so the alternate input json should be ok
Here's the alternative input
{
"_index": "datasolutions-svc-live-7.7.0-2021.01",
"_type": "doc",
"_id": "Hre9SHcB1QIqYEnyxSCw",
"_version": 1,
"_score": null,
"_source": {
"exception": "System.Data.SqlClient.SqlException (0x80131904): blabla",
"agent": {
"hostname": "SATSVC3-DK1",
"name": "datasolutions-svc-live",
"id": "8c826ae1-e411-4257-a31f-08824dd58b5a",
"type": "filebeat",
"ephemeral_id": "e355bf8a-be67-4ed1-85f4-b9043674700e",
"version": "7.7.0"
},
"log": {
"file": {
"path": "D:\\logs\\7DaysRetention\\Iit.Det.Urm.MepsSubscriber\\Iit.Det.Urm.MepsSubscriber.log.20210128.log"
},
"offset": 17754757
},
"level": "ERROR",
"message": "Error while starting service.",
"#timestamp": "2021-02-17T10:00:28.343Z",
"ecs": {
"version": "1.5.0"
},
"host": {
"name": "datasolutions-svc-live"
},
"servicename": "Iit.Det.Urm.MepsSubscriber",
"codelocation": "Iit.Det.Urm.MepsSubscriber.MepsSubscriberService.OnStart:29"
},
"fields": {
"#timestamp": [
"2021-02-17T10:00:28.343Z"
]
},
"highlight": {
"servicename": [
"#kibana-highlighted-field#Iit.Det.Urm.MepsSubscriber#/kibana-highlighted-field#"
]
},
"sort": [
1611833128343
]
}
But when I run "simulate", I get ctx.payload.hits.total as null because apparently it does not find any results. Result of the simulate-
{
"watch_id": "_inlined_",
"node": "eMS-E34eT4-zZhGwtPNSmw",
"state": "execution_not_needed",
"user": "sum",
"status": {
"state": {
"active": true,
"timestamp": "2021-02-17T10:57:04.077Z"
},
"last_checked": "2021-02-17T10:57:04.077Z",
"actions": {
"notify-slack": {
"ack": {
"timestamp": "2021-02-17T10:57:04.077Z",
"state": "awaits_successful_execution"
}
}
},
"execution_state": "execution_not_needed",
"version": -1
},
"trigger_event": {
"type": "manual",
"triggered_time": "2021-02-17T10:57:04.077Z",
"manual": {
"schedule": {
"scheduled_time": "2021-02-17T10:57:04.077Z"
}
}
},
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"filter": [
{
"term": {
"level": {
"value": "ERROR"
}
}
},
{
"term": {
"servicename": [
"Iit.Det.Urm.MepsSubscriber"
]
}
},
{
"range": {
"#timestamp": {
"gte": "now-60m"
}
}
}
]
}
}
}
}
}
},
"condition": {
"compare": {
"ctx.payload.hits.total": {
"gt": 0
}
}
},
"metadata": {
"name": "datasolutions-svc-mepssubscriber",
"xpack": {
"type": "json"
}
},
"result": {
"execution_time": "2021-02-17T10:57:04.077Z",
"execution_duration": 0,
"input": {
"type": "simple",
"status": "success",
"payload": {
"highlight": {
"servicename": [
"#kibana-highlighted-field#Iit.Det.Urm.MepsSubscriber#/kibana-highlighted-field#"
]
},
"_index": "datasolutions-svc-live-7.7.0-2021.01",
"_type": "doc",
"_source": {
"exception": "System.Data.SqlClient.SqlException (0x80131904): blabla",
"agent": {
"hostname": "SATSVC3-DK1",
"name": "datasolutions-svc-live",
"id": "8c826ae1-e411-4257-a31f-08824dd58b5a",
"type": "filebeat",
"ephemeral_id": "e355bf8a-be67-4ed1-85f4-b9043674700e",
"version": "7.7.0"
},
"#timestamp": "2021-02-17T10:00:28.343Z",
"ecs": {
"version": "1.5.0"
},
"log": {
"file": {
"path": "D:\\logs\\7DaysRetention\\Iit.Det.Urm.MepsSubscriber\\Iit.Det.Urm.MepsSubscriber.log.20210128.log"
},
"offset": 17754757
},
"level": "ERROR",
"host": {
"name": "datasolutions-svc-live"
},
"servicename": "Iit.Det.Urm.MepsSubscriber",
"message": "Error while starting service.",
"codelocation": "Iit.Det.Urm.MepsSubscriber.MepsSubscriberService.OnStart:29"
},
"_id": "Hre9SHcB1QIqYEnyxSCw",
"sort": [
1611833128343
],
"_score": null,
"fields": {
"#timestamp": [
"2021-02-17T10:00:28.343Z"
]
},
"_version": 1
}
},
"condition": {
"type": "compare",
"status": "success",
"met": false,
"compare": {
"resolved_values": {
"ctx.payload.hits.total": null
}
}
},
"actions": []
},
"messages": []
}
I am not sure why it can't find the results. Can someone tell me what I am doing wrong?
I was able to solve it using the "inspect" section of discover page of the index.
Finally my input for the watcher query had to be changed to
"input": {
"search": {
"request": {
"search_type": "query_then_fetch",
"indices": [
"datasolutions-svc-*"
],
"rest_total_hits_as_int": true,
"body": {
"query": {
"bool": {
"must": [],
"filter": [
{
"bool": {
"should": [
{
"match_phrase": {
"servicename": "Iit.Det.Urm.MepsSubscriber"
}
}
],
"minimum_should_match": 1
}
},
{
"match_phrase": {
"level": "ERROR"
}
},
{
"range": {
"#timestamp": {
"gte": "now-10m",
"format": "strict_date_optional_time"
}
}
}
],
"should": [],
"must_not": []
}
}
}
}
}
}

Filter elasticsearch by range of date on a string property

I need to query elasticsearch & filter the result to be in a range of dates.
the thing is the date property is mapped as a string.
Is it possible to do so?
This is the search query I'm using:
{
"size": 1,
"from": 0,
"query": {
"bool": {
"must": [
{ "match": { "status": "active" }},
{ "match": { "last_action_state": "accepted" }}
],
"filter": [
{"missing" : { "field" : "store_id" }},
{ "range": { "list_time": { "gte": "2017/01/01 00:00:00", "lte": "2017/03/01 23:59:59", "format": "yyyy/MM/dd HH:mm:ss"}}}
]
}
}
}
The thing is, I have no control over the mapping since it's created automatically by another program which indexes the documents, and I can't change the mapping once it's created.
ps: elasticsearch version: 2.3
UPDATE:
index info:
{
"avindex_v3": {
"aliases": {
"avindex": {}
},
"mappings": {
"ads": {
"properties": {
"account_id": {
"type": "long"
},
"ad_id": {
"type": "long"
},
"ad_params": {
"type": "string"
},
"body": {
"type": "string"
},
"category": {
"type": "long"
},
"city": {
"type": "long"
},
"company_ad": {
"type": "boolean"
},
"email": {
"type": "string"
},
"images": {
"type": "string"
},
"lang": {
"type": "string"
},
"last_action_state": {
"type": "string"
},
"list_date": {
"type": "long"
},
"list_id": {
"type": "long"
},
"list_time": {
"type": "string"
},
"modified_at": {
"type": "string"
},
"modified_ts": {
"type": "double"
},
"name": {
"type": "string"
},
"orig_date": {
"type": "long"
},
"orig_list_time": {
"type": "string"
},
"phone": {
"type": "string"
},
"phone_hidden": {
"type": "boolean"
},
"price": {
"type": "long"
},
"region": {
"type": "long"
},
"status": {
"type": "string"
},
"store_id": {
"type": "long"
},
"subject": {
"type": "string"
},
"type": {
"type": "string"
},
"user_id": {
"type": "long"
}
}
}
},
"settings": {
"index": {
"creation_date": "1493216710928",
"number_of_shards": "5",
"number_of_replicas": "1",
"uuid": "WEHGLF8iRyGk3Xgbmo7H8Q",
"version": {
"created": "2040499"
}
}
},
"warmers": {}
}
}
You can try to give it as a keyword, like this:
{
"range": {
"list_time.keyword": {
"gte": "2020-08-12 22:24:55.56",
"lte": "2020-08-12 22:24:56.56"
}
}
}

Resources