How to calculate starttime & endtime using painless script - elasticsearch

I am working with ELK and I have created an index pattern with build_date. Now, to calculate the average build duration, I need to find the start-time and end-time in minutes using a painless script.
My logstash output data given below
"build_end_time" => "2021-01-13 01:29:49",
"build_duration" => "6409651",
"build_start_time" => "2021-01-12 23:43:00",
"build_date" => "2021-01-12",
"#timestamp" => 2021-02-02T11:40:50.747Z,
Scripted field settings given below.
Name: Duration_time
Language: painless
Type: number
Format: Duration
Input format: minutes
Output format: Human readable
Popularity: 0
Script: def doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay
it throws - Script is invalid.
{
"root_cause": [
{
"type": "script_exception",
"reason": "compile error",
"script_stack": [
"def doc['build_end_time'].date.m ...",
" ^---- HERE"
],
"script": "def doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay",
"lang": "painless",
"position": {
"offset": 7,
"start": 0,
"end": 32
}
}
],
"type": "search_phase_execution_exception",
"reason": "all shards failed",
"phase": "query",
"grouped": true,
"failed_shards": [
{
"shard": 0,
"index": "build-logs",
"node": "JSvuaBbCQr6uI5qKvavj7Q",
"reason": {
"type": "script_exception",
"reason": "compile error",
"script_stack": [
"def doc['build_end_time'].date.m ...",
" ^---- HERE"
],
"script": "def doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay",
"lang": "painless",
"position": {
"offset": 7,
"start": 0,
"end": 32
},
"caused_by": {
"type": "illegal_argument_exception",
"reason": "invalid sequence of tokens near ['['].",
"caused_by": {
"type": "no_viable_alt_exception",
"reason": null
}
}
}
}
]
}
Can someone help me get this to work?

This looks like a syntactic error.
Instead of
def doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay
use
return doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay
The return is actually not required -- you can leave it out entirely.
The def keyword defines something. So you could in theory say:
def result = doc['build_end_time'].date.millisOfDay - doc['build_start_time'].date.millisOfDay
but you'd need to return something -- so:
def result = '...'; return result

Related

Elasticsearch failed to execute script

Elasticsearch version 7.7.0
This is the part of the mapping:
const PROFILE_MAPPING = {
mappings: {
properties: {
_userLocation: {
type: "geo_point"
},
_ignoredBy: {
type: "nested"
}
}
}
};
_ignoredBy data example:
[{
"until" : "2020-12-03T16:20:43.176Z",
"user" : <USER_ID>
}]
and this is the script I'm running to update it:
await client.update({
index,
id: target,
refresh: "wait_for",
body: {
script: {
source:
"ctx._source._ignoredBy.removeIf(item -> item.user ==
params.by.user);ctx._source._ignoredBy.add(params.by)",
params: {
by: {
user: initiator,
until: addSeconds(new Date(), ignoreInterval)
}
}
}
}
});
and this is the error I'm getting:
{
"error": {
"root_cause": [
{
"type": "illegal_argument_exception",
"reason": "failed to execute script"
}
],
"type": "illegal_argument_exception",
"reason": "failed to execute script",
"caused_by": {
"type": "script_exception",
"reason": "runtime error",
"script_stack": ["item -> item.user == params.by.user);", "^---- HERE"],
"script": "ctx._source._ignoredBy.removeIf(item -> item.user == params.by.user);ctx._source._ignoredBy.add(params.by)",
"lang": "painless",
"position": { "offset": 32, "start": 32, "end": 69 },
"caused_by": { "type": "null_pointer_exception", "reason": null }
}
},
"status": 400
}
The weird thing is that this works 99% of the time, but errors keep appearing in the logs and I can't figure out the reason. The params passed in are definitely there, as they appear in the logs.
Such null pointers are hard to wrap one's head around but my hunch is that there's something off with ctx._source._ignoredBy itself.
In that spirit, I'd suggest to add one more check before I'm calling .removeIf on it -- perhaps initialize it in case it's null:
{
"script": {
"source": "if (ctx._source._ignoredBy == null) {ctx._source._ignoredBy = []; } ctx._source._ignoredBy.removeIf(item -> item.user == params.by.user); ctx._source._ignoredBy.add(params.by)",
"params": {
...
}
}
}

How to update by query with script and nested new fields in elasticsearch?

I need to update my mapping in elastic
here is example:
current mapping
{
filed1: 6,
filed2: "some string"
}
I need update it to this
{
outer: {
filed1: 6,
filed2: "some string"
}
}
I do it with update_by_query api and this request
{
"script": {
"source": "ctx._source.outer.field1 = ctx._source.field1; ctx._source.outer.field2 = ctx._source.field2;",
"lang": "painless"
},
}
but I got null pointer exception because there is no outer in documents yet
"type": "script_exception",
"reason": "compile error",
"script_stack": [
"... ctx._source.outer.fiel ...",
" ^---- HERE"
],
How could I change request?
You need to do it this way:
"source": "ctx._source.outer = ['field1': ctx._source.remove('field1'), 'field2': ctx._source.remove('field2')];",

How to update a field with different values based on previous value in Elasticsearch?

I want to update a field with a new value depending on its previous value. E.g: if field 'set' values are either 'aaa' or 'bbb', I want to provide a list of new values so that, say, 'aaa' becomes 'ccc' and 'bbb' becomes 'ddd'.
This query is were I am stuck:
POST my_index/_update_by_query?conflicts=proceed
{
"query": {
"terms": {"set.keyword": ["aaa", "bbb"]}
},
"script": {
"inline": "ctx._source.set = 'ccc'; ctx._source.set = 'ddd';"
}
}
Instead of getting different updated values ('ccc' or 'ddd' depending on which was the previous value), all values are updated to 'ddd'. I suspect it is updating all values twice.
Using Val's query below, I get the following output:
{
"error": {
"root_cause": [
{
"type": "script_exception",
"reason": "runtime error",
"script_stack": [
"ctx._source.set = ctx._source.set.stream().map(elem -> {\n ",
" ^---- HERE"
],
"script": " ctx._source.set = ctx._source.set.stream().map(elem -> {\n if (params[elem] != null) {\n return params[elem];\n } else {\n return elem;\n }\n }).collect(Collectors.toList());",
"lang": "painless"
}
],
"type": "script_exception",
"reason": "runtime error",
"script_stack": [
"ctx._source.set = ctx._source.set.stream().map(elem -> {\n ",
" ^---- HERE"
],
"script": " ctx._source.set = ctx._source.set.stream().map(elem -> {\n if (params[elem] != null) {\n return params[elem];\n } else {\n return elem;\n }\n }).collect(Collectors.toList());",
"lang": "painless",
"caused_by": {
"type": "illegal_argument_exception",
"reason": "Unable to find dynamic method [stream] with [0] arguments for class [java.lang.String]."
}
},
"status": 500
}
Mapping does not explicitly mention 'set' field:
MY_MAPPING = '''{
"mappings": {
"data_type": {
"properties": {
"delivered": {
"type": "date",
"format": "yyyy-MM-dd"
},
"requested": {
"type": "date",
"format": "yyyy-MM-dd"
},
"location": {
"type": "geo_point"
}
}
}
}
}'''
Taking a look at my index, I have 'set' as a searchable string and 'set.keyword', also a string, that is searchable and aggregatable.
I would do it like this:
POST my_index/_update_by_query?conflicts=proceed
{
"query": {
"terms": {"set.keyword": ["aaa", "bbb"]}
},
"script": {
"source": """
def currentSet = ctx._source.set;
ctx._source.set = (params[currentSet] != null) ? params[currentSet] : currentSet;
""",
"params": {
"aaa": "ccc",
"bbb": "ddd"
}
}
}
In other terms, the script looks up the current value of the set field in the params map: if a new value is defined for it there, the field is replaced with that new value; otherwise the old value is kept as is.
If your set is ["aaa", "bbb", "xxx"] then after updating your index, it would contain ["ccc", "ddd", "xxx"]

Using ElasticSearch painless scripting to compute dayOfWeek or timeOfDay

I use ES 5.1.2 and I'm trying to compute day of week and time of day from a date field and consider timezone at the same time.
my first script is def d = doc['my_field'].date; d.addHours(10); d.getDayOfWeek();
The error message is can't find addHours() method
"caused_by": {
"type": "illegal_argument_exception",
"reason": "Unable to find dynamic method [addHours] with [1] arguments for class [org.joda.time.MutableDateTime]."
},
"script_stack": [
"d.addHours(10); ",
" ^---- HERE"
],
If I change script to MutableDateTime d = doc['my_field'].date; d.addHours(10); d.getDayOfWeek(); The error message becomes
"caused_by": {
"type": "illegal_argument_exception",
"reason": "unexpected token ['d'] was expecting one of [{<EOF>, ';'}]."
},
"script_stack": [
"MutableDateTime d = doc['relation_denstu. ...",
" ^---- HERE"
],
Without addHours to adjust timezone, everything is fine. But if I try to adjust timezone dynamically, everything failed. Any help?
I've been struggling with it as well. This works in Elastic 5:
GET /unittesttg1_tg1_fq1/_search
{
"size": 0,
"aggs": {
"groupby": {
"terms": {
"script": "ZonedDateTime.ofInstant(Instant.ofEpochMilli(doc['LAST_MODIFIED_DATE'].value), ZoneId.of('+10:00')).getDayOfWeek()"
}
}
}
}

elasticsearch:script sometimes works ok sometimes throw an exception

My elasticsearch script sometimes works fine, and sometimes throws an exception, such as:
{
"error": {
"root_cause": [
{
"type": "remote_transport_exception",
"reason": "[es77][ip:9300] [indices:data/write/update[s]]"
}
],
"type": "illegal_argument_exception",
"reason": "failed to execute script",
"caused_by": {
"type": "script_exception",
"reason": "failed to run inline script [newArray = [];ctx._source.CILastCallResultRemark?.each{ obj->if(obj.id!=item.id){newArray=newArray+obj} }; (ctx._source.CILastCallResultRemark=newArray+item)] using lang [groovy]",
"caused_by": {
"type": "no_class_def_found_error",
"reason": "sun/reflect/MethodAccessorImpl",
"caused_by": {
"type": "class_not_found_exception",
"reason": "sun.reflect.MethodAccessorImpl"
}
}
}
},
"status": 400
}
Here is the script:
{
"script": {
"inline": "newArray = [];ctx._source.CILastCallResultRemark?.each{ obj->if(obj.id!=item.id){newArray=newArray+obj}};(ctx._source.CILastCallResultRemark=newArray+item)",
"params": {
"item": {
"id": "2",
"remart": "x1"
}
}
}
}
And here is the es log:
Caused by: ScriptException[failed to run inline script [newArray = [];ctx._source.CILastCallResultRemark?.each{ obj->if(obj.id!=item.id){newArray=newArray+obj}};(ctx._source.CILastCallResultRemark=newArray+item)] using lang [groovy]]; nested: NoClassDefFoundError[sun/reflect/MethodAccessorImpl]; nested: ClassNotFoundException[sun.reflect.MethodAccessorImpl];
at org.elasticsearch.script.groovy.GroovyScriptEngineService$GroovyScript.run(GroovyScriptEngineService.java:318)
at org.elasticsearch.action.update.UpdateHelper.executeScript(UpdateHelper.java:251)
... 12 more
Caused by: java.lang.NoClassDefFoundError: sun/reflect/MethodAccessorImpl
I see the bug. I will update the ES version and try again.

Resources