Update inner object element in Json using Gson - java-8

I have the JSON below and need to update an element. The code below works for elements at the top level; how can I extend it to also work for elements inside a nested (inner) object?
JSON:
{
  "name": "George",
  "version": "2.0",
  "reqParams": {
    "headerId": "this needs to be updated",
    "queue": "draft"
  }
}
In the code below I am passing, for example:
keyPath = "headerId"
updateText = "123456"
jsonText = the JSON above
Code:
public String updateValue(String keyPath, String updateText, String jsonText) {
    String[] keys = keyPath.split("/");
    JsonParser jsonParser = new JsonParser();
    JsonObject jsonObject = (JsonObject) jsonParser.parse(jsonText);
    JsonObject returnVal = jsonObject; // This holds the ref to target json object
    JsonPrimitive jp = new JsonPrimitive(updateText);
    String finalKey = keys[keys.length - 1];
    for (String key : keys) {
        if (jsonObject.get(key) != null && jsonObject.get(key).isJsonObject()) {
            jsonObject = (JsonObject) jsonObject.get(key);
        }
    }
    jsonObject.remove(finalKey);
    jsonObject.add(finalKey, jp);
    return returnVal.toString();
}
Expected output JSON:
{
  "name": "George",
  "version": "2.0",
  "reqParams": {
    "headerId": "123456",
    "queue": "draft"
  }
}
Actual result:
{
  "name": "George",
  "version": "2.0",
  "reqParams": {
    "headerId": "this needs to be updated",
    "queue": "draft"
  },
  "headerId": "123456"
}

Pass keyPath as "reqParams/headerId", because headerId is inside reqParams and not at the root level of the JSON.
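For example, with the method from the question, a call using the nested path looks like this (a minimal usage sketch; the JSON string is just the sample from the question, inlined):

String jsonText = "{\"name\":\"George\",\"version\":\"2.0\","
        + "\"reqParams\":{\"headerId\":\"this needs to be updated\",\"queue\":\"draft\"}}";

// "reqParams/headerId" first descends into reqParams, then replaces headerId there.
String updated = updateValue("reqParams/headerId", "123456", jsonText);
System.out.println(updated);
// {"name":"George","version":"2.0","reqParams":{"headerId":"123456","queue":"draft"}}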

I updated the code slightly and pass the parameters as suggested by @Smile's answer:
keyPath: reqParams/headerId
or someId (if it exists at the root level)
Code:
public String updateValue(String keyPath, String updateText, String jsonText) {
    String[] keys = keyPath.split("/");
    JsonParser jsonParser = new JsonParser();
    JsonObject jsonObject = (JsonObject) jsonParser.parse(jsonText);
    JsonObject returnVal = jsonObject; // This holds the ref to target json object
    JsonPrimitive jp = new JsonPrimitive(updateText);
    String finalKey = keys[keys.length - 1];
    for (String key : keys) {
        if (jsonObject.get(key) != null && jsonObject.get(key).isJsonObject()) {
            jsonObject = (JsonObject) jsonObject.get(key);
            jsonObject.remove(finalKey);
            jsonObject.add(finalKey, jp);
            return returnVal.toString();
        } else if (jsonObject.get(finalKey) == null) {
            return returnVal.toString();
        }
    }
    jsonObject.remove(finalKey);
    jsonObject.add(finalKey, jp);
    return returnVal.toString();
}

Related

Aggregation for multiple fields in elasticsearch

Is there any option in Elasticsearch to aggregate on multiple fields and get the total count?
My query is:
SELECT COUNT(*), currency, type, status, channel FROM temp_index WHERE country='SG' AND received_time=now/d GROUP BY currency, type, status, channel
I am trying to implement the above in Java using the RestHighLevelClient; any suggestions or assistance would be helpful.
Currently we are using the Count API:
List<Object> dashboardsDataTotal = new ArrayList<>();
String[] channelList = { "test1", "test2", "test3", "test4", "test5", "test6" };
String[] currencyList = { "SGD", "HKD", "USD", "INR", "IDR", "PHP", "CNY" };
String[] statusList = { "COMPLETED", "FAILED", "PENDING", "FUTUREPROCESSINGDATE" };
String[] paymentTypeList = { "type1", "type2" };
String[] countryList = { "SG", "HK"};
CountRequest countRequest = new CountRequest(INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
try {
for (String country : countryAccess) { // per country
Map<String, Object> dashboardsDataPerCountry = new HashMap<>();
for (String channel : channelList) { // per channel
Map<String, Object> channelStore = new HashMap<>();
for (String paymentType : paymentTypeList) {
List<Object> paymentTypeStore = new ArrayList<>();
for (String currency : currencyList) {
Map<String, Object> currencyStore = new HashMap<>();
int receivedCount = 0;
for (String latestStatus : statusList) {
BoolQueryBuilder searchBoolQuery = QueryBuilders.boolQuery();
searchBoolQuery
.must(QueryBuilders.termQuery("channel", channel.toLowerCase()));
searchBoolQuery
.must(QueryBuilders.termQuery("currency", currency.toLowerCase()));
searchBoolQuery.must(QueryBuilders.matchPhraseQuery("source_country",
country.toLowerCase()));
if ("FUTUREPROCESSINGDATE".equalsIgnoreCase(latestStatus)) {
searchBoolQuery.must(
QueryBuilders.rangeQuery("processing_date").gt(currentDateS).timeZone(getTimeZone(country)));
}
else {
searchBoolQuery.must(QueryBuilders.termQuery("txn_latest_status",
latestStatus.toLowerCase()));
}
searchBoolQuery.must(
QueryBuilders.termQuery("paymentType", paymentType.toLowerCase()));
searchBoolQuery.must(QueryBuilders.rangeQuery("received_time").gte(currentDateS)
.lte(currentDateS).timeZone(getTimeZone(country)));
searchSourceBuilder.query(searchBoolQuery);
countRequest.source(searchSourceBuilder);
CountResponse countResponse = restHighLevelClient.count(countRequest,
RequestOptions.DEFAULT);
if (!latestStatus.equals("FUTUREPROCESSINGDATE")) {
receivedCount += countResponse.getCount();
}
currencyStore.put(latestStatus, countResponse.getCount());
}
currencyStore.put("RECEIVED", receivedCount); // received = pending + completed + failed
currencyStore.put("currency", currency);
paymentTypeStore.add(currencyStore);
} // per currency end
channelStore.put(paymentType, paymentTypeStore);
} // paymentType end
dashboardsDataPerCountry.put(channel, channelStore);
dashboardsDataPerCountry.put("country", country);
} // per channel end
dashboardsDataTotal.add(dashboardsDataPerCountry);
} // per country end
restHighLevelClient.close();
} catch (Exception e) {
e.printStackTrace(); // handle/log the exception as appropriate
}
I would appreciate it if someone could provide a better solution to the above.
I made use of CompositeAggregationBuilder and got the aggregated results:
CompositeAggregationBuilder compositeAgg = new CompositeAggregationBuilder("aggregate_buckets", sources);
searchSourceBuilder.aggregation(compositeAgg);
SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
Aggregations aggregations = searchResponse.getAggregations();
ParsedComposite parsedComposite = aggregations.get("aggregate_buckets");
List<ParsedBucket> list = parsedComposite.getBuckets();
Map<String, Object> data = new HashMap<>();
for (ParsedBucket parsedBucket : list) {
    data.clear();
    for (Map.Entry<String, Object> m : parsedBucket.getKey().entrySet()) {
        data.put(m.getKey(), m.getValue());
    }
    data.put("count", parsedBucket.getDocCount());
    System.out.println(data);
}
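For reference, here is a minimal sketch of how the sources list and searchRequest used above could be built. The field names (currency, type, status, channel, country, received_time) and the index name temp_index are taken from the SQL in the question and are assumptions about the actual mapping:

// import java.util.ArrayList; import java.util.List;
// import org.elasticsearch.action.search.SearchRequest;
// import org.elasticsearch.index.query.QueryBuilders;
// import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
// import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
// import org.elasticsearch.search.builder.SearchSourceBuilder;

// One terms value source per field to group by (the GROUP BY currency, type, status, channel part).
List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>();
sources.add(new TermsValuesSourceBuilder("currency").field("currency"));
sources.add(new TermsValuesSourceBuilder("type").field("type"));
sources.add(new TermsValuesSourceBuilder("status").field("status"));
sources.add(new TermsValuesSourceBuilder("channel").field("channel"));

// Filter like the WHERE clause; size(0) skips the hits because only the buckets are needed.
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
        .size(0)
        .query(QueryBuilders.boolQuery()
                .must(QueryBuilders.termQuery("country", "SG"))
                .must(QueryBuilders.rangeQuery("received_time").gte("now/d")));

SearchRequest searchRequest = new SearchRequest("temp_index").source(searchSourceBuilder);

Note that a composite aggregation returns its buckets page by page, so for high-cardinality combinations you may need to set a size on the aggregation and continue paging with the after key.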

How to force serialization of decimal values inside dictionaries as doubles in mongodb csharp driver?

I have the following dictionary:
var dict = new Dictionary<string, object> {
{ "decimal", 3.503m },
{ "int", 45 }
};
var serializedString = dict.ToJson();
By default that is serialized as:
{ "decimal" : { "_t" : "System.Decimal", "_v" : "3.503" }, "int" : 45 }
If I override DecimalSerializer as:
BsonSerializer.RegisterSerializer<decimal>(new DecimalSerializer().WithRepresentation(BsonType.Double));
That only influences on how "_v" value is serialized, e.g.:
{ "decimal" : { "_t" : "System.Decimal", "_v" : 3.503 }, "int" : 45 }
Expected result:
{ "decimal" : 3.503, "int" : 45 }
Please advise
The cause of the .NET type names in the BSON is the lack of type information in the dictionary. The BSON serializers try to capture enough state to restore the original objects stored in the dictionary. From the context (the dictionary) the items are of type object, so the .NET type is embedded so that deserialization knows what to recreate.
The following solutions answer your question but lose the type information needed for deserializing.
Solution 1: Change the dictionary type to <string, decimal>
var dict = new Dictionary<string, decimal> {
{ "decimal", 3.503m },
{ "int", 45 }
};
var serializedString = dict.ToJson();
Results in: { "decimal" : "3.503", "int" : "45" }
With your override of the decimal serializer, you get the expected result.
{ "decimal" : 3.503, "int" : 45 }
Solution 2: Change the dictionary type to <string, double>
var dict = new Dictionary<string, double> {
{ "decimal", (double)3.503m },
{ "int", 45 }
};
var serializedString = dict.ToJson();
Results in the expected result: { "decimal" : 3.503, "int" : 45 }
Solution 3: Use custom serializer
public class MyDictionarySerializer : SerializerBase<Dictionary<string, object>>
{
public override void Serialize(MongoDB.Bson.Serialization.BsonSerializationContext context, MongoDB.Bson.Serialization.BsonSerializationArgs args, Dictionary<string, object> dictionary)
{
context.Writer.WriteStartArray();
foreach (var item in dictionary)
{
context.Writer.WriteStartDocument();
context.Writer.WriteString(item.Key);
// TODO: your conversions from object to double
var value = (double)item.Value;
context.Writer.WriteDouble(value);
context.Writer.WriteEndDocument();
}
context.Writer.WriteEndArray();
}
public override Dictionary<string, object> Deserialize(MongoDB.Bson.Serialization.BsonDeserializationContext context, MongoDB.Bson.Serialization.BsonDeserializationArgs args)
{
context.Reader.ReadStartArray();
var result = new Dictionary<string, object>();
while (true)
{
try
{
// this catch block is only needed to identify the end of the array
context.Reader.ReadStartDocument();
}
catch (Exception exp)
{
context.Reader.ReadEndArray();
break;
}
var key = context.Reader.ReadString();
double value = context.Reader.ReadDouble();
result.Add(key, value);
context.Reader.ReadEndDocument();
}
return result;
}
}
As another option, it is possible to override the object serializer:
public class DecimalsOverridingObjectSerializer : ObjectSerializer
{
public override void Serialize(BsonSerializationContext context, BsonSerializationArgs args, object value) {
if (value != null && value is decimal) {
base.Serialize(context, args, Convert.ToDouble(value));
} else {
base.Serialize(context, args, value);
}
}
}
BsonSerializer.RegisterSerializer(typeof(object), new DecimalsOverridingObjectSerializer());
Note that this still will not work for Hashtables. A possible workaround for Hashtables:
public class DecimalsOverridingDictionarySerializer<TDictionary>:
DictionaryInterfaceImplementerSerializer<TDictionary>
where TDictionary : class, IDictionary, new()
{
public DecimalsOverridingDictionarySerializer(DictionaryRepresentation dictionaryRepresentation)
: base(dictionaryRepresentation, new DecimalsOverridingObjectSerializer(), new DecimalsOverridingObjectSerializer())
{ }
}
BsonSerializer.RegisterSerializer(typeof(Hashtable), new DecimalsOverridingDictionarySerializer<Hashtable>(DictionaryRepresentation.Document));

Get Elasticsearch response in DataTable in C#.Net

I am new to Elasticsearch 2.1.1. I want to ask how to fetch records from an index using Suggest and convert the result to a DataTable. I have installed the NEST NuGet packages in my project.
I have this request, whose response I want from Elasticsearch in DataTable format:
curl -X POST 'localhost:9200/music/_suggest?pretty' -d '{
  "song-suggest" : {
    "text" : "n",
    "completion" : {
      "field" : "suggest"
    }
  }
}'
I have some blocks of code that should fulfill your requirements.
As you said you are new to Elasticsearch, I will first show you how to initialize the connection.
var settings = new ConnectionSettings(new Uri("http://localhost:9200"));
var clientElasticSearchNet = new ElasticsearchClient(settings);
Then use:
var ElasticSearchNetQuery = new { music = new { text = "n", completion = new { field = "name_suggest" } } };
ElasticsearchResponse<string> result = clientElasticSearchNet.Suggest<string>("music", ElasticSearchNetQuery);
JObject json = JObject.Parse(result.Response.ToString());
var hitsCount = ((Newtonsoft.Json.Linq.JContainer)(json["music"].First["options"])).Count;
DataTable dtEsReponnse = new DataTable();
for (int i = 0; i < hitsCount; i++)
{
dtEsReponnse = ConvertJSONToDataTable(json["music"].First["options"][i].ToString());
}
I have created a function which will return a DataTable.
protected DataTable ConvertJSONToDataTable(string jsonString)
{
DataTable dt = new DataTable(); // result table
string[] jsonParts = Regex.Split(jsonString.Replace("[", "").Replace("]", ""), "},{");
List<string> dtColumns = new List<string>();
foreach (string jp in jsonParts)
{
string[] propData = Regex.Split(jp.Replace("{", "").Replace("}", ""), ",");
foreach (string rowData in propData)
{
try
{
int idx = rowData.IndexOf(":");
string n = rowData.Substring(0, idx - 1);
string v = rowData.Substring(idx + 1);
if (!dtColumns.Contains(n))
{
dtColumns.Add(n.Replace("\"", ""));
}
}
catch (Exception ex)
{
throw new Exception(string.Format("Error Column Name : {0}", rowData));
}
}
break;
}
foreach (string c in dtColumns)
{
if (!dt.Columns.Contains(c.ToString().Replace("\r\n", "").Trim())) {
dt.Columns.Add(c.ToString().Replace("\r\n", "").Trim());
}
}
foreach (string jp in jsonParts)
{
string[] propData = Regex.Split(jp.Replace("{", "").Replace("}", ""), ",");
DataRow nr = dt.NewRow();
foreach (string rowData in propData)
{
try
{
int idx = rowData.IndexOf(":");
string n = rowData.Substring(0, idx - 1).Replace("\"", "").Replace("\r\n", "").Trim(); //replaced
string v = rowData.Substring(idx + 1).Replace("\"", "");
nr[n] = v;
}
catch (Exception ex)
{
continue;
}
}
dt.Rows.Add(nr);
}
return dt;
}
Is this the code you are looking for?

JSON.net Linq and NullValueHandling

Developing a new MVC4 app, I have followed this example on the Json.NET website to fill my view model with a new JSON JObject:
FinalViewModel finalVM = new FinalViewModel();
IList<ResultModel> results = GetResultList();
finalVM.QueryResults = results;
JObject myJSON = new JObject(
new JProperty("grid",
new JArray(
from g in results
group g by g.ResultYear into y
orderby y.Key
select new JObject {
new JProperty("Year", y.Key),
new JProperty("min_1", y.Min(g=> g.Result_val1)),
new JProperty("min_2", y.Min(g=> g.Result_val2)),
new JProperty("items",
new JArray(
from g in results
where g.ResultYear==y.Key
orderby g.id
select new JObject(
new JProperty("l", g.Result_val1),
new JProperty("j",g.Result_val2),
new JProperty("id", g.id)
)
)
)}
)));
finalVM.DataJson = myJSON;
return PartialView("_JSONView", finalVM);
Everything works fine and I get this type of JSON sent to my view:
{
  "grid": [
    {
      "Year": 1998,
      "min_1": 12,
      "min_2": null,
      "items": [
        {
          "l": 12,
          "j": null,
          "id": 60
        },
        {
          "l": 25,
          "j": null,
          "id": 61
        }
      ]
    }
  ]
}
I would like to get rid of the null values when they exist. I have read a lot about the NullValueHandling option but do not see how to use it in my Json.NET LINQ code.
Instead of creating JObjects as part of your LINQ transformation, use anonymous objects. (This will also make your code a heck of a lot more readable!) Afterward, you can load the result object into a JObject using a JsonSerializer instance that has NullValueHandling set to Ignore. This will get rid of the nulls. Here's a demo:
class Program
{
static void Main(string[] args)
{
IList<ResultModel> results = new List<ResultModel>
{
new ResultModel
{
id = 60,
ResultYear = 1998,
Result_val1 = 12,
Result_val2 = null
},
new ResultModel
{
id = 61,
ResultYear = 1998,
Result_val1 = 25,
Result_val2 = null
}
};
var groupedResult = new
{
grid = from g in results
group g by g.ResultYear into y
orderby y.Key
select new
{
Year = y.Key,
min_1 = y.Min(g => g.Result_val1),
min_2 = y.Min(g => g.Result_val2),
items = from g in results
where g.ResultYear == y.Key
orderby g.id
select new
{
l = g.Result_val1,
j = g.Result_val2,
id = g.id
}
}
};
JsonSerializer serializer = new JsonSerializer();
serializer.NullValueHandling = NullValueHandling.Ignore;
JObject myJSON = JObject.FromObject(groupedResult, serializer);
Console.WriteLine(myJSON.ToString(Formatting.Indented));
}
class ResultModel
{
public int id { get; set; }
public int ResultYear { get; set; }
public int? Result_val1 { get; set; }
public int? Result_val2 { get; set; }
}
}
Output:
{
  "grid": [
    {
      "Year": 1998,
      "min_1": 12,
      "items": [
        {
          "l": 12,
          "id": 60
        },
        {
          "l": 25,
          "id": 61
        }
      ]
    }
  ]
}
One other note: if you are not planning to manipulate the JSON, you can actually skip the JObject altogether and serialize your grouped result directly to string instead:
JsonSerializerSettings settings = new JsonSerializerSettings();
settings.NullValueHandling = NullValueHandling.Ignore;
settings.Formatting = Formatting.Indented;
string myJSON = JsonConvert.SerializeObject(groupedResult, settings);

Birt Report not opening in PDF

Hello guys,
I am sending my form values to a controller, and from the controller to a .rptdesign file. It generates the report in the temp folder with the proper values, but my requirement is that the user should get a save/open dialog so they can save or open the report.
I think an AJAX request will not allow downloading a file, so if someone knows a better solution please reply.
My controller is below:
@RequestMapping("/leave/generateEmpLeaveReport.json")
public void generateEmployeeLeaveReport(HttpServletRequest request,
HttpServletResponse response) throws Exception {
String reportName = "D:/git-repositories/cougar_leave/src/java/com//report/myLeaveSummary.rptdesign";
File designTemplateFile = new File(reportName);
if (!designTemplateFile.exists()) {
throw new FileNotFoundException(reportName);
}
Map<String, Object> parameters = new HashMap<String, Object>();
parameters.put("empId", NumberUtils.toInt(request.getParameter("id")));
parameters.put("reportTitle", "EMPLOYEE LEAVE");
parameters.put("fromDate", request.getParameter("fromDate"));
parameters.put("toDate", request.getParameter("toDate"));
parameters.put("leaveType",
NumberUtils.toInt(request.getParameter("leaveType")));
parameters.put("transactionType",
NumberUtils.toInt(request.getParameter("transactionType")));
reportManager.addSystemParams(parameters, null,
RequestUtils.getUser(request));
File file = null;
try {
ReportType reportType = ReportType.PDF;
OfflineReportContext reportContext = new OfflineReportContext(
reportName, reportType, parameters, null,
"EMPLOYEE LEAVE SUMMARY");
StringBuffer buffer = new StringBuffer();
file = offlineReportGenerator.generateReportFile(reportContext,
buffer);
ControllerUtils
.openFile(file.getParent(), response, file.getName());
} catch (Exception e) {
log.error(e, e);
} finally {
if (file != null && file.exists()) {
file.canExecute();
}
}
}
My AJAX request is below:
generateReport : function() {
if (this.form.valid()) {
fromDate = new Date($("input[name='fromDate']").val())
toDate = new Date($("input[name='toDate']").val())
if (fromDate > toDate) {
GtsJQuery
.showError("To date should be greater or equals than From date !")
} else {
var request = GtsJQuery.ajax3(GtsJQuery.getContextPath()
+ '/leave/generateEmpLeaveReport.json', {
data : {
id : $("input[name='employeeId']").val(),
fromDate : $("input[name='fromDate']")
.val(),
toDate : $("input[name='toDate']").val(),
leaveType : $("select[name='leaveType']")
.val(),
transactionType : $("select[name='transactionType']")
.val(),
orderBy : $("select[name='orderBy']").val()
}
});
request.success(this.callback("onSubscribeSuccess"))
}
}
},
The controller response should be the temp file itself; just adjust the content type (and add a Content-Disposition header) so the browser offers the save/open dialog.
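A rough sketch of what that could look like in the controller, assuming the generated temp file is a PDF (essentially what the ControllerUtils.openFile call in the question presumably does; names and details are illustrative):

// import java.io.FileInputStream; import java.io.InputStream; import java.io.OutputStream;

// Stream the generated PDF back so the browser offers a save/open dialog.
response.setContentType("application/pdf");
response.setHeader("Content-Disposition", "attachment; filename=\"" + file.getName() + "\"");
response.setContentLength((int) file.length());

try (InputStream in = new FileInputStream(file);
        OutputStream out = response.getOutputStream()) {
    byte[] buffer = new byte[8192];
    int read;
    while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
    }
    out.flush();
}

Note also that, as suspected in the question, an AJAX response will not by itself trigger a download; navigating the browser to the URL directly (for example via window.location or a plain form submit) lets the Content-Disposition header take effect.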
