Springfox (Swagger) - How to add dynamic attributes - Spring

I am trying to add a new attribute to the Swagger definition in my Java Spring project.
I have read the documentation, in particular
https://springfox.github.io/springfox/docs/snapshot/#plugins
but I was not able to add a new attribute from scratch.
I am trying this code, but it's not really what I want:
import java.util.HashMap;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import com.fasterxml.classmate.ResolvedType;
import com.fasterxml.classmate.TypeResolver;
import com.google.common.base.Predicates;
import fr.hop.springdatarest.demo.entity.City;
import lombok.extern.java.Log;
import springfox.documentation.builders.ModelPropertyBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.schema.Model;
import springfox.documentation.schema.ModelProperty;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spi.schema.ModelBuilderPlugin;
import springfox.documentation.spi.schema.contexts.ModelContext;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger.common.SwaggerPluginSupport;
@Log
@Component
@Order(SwaggerPluginSupport.SWAGGER_PLUGIN_ORDER + 1008)
public class SwaggerDefinitionAddin implements ModelBuilderPlugin {

    @Autowired
    TypeResolver resolver;

    Map<String, ModelProperty> cityAddinMap = new HashMap<String, ModelProperty>();

    @Override
    public boolean supports(DocumentationType delimiter) {
        return DocumentationType.SWAGGER_2.equals(delimiter);
    }

    private Class<?> forClass(ModelContext context) {
        return resolver.resolve(context.getType()).getErasedType();
    }

    @Override
    public void apply(ModelContext modelContext) {
        if (forClass(modelContext) == City.class) {
            modelContext.getBuilder().id("TEST").properties(????).build();
        }
    }
}
For example, I have this definition:
"definitions": {
"City": {
"type": "object",
"properties": {
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string"
},
"postalCode": {
"type": "integer",
"format": "int32"
}
}
},
And I want:
"definitions": {
"City": {
"type": "object",
"properties": {
"id": {
"type": "integer",
"format": "int64"
},
"name": {
"type": "string"
},
"postalCode": {
"type": "integer",
"format": "int32"
},
"meteo": {
"$ref": "#/definitions/Meteo"
}
}
},
Can you help me add the meteo attribute to the definition?
My goal is to add the attribute programmatically, without using annotations.

After some research and code testing, I found this working solution:
@Component
@Order(SwaggerPluginSupport.SWAGGER_PLUGIN_ORDER + 1008)
public class SwaggerDefinitionAddin implements ModelBuilderPlugin {

    @Autowired
    TypeResolver resolver;

    @Autowired
    TypeNameExtractor typeNameExtractor;

    Map<String, ModelProperty> propertyAddinMap = new HashMap<String, ModelProperty>();

    @Override
    public boolean supports(DocumentationType delimiter) {
        return DocumentationType.SWAGGER_2.equals(delimiter);
    }

    private Class<?> forClass(ModelContext context) {
        return resolver.resolve(context.getType()).getErasedType();
    }

    @Override
    public void apply(ModelContext modelContext) {
        boolean cityScope = false;
        Class<?> modelClass = forClass(modelContext);
        // Detect whether the model context is of type City...
        if (modelClass == City.class) {
            cityScope = true;
        }
        // ...or an HATEOAS resource wrapping a City instance
        else if (modelClass.equals(Resource.class)) {
            ResolvedType resourceResolveType = resolver.resolve(modelContext.getType()).getTypeBindings().getTypeParameters().get(0);
            if (resourceResolveType.getErasedType().equals(City.class)) {
                cityScope = true;
            }
        }
        // Add the meteo property to the City and Resource<City> definitions
        if (cityScope) {
            ModelPropertyBuilder builder = new ModelPropertyBuilder();
            ModelProperty meteoProperty = builder
                    .name("meteo")
                    .type(resolver.resolve(Meteo.class))
                    .required(true)
                    .isHidden(false)
                    .position(0)
                    .build();
            meteoProperty.updateModelRef(modelRefFactory(modelContext, typeNameExtractor));
            propertyAddinMap.put("meteo", meteoProperty);
            modelContext.getBuilder()
                    .name("City")
                    .properties(propertyAddinMap)
                    .build();
        }
    }
}
I had some difficulty figuring out why Springfox was throwing an NPE; after looking at the source code, I found that we have to set the modelRef through:
meteoProperty.updateModelRef(modelRefFactory(modelContext, typeNameExtractor));
Hope that helps someone else :)
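For completeness: the Meteo class referenced above isn't shown here; any plain bean can serve as the type of the added property. A minimal, purely illustrative placeholder (not taken from the original project) might be:
// Hypothetical placeholder for the Meteo type used by the plugin above;
// the real class and its fields are not part of the original post.
public class Meteo {

    private String sky;
    private Double temperature;

    public String getSky() { return sky; }
    public void setSky(String sky) { this.sky = sky; }

    public Double getTemperature() { return temperature; }
    public void setTemperature(Double temperature) { this.temperature = temperature; }
}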

Related

Spring Boot Repository Returns the same value

Controller.java
@RestController
@RequestMapping(value = "/v2/customers/")
@Api(value = "account", description = "Operations pertaining to account operations")
public class AccountController {

    private final AccountService accountService;

    public AccountController(AccountService accountService) {
        this.accountService = accountService;
    }

    @GetMapping(value = "accounts/{CIFs}")
    public Response getAccountByCIF(@PathVariable String CIFs) {
        return Response.ok().setData(accountService.findByCustomerNumberIn(CIFs));
    }
}
AccountRepository.java
@Repository
public interface AccountRepository extends JpaRepository<AccountView, String> {
    List<AccountView> getByCustomerNumber(String cif);
    List<AccountView> getByCustomerNumberIn(List<String> cifList);
}
AccountService.java
public interface AccountService {
    List<AccountDto> findByCustomerNumber(String cifList);
    List<AccountDto> findByCustomerNumberIn(String cifList);
}
AccountServiceImpl.java
@Service
public class AccountServiceImpl implements AccountService {

    private final AccountRepository accountRepository;

    @Autowired
    private AccountMapper accountMapper;

    public AccountServiceImpl(AccountRepository accountRepository) {
        this.accountRepository = accountRepository;
    }

    @Override
    public List<AccountDto> findByCustomerNumber(String cif) {
        List<AccountView> account = accountRepository.getByCustomerNumber(cif);
        System.out.println(account);
        if (!account.isEmpty()) {
            return accountMapper.toAccountDtoList(account).stream().collect(Collectors.toList());
        } else {
            return Collections.emptyList();
        }
    }

    @Override
    public List<AccountDto> findByCustomerNumberIn(String cifList) {
        List<String> cif = Arrays.asList(cifList.split(","));
        List<AccountView> account = accountRepository.getByCustomerNumberIn(cif);
        System.out.println(account);
        if (!account.isEmpty()) {
            return accountMapper.toAccountDtoList(account).stream().collect(Collectors.toList());
        } else {
            return Collections.emptyList();
        }
    }
}
When I run the application, although the database has different records,
it returns the same value over and over again.
This is the JSON outcome. For the queried customer there are 6 records in the database, but the endpoint returns the same record 6 times, as shown below.
{
  "name": "ACDESC33811018409238803204",
  "customer_number": "9238803",
  "account_number": "33811018409238803204",
  "book_ACC": "33811018409238803204",
  "account_branch": {
    "branch_number": "204",
    "branch_name": "MERKEZI FILIAL"
  },
  "balance": 88.45,
  "currency": "USD",
  "status": "NORM",
  "class": "33811",
  "open_date": "2006-05-26",
  "location": null,
  "ibanrgd": "AZ16IBAZ33811018409238803204",
  "expcat": "GENERAL",
  "user_defined_fields": [],
  "type": "S",
  "number": "2388030001",
  "start_tod_limit_date": null,
  "end_tod_limit_date": null,
  "tod_limit_Interest": null,
  "tod_limit_amount": null,
  "tod_limit_total_amount": null
},
AccountMapper.java
package az.iba.ms.account.dto.mapper;
import az.iba.ms.account.dto.model.AccountBranchDto;
import az.iba.ms.account.dto.model.AccountDto;
import az.iba.ms.account.dto.model.FieldDto;
import az.iba.ms.account.dto.model.UserDefinedFieldsDto;
import az.iba.ms.account.entity.AccountView;
import org.springframework.stereotype.Component;
import java.sql.Array;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
@Component
public class AccountMapper {

    public List<AccountDto> toAccountDtoList(List<AccountView> accountViews) {
        return accountViews.stream()
                .map(accountView -> toAccountDto(accountView))
                .collect(Collectors.toList());
    }

    public static AccountDto toAccountDto(AccountView account) {
        return new AccountDto()
                .setName(account.getName())
                .setCustomerNumber(account.getCustomerNumber())
                .setAccountNumber(account.getAccountNumber())
                .setBookACC(account.getBookACC())
                .setAccountBranch(new AccountBranchDto(account.getBranchCode(), account.getBranchName()))
                .setBalance(account.getBalance())
                .setCurrency(account.getCurrency())
                .setStatus(account.getStatus())
                .set_class(account.get_class())
                .setOpenDate(account.getOpenDate())
                .setLocation(account.getLocation())
                .setIbanRgd(account.getIbanRgd())
                .setExpcat(account.getExpcat())
                .setUser_defined_fields(null) // UserDefinedFieldsDto[] = new FieldDto(account.getFieldValue(), account.getFieldName())
                .setAccType(account.getAccType())
                .setNumber(account.getNumber())
                .setTodLimitStartDate(account.getTodLimitStartDate())
                .setTodLimitEndDate(account.getTodLimitEndDate())
                .setTodLimitInterest(account.getTodLimitInterest())
                .setTodLimitAmount(account.getTodLimitAmount())
                .setTodLimitTotalAmount(account.getTodLimitTotalAmount());
    }
}

Find Distinct Embedded Document and further make distinct using Field

I'm using a Spring Boot Mongo example. I went through many links, like "I want result with distinct value of one field from mongodb using spring data", but still did not get any breakthrough. I am using the code below:
List<Object> obj = mongoTemplate.query(Health.class).distinct("healths").all();
List<Health> healths = null;
if (!CollectionUtils.isEmpty(obj)) {
    healths = obj.stream().map(e -> (Health) e).collect(Collectors.toList());
}
With this code I am getting a duplicate healthCd=E. Is there any way I can make the decision based on the healthCd field? Note: healths is an embedded document within the Patient document.
Response:
[
  {
    "healthCd": "D",
    "healthName": "ABC",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "C",
    "healthName": "MONO",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "E",
    "healthName": "BONO",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "B",
    "healthName": "JOJO",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "A",
    "healthName": "KOKO",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "1",
    "healthName": "LULU",
    "effDate": "2012-08-24T07:16:33"
  },
  {
    "healthCd": "E",
    "healthName": "BOBO",
    "effDate": "2014-07-26T22:37:49"
  }
]
Health
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class Health {

    @Field
    private String healthCd;

    @Field
    private String healthName;

    @Field
    private LocalDateTime effDate;
}
You may use MongoDB aggregation to get the desired result (take a look):
db.health.aggregate([
  {
    $sort: {
      "healths.effDate": 1
    }
  },
  {
    $group: {
      _id: "$healths.healthCd",
      healths: {
        $first: "$healths"
      }
    }
  },
  {
    $replaceRoot: {
      newRoot: "$healths"
    }
  }
])
MongoPlayground
Spring Boot Implementation
package com.example.demo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.data.domain.Sort.Direction;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.AggregationResults;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
@SpringBootApplication
public class DemoApplication implements CommandLineRunner {

    @Autowired
    private MongoTemplate mongoTemplate;

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

    @Override
    public void run(String... args) throws Exception {

        // If your operator is not available inside Aggregation, or the query is too complex,
        // use the code below to write MongoDB shell code directly as a Document:
        // new AggregationOperation() {
        //
        //     @Override
        //     public Document toDocument(AggregationOperationContext context) {
        //         return new Document("$group",
        //                 new Document("_id", "$healths.healthCd")
        //                         .append("healths", new Document("$first", "$healths")));
        //     }
        //
        // },

        Aggregation agg = Aggregation.newAggregation(
                Aggregation.sort(Direction.ASC, "healths.effDate"),
                Aggregation.group("healths.healthCd").first("healths").as("healths"),
                Aggregation.replaceRoot("healths")
        );

        AggregationResults<Healths> healths = mongoTemplate.aggregate(agg,
                mongoTemplate.getCollectionName(Health.class), Healths.class);

        for (Healths health : healths.getMappedResults()) {
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            System.out.println(gson.toJson(health));
        }
    }
}
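One detail worth noting: the aggregation maps into a Healths class that isn't shown in the original answer; presumably it mirrors the fields of the embedded Health document. A minimal sketch, assuming exactly that, could be:
import java.time.LocalDateTime;

// Assumed projection class for the mapped aggregation results;
// field names mirror the embedded Health document shown in the question.
public class Healths {

    private String healthCd;
    private String healthName;
    private LocalDateTime effDate;

    public String getHealthCd() { return healthCd; }
    public void setHealthCd(String healthCd) { this.healthCd = healthCd; }

    public String getHealthName() { return healthName; }
    public void setHealthName(String healthName) { this.healthName = healthName; }

    public LocalDateTime getEffDate() { return effDate; }
    public void setEffDate(LocalDateTime effDate) { this.effDate = effDate; }
}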

RESTful web service: get a list of objects

My REST web service returns the following output:
{
  "result": {
    "TICKET1": {
      "number": "TICKET1",
      "description": "aa"
    },
    "TICKET2": {
      "number": "TICKET2",
      "description": "dd"
    }
  }
}
To convert this into a list of Tickets, I tried the following.
class TicketResponse {
    private List<Ticket> result;
    // Get Set
}

class Ticket {
    private String number;
    private String description;
    // Get Set
}
TicketResponse response = restTemplate.getForObject(WEB_SERVICE_URL, TicketResponse.class);
But I get the response as null. How can I do this?
I'll provide two ways to do this with the JSON structure you have.
Option 1:
Modify your TicketResponse class as shown below:
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
public class TicketResponse {

    @JsonProperty("result")
    private Map<String, Ticket> ticketsMap = new HashMap<>();

    @JsonAnySetter
    public void setUnknownField(String name, Ticket value) {
        ticketsMap.put(name, value);
    }

    @JsonIgnore
    private List<Ticket> ticketsList;

    public List<Ticket> getTicketsList() {
        return ticketsMap.entrySet().stream().map(Entry::getValue).collect(Collectors.toList());
    }
}
then you can get your list of tickets from:
response.getTicketsList();
Option 2:
Read your response into a String:
String response = restTemplate.getForObject(WEB_SERVICE_URL, String.class);
and use the code below to convert it to a List<Ticket>:
ObjectMapper mapper = new ObjectMapper();
JsonNode jsonNode = mapper.readTree(response);
JsonNode wantedJsonNode = jsonNode.get("result");
Map<String, Ticket> map =
        mapper.convertValue(wantedJsonNode, new TypeReference<Map<String, Ticket>>() {});
List<Ticket> tickets =
        map.entrySet().stream().map(Entry::getValue).collect(Collectors.toList());
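Wrapped into a helper, Option 2 might look roughly like this (readTree throws a checked IOException, so it is declared here; the class and method names are placeholders, not from the original post):
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.web.client.RestTemplate;

// Illustrative helper: fetches the raw JSON and converts the "result"
// object into a List<Ticket>; names are placeholders.
public class TicketClient {

    private final RestTemplate restTemplate = new RestTemplate();
    private final ObjectMapper mapper = new ObjectMapper();

    public List<Ticket> fetchTickets(String url) throws IOException {
        String response = restTemplate.getForObject(url, String.class);
        JsonNode wantedJsonNode = mapper.readTree(response).get("result");
        Map<String, Ticket> map =
                mapper.convertValue(wantedJsonNode, new TypeReference<Map<String, Ticket>>() {});
        return new ArrayList<>(map.values());
    }
}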
The object you provided doesn't contain a list/array, which would be inside square brackets, like this:
{
  "result": {
    "tickets": [
      {
        "number": "TICKET1",
        "description": "aa"
      },
      {
        "number": "TICKET2",
        "description": "dd"
      }
    ]
  }
}
Change your service if possible to return a list/array. Otherwise what you have is an object with individual fields named TICKET1 and TICKET2, so you'll need a field for each.
TicketResponse must have a structure that corresponds to the response of the service.
You can change your TicketResponse class and add a getTicketsAsArray method:
public class TicketResponse {

    private Map<String, Ticket> result;
    // getter setter

    public List<Ticket> getTicketsAsArray() {
        return new ArrayList<Ticket>(result.values());
    }
}
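With that in place, the existing restTemplate call from the question should work unchanged, for example:
TicketResponse response = restTemplate.getForObject(WEB_SERVICE_URL, TicketResponse.class);
List<Ticket> tickets = response.getTicketsAsArray();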

Custom sorting using script in Elasticsearch

I want to make use of scripting to sort the results in Elasticsearch with custom logic. I have read the Elasticsearch docs but could not figure it out. After seeing some links on the internet, I tried a bit, and below is the source code. I am using a native (Java) script for it. I am not sure whether this is the correct approach.
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
import customSortProject.Sorted;
import java.util.Map;
public class CustomScriptFactory implements NativeScriptFactory {
    public ExecutableScript newScript(Map<String, Object> params) {
        return new Sorted(params);
    }
}
And this is the class where I am implementing the logic for the sort. Currently I am just collecting the future dates.
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.script.AbstractSearchScript;
public class Sorted extends AbstractSearchScript {

    String fieldParam;
    int lengthParam;

    public Sorted(@Nullable Map<String, Object> params) {
        fieldParam = (String) params.get("field");
        lengthParam = new Integer(params.get("length").toString()).intValue();
    }

    public Object run() {
        if (source().containsKey(fieldParam) && source().get(fieldParam) != null && source().get(fieldParam).toString() != null) {
            String field = doc().get(fieldParam).toString();
            field = field.replaceAll("\\[", "").replaceAll("\\]", "");
            long fieldLong = 0;
            Date today = new Date();
            fieldLong = Long.parseLong(field);
            Date date = new Date(fieldLong);
            List<Date> futureList = new ArrayList<Date>();
            if (date.after(today))
                futureList.add(date);
            Collections.sort(futureList);
            return futureList;
        } else {
            return "";
        }
    }
}
With this logic I am using the query DSL to call this script, which is registered in the .yml file (a registration sketch follows the query below).
Query:
{
  "query": {
    "match": {
      "title": "cancer"
    }
  },
  "sort": {
    "_script": {
      "script": "sorted",
      "lang": "native",
      "type": "string",
      "ignore_unmapped": true,
      "params": {
        "field": "startdate",
        "length": 6
      }
    }
  }
}
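For reference, with the legacy native-script mechanism a factory can also be registered directly in elasticsearch.yml; the key format and the fully qualified class name below are assumptions, not taken from the original post:
# elasticsearch.yml -- assumed registration entry for the factory shown above
script.native.sorted.type: customSortProject.CustomScriptFactory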
Please let me know whether this is the correct approach to custom sorting. I want to call it from the query DSL, as our application is in PHP and we are using PHP's ES client to search.

How to use Elasticsearch plugin-defined filter

I have created a plugin for Elasticsearch and have installed it successfully (http://localhost:9200/_nodes/plugins/ shows it installed), but I can't seem to use it in my queries - I only get errors: "ScriptException[dynamic scripting for [groovy] disabled]". It seems like I need a different lang setting, but I've tried 'lang': 'java' with no joy. I've also tried lang: expression; then I get "ExpressionScriptCompilationException[Unknown variable [maxmind] in expression". How do I access the plugin I've created? Or do I need to do something more to register it?
I've been following this excellent guide:
https://github.com/imotov/elasticsearch-native-script-example
But it says nothing about how queries should be written.
My AbstractPlugin:
package org.elasticsearch.plugin.maxmind;
import java.util.Collection;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.AbstractPlugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.plugin.maxmind.GeoLoc;
public class MaxMind extends AbstractPlugin {

    @Override
    public String name() {
        return "maxmind";
    }

    @Override
    public String description() {
        return "Plugin to annotate ip addresses with maxmind geo data";
    }

    // Thanks https://github.com/imotov/elasticsearch-native-script-example
    public void onModule(ScriptModule module) {
        module.registerScript("geoloc", GeoLoc.Factory.class);
    }
}
Note the name "geoloc". Is that the name I use in my query?
My GeoLoc module:
package org.elasticsearch.plugin.maxmind;
import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
public class GeoLoc extends AbstractSearchScript {

    public static class Factory implements NativeScriptFactory {

        // called on every search on every shard
        @Override
        public ExecutableScript newScript(@Nullable Map<String, Object> params) {
            String fieldName = params == null ? null :
                    XContentMapValues.nodeStringValue(params.get("field"), null);
            if (fieldName == null) {
                throw new ScriptException("Missing field parameter");
            }
            return new GeoLoc(fieldName);
        }
    }

    private final String fieldName;

    private GeoLoc(String fieldName) {
        this.fieldName = fieldName;
    }

    @Override
    public Object run() {
        ScriptDocValues docValue = (ScriptDocValues) doc().get(fieldName);
        if (docValue != null && !docValue.isEmpty()) {
            // TODO: real geolocation here
            HashMap fakeloc = new HashMap<String, String>();
            fakeloc.put("lat", "1.123");
            fakeloc.put("lon", "44.001");
            fakeloc.put("basedon", docValue);
            return fakeloc;
        }
        return false;
    }
}
My query:
{
  "_source": [
    "uri",
    "user_agent",
    "server_ip",
    "server_port",
    "client_ip",
    "client_port"
  ],
  "query": {
    "filtered": {
      "filter": {}
    }
  },
  "script_fields": {
    "test1": {
      "params": {
        "field": "client_ip"
      },
      "script": "geoloc" // is this right?
    }
  },
  "size": 1
}
You should be able to specify lang: "native" with your script; any script written in Java and registered with registerScript is of the "native" type.
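Applied to the query above, the script_fields section would then look roughly like this (only the added "lang" line is new; everything else is unchanged from the question):
"script_fields": {
  "test1": {
    "params": {
      "field": "client_ip"
    },
    "script": "geoloc",
    "lang": "native"
  }
}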
