Timestamp converter not working in Spring Data Rest with Spanner - spring

I'm trying to convert an input timestamp, which arrives as a string, into a Cloud Spanner timestamp with the help of a Spring Data REST custom converter, but it is not working. I need help understanding why the custom converter is not invoked.
Input: http://localhost:8080/apipromocentral/promotions
RequestBody : {"startDateTime": "2019-11-07 15:53:00"}
POJO:
@ApiModel
@Data
@AllArgsConstructor
@Table(name = "PROMOTIONS")
public class Promotion {

    /**
     * promotion id
     */
    @ApiModelProperty(notes = "Id of the Promotion", required = true)
    @PrimaryKey
    @Column(name = "PROMO_ID")
    private String promotionId;

    @ApiModelProperty(notes = "Start Date Time of a promotion", allowableValues = "yyyy-MM-dd HH:mm:ss", required = true)
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss")
    @Column(name = "START_DATE_TIME")
    private Timestamp startDateTime; // java.sql.Timestamp
}
Converter code:
@Component
public class TimestampWriteConverter implements Converter<java.sql.Timestamp, Timestamp> {

    @Override
    public Timestamp convert(java.sql.Timestamp sqlTimestamp) {
        // Return com.google.cloud.Timestamp
        return Timestamp.of(sqlTimestamp);
    }
}
Exception:
"message": "FAILED_PRECONDITION: com.google.api.gax.rpc.FailedPreconditionException: io.grpc.StatusRuntimeException: FAILED_PRECONDITION: Invalid value for column START_DATE_TIME in table PROMOTIONS: Expected TIMESTAMP.",
"trace": "com.google.cloud.spanner.SpannerException: FAILED_PRECONDITION: com.google.api.gax.rpc.FailedPreconditionException: io.grpc.StatusRuntimeException: FAILED_PRECONDITION: Invalid value for column START_DATE_TIME in table PROMOTIONS: Expected TIMESTAMP.\r\n\tat com.google.cloud.spanner.SpannerExceptionFactory.newSpannerExceptionPreformatted(SpannerExceptionFactory.java:156)\r\n\tat com.google.cloud.spanner.SpannerExceptionFactory.newSpannerException(SpannerExceptionFactory.java:45)\r\n\tat com.google.cloud.spanner.SpannerExceptionFactory.newSpannerException(SpannerExceptionFactory.java:112)\r\n\tat

Looking at the documentation, it looks like you need to pass the TimestampWriteConverter to a ConverterAwareMappingSpannerEntityProcessor.
@Configuration
public class ConverterConfiguration {

    @Bean
    public SpannerEntityProcessor spannerEntityProcessor(SpannerMappingContext spannerMappingContext) {
        return new ConverterAwareMappingSpannerEntityProcessor(spannerMappingContext,
                Arrays.asList(new TimestampWriteConverter()),
                Arrays.asList());
    }
}
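If the entity is also read back from Spanner, a matching reading converter going the other way (com.google.cloud.Timestamp to java.sql.Timestamp) is typically registered in the second converter list, i.e. the third constructor argument. The sketch below is an assumption on my part, not part of the original answer; com.google.cloud.Timestamp exposes toSqlTimestamp() for this.

@Component
public class TimestampReadConverter implements Converter<com.google.cloud.Timestamp, java.sql.Timestamp> {

    @Override
    public java.sql.Timestamp convert(com.google.cloud.Timestamp cloudTimestamp) {
        // com.google.cloud.Timestamp can be converted directly back to java.sql.Timestamp
        return cloudTimestamp.toSqlTimestamp();
    }
}

It would then be passed in place of the empty Arrays.asList() in the configuration above.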

Related

Failed to convert value of type 'java.lang.String' to required type 'java.util.Date' on swagger

I want to call a function in the database from my Spring Boot API and get the value it returns.
When I enter the parameters in Swagger, the date part produces an error.
When I pass the date parameters to the function in Oracle as 01-apr-2021, there is no error, but I cannot send them that way from Spring.
Oracle function code:
CREATE OR REPLACE PACKAGE BODY MET.Z_PKG_OEE_NEW
FUNCTION Z_OEE_A1AfterReworkRatio(V_plant_config_num_id IN number, p_start_date in date, p_stop_date in date) RETURN NUMBER IS
v_result NUMBER;
p_cur001 SYS_REFCURSOR;
BEGIN
Z_OEE_A1AfterReworkRatio_Detail(V_plant_config_num_id,p_start_date,p_stop_date,p_cur001, v_result);
RETURN round(v_result,4);
END Z_OEE_A1AfterReworkRatio;
end;
OeeController:
@RestController
@RequestMapping("/api/oeeReports")
@CrossOrigin
public class OeeController {

    private OeeReportService oeeReportService;

    @Autowired
    public OeeController(OeeReportService oeeReportService) {
        this.oeeReportService = oeeReportService;
    }

    @GetMapping("A1AfterReworkRatio")
    BigDecimal A1AfterReworkRatio(@RequestParam int V_plant_config_num_id,
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) Date p_start_date,
            @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) Date p_stop_date) {
        return this.oeeReportService.A1AfterReworkRatio(V_plant_config_num_id, p_start_date, p_stop_date);
    }
}
OeeService:
@Service
public class OeeReportManager implements OeeReportService {

    private OeeDao oeeDao;

    @Autowired
    public OeeReportManager(OeeDao oeeDao) {
        super();
        this.oeeDao = oeeDao;
    }

    @Override
    public BigDecimal A1AfterReworkRatio(int V_plant_config_num_id, Date p_start_date, Date p_stop_date) {
        return this.oeeDao.A1AfterReworkRatio(V_plant_config_num_id, p_start_date, p_stop_date);
    }
}
OeeDao:
@Repository
public class OeeDao {

    @Autowired
    private EntityManager entitymanager;

    public BigDecimal A1AfterReworkRatio(int V_plant_config_num_id, Date p_start_date, Date p_stop_date) {
        BigDecimal commentCount = (BigDecimal) entitymanager
                .createNativeQuery(
                        "SELECT Z_OEE_A1AfterReworkRatio(:V_plant_config_num_id:p_start_date:p_stop_date) FROM DUAL"
                )
                .setParameter("V_plant_config_num_id", V_plant_config_num_id)
                .setParameter("p_start_date", p_start_date)
                .setParameter("p_stop_date", p_stop_date)
                .getSingleResult();
        return commentCount;
    }
}
Swagger error:
{
"timestamp": "2021-08-26T07:00:23.487+00:00",
"status": 500,
"error": "Internal Server Error",
"trace": "org.springframework.dao.InvalidDataAccessApiUsageException: Could not locate named parameter [V_plant_config_num_id], expecting one of [V_plant_config_num_id:p_start_date:p_stop_date]; nested exception is java.lang.IllegalArgumentException: Could not locate named parameter [V_plant_config_num_id], expecting one of [V_plant_config_num_id:p_start_date:p_stop_date]\r\n\tat
How can I solve this problem?
According to https://www.baeldung.com/spring-date-parameters
you can annotate your date parameters in OeeController with @DateTimeFormat(iso = DateTimeFormat.ISO.DATE):
@GetMapping("A1AfterReworkRatio")
BigDecimal A1AfterReworkRatio(@RequestParam int V_plant_config_num_id,
        @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) Date p_start_date,
        @RequestParam @DateTimeFormat(iso = DateTimeFormat.ISO.DATE) Date p_stop_date) {
    return this.oeeReportService.A1AfterReworkRatio(V_plant_config_num_id, p_start_date, p_stop_date);
}
The article above also describes other ways of achieving this.
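Separately, the exception text ("expecting one of [V_plant_config_num_id:p_start_date:p_stop_date]") suggests the named parameters in the native query are run together without separators. As a sketch only (same names as in the question), the query string would normally separate the placeholders with commas:

BigDecimal commentCount = (BigDecimal) entitymanager
        .createNativeQuery(
                "SELECT Z_OEE_A1AfterReworkRatio(:V_plant_config_num_id, :p_start_date, :p_stop_date) FROM DUAL")
        .setParameter("V_plant_config_num_id", V_plant_config_num_id)
        .setParameter("p_start_date", p_start_date)
        .setParameter("p_stop_date", p_stop_date)
        .getSingleResult();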

What is the ideal way to serialize and deserialize polymorphic entity attribute in spring boot?

I have an Entity class with a column attribute whose type is an abstract class. I want to serialize it (object to JSON string) when saving it to the database column and deserialize it back into the abstract class (which in turn resolves the string to the appropriate concrete class) when it is retrieved from the database.
Here's how I accomplished it:
ProductEntity.java
@Entity
@Table(name = "PRODUCT")
@Data
public class ProductEntity {

    @Id
    @Column(name = "ID", insertable = false)
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private BigInteger id;

    @Column(name = "DESCRIPTION")
    private String description;

    @Column(name = "NAME")
    private String name;

    @Column(name = "PRODUCT_TYPE")
    private String productType;

    @Column(name = "PRODUCT_SPECS")
    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.EXTERNAL_PROPERTY,
            property = "productType") // -------> map to a concrete class based on the productType value
    @Convert(converter = ObjectConverter.class) // -------> custom converter
    private ProductSpecification productSpec;
}
NOTE: the "PRODUCT_SPECS" database column is of JSON type.
ProductSpecification.java
@NoArgsConstructor
@JsonTypeInfo(use = JsonTypeInfo.Id.MINIMAL_CLASS,
        include = JsonTypeInfo.As.WRAPPER_OBJECT)
@JsonSubTypes({
        @JsonSubTypes.Type(value = ComputerSpecification.class, name = "computer"),
        @JsonSubTypes.Type(value = SpeakerSpecification.class, name = "speaker")
})
public abstract class ProductSpecification { }
ComputerSpecification.java
@Getter
@Setter
@NoArgsConstructor
@JsonTypeName("computer")
public class ComputerSpecification extends ProductSpecification {

    String memory;
    String displaySize;
    String processor;

    @JsonCreator
    public ComputerSpecification(@JsonProperty("memory") String memory,
            @JsonProperty("displaysize") String displaySize,
            @JsonProperty("processor") String processor) {
        super();
        this.memory = memory;
        this.displaySize = displaySize;
        this.processor = processor;
    }
}
SpeakerSpecification.java
@Getter
@Setter
@NoArgsConstructor
@JsonTypeName("speaker")
public class SpeakerSpecification extends ProductSpecification {

    String dimension;
    String sensitivity;
    String bassPrinciple;
    String amplifierPower;

    @JsonCreator
    public SpeakerSpecification(@JsonProperty("sensitivity") String sensitivity,
            @JsonProperty("dimension") String dimension,
            @JsonProperty("bassPrinciple") String bassPrinciple,
            @JsonProperty("amplifierPower") String amplifierPower) {
        super();
        this.sensitivity = sensitivity;
        this.dimension = dimension;
        this.bassPrinciple = bassPrinciple;
        this.amplifierPower = amplifierPower;
    }
}
ObjectConverter.java
NOTE: I am using Jackson ObjectMapper for serialization and deserialization.
public class ObjectConverter implements AttributeConverter<Object, String> {

    private final static Logger LOGGER = LoggerFactory.getLogger(ObjectConverter.class);
    private static final ObjectMapper mapper;

    static {
        mapper = new ObjectMapper();
        mapper.setSerializationInclusion(Include.NON_NULL);
    }

    @Override
    public String convertToDatabaseColumn(Object attributeObject) {
        if (attributeObject == null) {
            return "";
        }
        try {
            return mapper.writeValueAsString(attributeObject);
        } catch (JsonProcessingException e) {
            LOGGER.error("Could not convert to database column", e);
            return null;
        }
    }

    @Override
    public Object convertToEntityAttribute(String dbColumnValue) {
        try {
            if (StringUtils.isBlank(dbColumnValue)) {
                return null;
            }
            return mapper.readValue(dbColumnValue, ProductSpecification.class); // ----> mapped to the abstract class
        } catch (Exception e) {
            LOGGER.error("Could not convert to entity attribute", e);
            return null;
        }
    }
}
Request body 1:
{
    "name": "Bose Bass Module 700 - Black- Wireless, Compact Subwoofer",
    "description": "This wireless, compact subwoofer is designed to be paired with the Bose sound bar 700 to bring music, movies, and TV to life with Deep, dramatic bass. ",
    "productSpec": {
        "sensitivity": "90 dB",
        "bassPrinciple": "reflex",
        "amplifierPower": "700 watts",
        "dimension": "14-5/16inW x 42-13/16inH x 16-5/16inD"
    }
}
This request gets saved in the database column "PRODUCT_SPECS" as :
{".SpeakerSpecification ":{"sensitivity" : "90 dB","bassPrinciple" : "reflex", "amplifierPower" :"700
watts", "dimension" : "14-5/16inW x 42-13/16inH x 16-5/16inD" }}
Now this solution works perfectly fine. The ".SpeakerSpecification " key appears neither in the response of a GET API call nor in the Swagger doc. But having to store the type info in the database really bothers me.
Is there a better approach to this problem, where I could avoid having the type info (".SpeakerSpecification ") in the column value?

How to format date correctly using Spring Data Elasticsearch

I'm using Spring Boot 2.2.5 with Elasticsearch 6.8.6. I'm in the process of migrating from Spring Data Jest to the Spring Data Elasticsearch REST transport mechanism with ElasticsearchEntityMapper.
I have a Date field with the following definition:
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSZ")
@Field(type = FieldType.Date, format = DateFormat.custom, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSZ")
private Date date;
I would like the date stored in Elasticsearch like this:
"date": "2020-04-02T14:49:05.672+0000"
When I start the application, the index is created but when I try to save the entity I get the following exception:
Caused by: org.elasticsearch.client.ResponseException: method [POST], host [http://localhost:9200], URI [/trends/estrend?timeout=1m], status line [HTTP/1.1 400 Bad Request]
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"failed to parse field [date] of type [date] in document with id 'rS5UP3EB9eKtCTMXW_Ky'"}],"type":"mapper_parsing_exception","reason":"failed to parse field [date] of type [date] in document with id 'rS5UP3EB9eKtCTMXW_Ky'","caused_by":{"type":"illegal_argument_exception","reason":"Invalid format: \"1585905425266\" is malformed at \"5266\""}},"status":400}
Any pointers on what I'm doing wrong and what I should do to fix it?
Configuration and entity definitions below:
@Configuration
public class ElasticsearchConfig extends AbstractElasticsearchConfiguration {

    @Value("${spring.data.elasticsearch.host}")
    private String elasticSearchHost;

    @Value("${spring.data.elasticsearch.port}")
    private String elasticSearchPort;

    @Bean
    public RestHighLevelClient elasticsearchClient() {
        final ClientConfiguration clientConfiguration = ClientConfiguration.builder()
                .connectedTo(elasticSearchHost + ":" + elasticSearchPort)
                .usingSsl()
                .build();
        return RestClients.create(clientConfiguration).rest();
    }

    @Bean
    public EntityMapper entityMapper() {
        ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper(elasticsearchMappingContext(), new DefaultConversionService());
        entityMapper.setConversions(elasticsearchCustomConversions());
        return entityMapper;
    }
}
package com.es.test;

import java.util.Date;
import java.util.UUID;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

@Document(indexName = "trends")
public class EsTrend {

    @Id
    private UUID id;

    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSZ")
    @Field(type = FieldType.Date, format = DateFormat.custom, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSZ")
    private Date date;

    private String entityOrRelationshipId;

    // getters and setters
}
Update:
If I disable the ElasticsearchEntityMapper bean, I don't get the exception and the date is written in the correct format to Elasticsearch. Is there anything else I need to configure for the ElasticsearchEntityMapper?
First, please don't use the Jackson-based default mapper. It is removed in the next major version of Spring Data Elasticsearch (4.0). Then there will be no choice available, and internally the ElasticsearchEntityMapper is used.
As to your problem: the ElasticsearchEntityMapper in version 3.2, which Spring Boot currently uses, does not use the date-related information from the @Field annotation to convert the entity; it is only used for creating the index mappings. This was a missing feature or bug and is fixed in the next major version, where the whole mapping process was overhauled.
What you can do in your current situation: you need to add custom converters. You can do this in your configuration class like this:
@Configuration
public class ElasticsearchConfig extends AbstractElasticsearchConfiguration {

    private static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");

    @Value("${spring.data.elasticsearch.host}")
    private String elasticSearchHost;

    @Value("${spring.data.elasticsearch.port}")
    private String elasticSearchPort;

    @Bean
    public RestHighLevelClient elasticsearchClient() {
        final ClientConfiguration clientConfiguration = ClientConfiguration.builder()
                .connectedTo(elasticSearchHost + ":" + elasticSearchPort)
                .usingSsl()
                .build();
        return RestClients.create(clientConfiguration).rest();
    }

    @Bean
    public EntityMapper entityMapper() {
        ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper(elasticsearchMappingContext(), new DefaultConversionService());
        entityMapper.setConversions(elasticsearchCustomConversions());
        return entityMapper;
    }

    @Override
    public ElasticsearchCustomConversions elasticsearchCustomConversions() {
        return new ElasticsearchCustomConversions(Arrays.asList(DateToStringConverter.INSTANCE, StringToDateConverter.INSTANCE));
    }

    @WritingConverter
    enum DateToStringConverter implements Converter<Date, String> {
        INSTANCE;

        @Override
        public String convert(Date date) {
            return formatter.format(date);
        }
    }

    @ReadingConverter
    enum StringToDateConverter implements Converter<String, Date> {
        INSTANCE;

        @Override
        public Date convert(String s) {
            try {
                return formatter.parse(s);
            } catch (ParseException e) {
                return null;
            }
        }
    }
}
You still need to have the date format in the @Field annotation though, because it is needed to create the correct index mappings.
And you should change your code to use the Java 8 time classes like LocalDate or LocalDateTime; Spring Data Elasticsearch supports these out of the box, whereas java.util.Date needs custom converters.
Edit 09.04.2020: added the necessary @WritingConverter and @ReadingConverter annotations.
Edit 19.04.2020: Spring Data Elasticsearch 4.0 will support the java.util.Date class out of the box with the @Field annotation as well.
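As a rough sketch of that LocalDateTime suggestion (the pattern below is my assumption; LocalDateTime carries no zone offset, so the trailing Z of the original pattern is dropped):

// Sketch: a LocalDateTime field, which Spring Data Elasticsearch can map without custom converters
@Field(type = FieldType.Date, format = DateFormat.custom, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS")
private LocalDateTime date;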
As I am a new joiner, I can't comment under @P.J.Meisch's answer because of the Stack Overflow rules.
I also faced this problem and solved it with @P.J.Meisch's answer, but with a small change to the @ReadingConverter.
In fact, the raw type read from Elasticsearch is Long, and the result type we need in Java is LocalDateTime, so the reading converter should convert Long to LocalDateTime.
The code follows:
@Configuration
public class ElasticsearchClientConfig extends AbstractElasticsearchConfiguration {

    public final static int TIME_OUT_MILLIS = 50000;

    @Autowired
    private ElasticsearchProperties elasticsearchProperties;

    @Override
    @Bean
    public RestHighLevelClient elasticsearchClient() {
        final ClientConfiguration clientConfiguration = ClientConfiguration.builder()
                .connectedTo(elasticsearchProperties.getHost() + ":" + elasticsearchProperties.getPort())
                .withBasicAuth(elasticsearchProperties.getName(), elasticsearchProperties.getPassword())
                .withSocketTimeout(TIME_OUT_MILLIS)
                .withConnectTimeout(TIME_OUT_MILLIS)
                .build();
        return RestClients.create(clientConfiguration).rest();
    }

    /**
     * Java LocalDateTime to Elasticsearch Date mapping
     *
     * @return EntityMapper
     */
    @Override
    @Bean
    public EntityMapper entityMapper() {
        ElasticsearchEntityMapper entityMapper = new ElasticsearchEntityMapper(elasticsearchMappingContext(), new DefaultConversionService());
        entityMapper.setConversions(elasticsearchCustomConversions());
        return entityMapper;
    }

    @Override
    public ElasticsearchCustomConversions elasticsearchCustomConversions() {
        return new ElasticsearchCustomConversions(Arrays.asList(DateToStringConverter.INSTANCE, LongToLocalDateTimeConverter.INSTANCE));
    }

    @WritingConverter
    enum DateToStringConverter implements Converter<Date, String> {
        /**
         * instance
         */
        INSTANCE;

        @Override
        public String convert(@NonNull Date date) {
            return DateUtil.format(date, DateConstant.TIME_PATTERN);
        }
    }

    @ReadingConverter
    enum LongToLocalDateTimeConverter implements Converter<Long, LocalDateTime> {
        /**
         * instance
         */
        INSTANCE;

        @Override
        public LocalDateTime convert(@NonNull Long s) {
            return LocalDateTime.ofInstant(Instant.ofEpochMilli(s), ZoneId.systemDefault());
        }
    }
}
and the DateUtil file:
public class DateUtil {

    /**
     * lock object
     */
    private static final Object LOCK_OBJ = new Object();

    /**
     * SimpleDateFormat map, one entry per pattern
     */
    private static final Map<String, ThreadLocal<SimpleDateFormat>> LOCAL_MAP = new HashMap<>();

    /**
     * thread safe
     *
     * @param pattern pattern
     * @return SimpleDateFormat
     */
    private static SimpleDateFormat getSdf(final String pattern) {
        ThreadLocal<SimpleDateFormat> tl = LOCAL_MAP.get(pattern);
        if (tl == null) {
            synchronized (LOCK_OBJ) {
                tl = LOCAL_MAP.get(pattern);
                if (tl == null) {
                    System.out.println("put new sdf of pattern " + pattern + " to map");
                    tl = ThreadLocal.withInitial(() -> {
                        System.out.println("thread: " + Thread.currentThread() + " init pattern: " + pattern);
                        return new SimpleDateFormat(pattern);
                    });
                    LOCAL_MAP.put(pattern, tl);
                }
            }
        }
        return tl.get();
    }

    /**
     * format
     *
     * @param date date
     * @param pattern pattern
     * @return String
     */
    public static String format(Date date, String pattern) {
        return getSdf(pattern).format(date);
    }
}
Lastly, please vote for @P.J.Meisch's answer, not mine.

Spring Data Rest and Oracle Between 2 dates query giving nothing

Spring Data REST is not able to fetch the data between two dates from the database table.
Collection<XXXX> findByCreatedOnBetween(LocalDate fromDate, LocalDate todayDate);
From the bean:
private LocalDate createdOn;
I want the data between the two dates:
SELECT *
FROM testing testing
WHERE testing.created_on BETWEEN ? AND ?;
I believe Spring Data REST accepts only the ISO 8601 date format by default (like 2018-10-22).
If you want to accept dates in a different format, you need to add a converter.
@Configuration
public class RepositoryRestConfig extends RepositoryRestConfigurerAdapter {

    @Autowired
    CustomDateConverter customDateConverter;

    @Override
    public void configureConversionService(ConfigurableConversionService conversionService) {
        conversionService.addConverter(customDateConverter);
        super.configureConversionService(conversionService);
    }
}

@Component
public class CustomDateConverter implements Converter<String, LocalDate> {

    @Override
    public LocalDate convert(String source) {
        return LocalDate.from(DateTimeFormatter.ofPattern("dd-MMM-yy").parse(source));
    }
}
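For reference, with the default ISO format the exposed repository search endpoint would typically be called with dates like this (the resource path below is illustrative; the parameter names match the repository method in the question):

GET /xxxx/search/findByCreatedOnBetween?fromDate=2018-10-01&todayDate=2018-10-22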

Validate input before Jackson in Spring Boot

I've built a REST endpoint using Spring Boot. JSON is posted to the endpoint, and Jackson converts the JSON, giving me an object.
The JSON looks like this:
{
"parameterDateUnadjusted": "2017-01-01",
"parameterDateAdjusted": "2017-01-02"
}
Jackson converts the JSON to an object based on this class:
public class ParameterDate {

    @NotNull(message = "Parameter Date Unadjusted can not be blank or null")
    @DateTimeFormat(pattern = "yyyy-MM-dd")
    private Date parameterDateUnadjusted;

    @NotNull(message = "Parameter Date Adjusted can not be blank or null")
    @DateTimeFormat(pattern = "yyyy-MM-dd")
    private Date parameterDateAdjusted;

    private Date parameterDateAdded;
    private Date parameterDateChanged;
}
This all works fine. The issue I'm having is that I would like to validate the data before Jackson converts the data. For instance if I post
{
"parameterDateUnadjusted": "2017-01-01",
"parameterDateAdjusted": "2017-01-40"
}
Where parameterDateAdjusted is not a valid date (there is no month with 40 days in it), Jackson converts it to 2017-02-09. One way around this is to have a class containing only strings, say ParameterDateInput, validate each field with Hibernate Validator, and then copy the parameterDateInput object into parameterDate, where each field has the correct type (dates are of type Date rather than String). That doesn't look like a very elegant solution to me. Is there some other way to solve this? How is posted JSON data generally validated in Spring Boot? I'd like to be able to send a message back to the user/client describing what is wrong with the posted data.
How about a custom JSON deserializer where you can write down the logic you want:
@RestController
public class JacksonCustomDesRestEndpoint {

    @RequestMapping(value = "/yourEndPoint", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
    @ResponseBody
    public Object createRole(@RequestBody ParameterDate paramDate) {
        return paramDate;
    }
}
@JsonDeserialize(using = RoleDeserializer.class)
public class ParameterDate {
    // ......
}
public class RoleDeserializer extends JsonDeserializer<ParameterDate> {

    @Override
    public ParameterDate deserialize(JsonParser jsonParser, DeserializationContext ctxt) throws IOException, JsonProcessingException {
        ObjectCodec oc = jsonParser.getCodec();
        JsonNode node = oc.readTree(jsonParser);
        String parameterDateUnadjusted = node.get("parameterDateUnadjusted").asText();
        // Do what you want with the date, set it on an object of type ParameterDate and return that object at the end.
        // Don't forget to fill in all the properties of this object, because you do not want to lose data that came with the request.
        return something;
    }
}
There is a way to check the dates: the setLenient() method.
public static boolean isValidDate(String inDate, String format) {
    SimpleDateFormat dateFormat = new SimpleDateFormat(format);
    dateFormat.setLenient(false);
    try {
        dateFormat.parse(inDate.trim());
    } catch (ParseException pe) {
        return false;
    }
    return true;
}
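For example, applied to the dates from the question, the non-lenient parser rejects the out-of-range day instead of rolling it over:

// "2017-01-02" parses cleanly; "2017-01-40" is rejected rather than becoming 2017-02-09
boolean valid = isValidDate("2017-01-02", "yyyy-MM-dd");   // true
boolean invalid = isValidDate("2017-01-40", "yyyy-MM-dd"); // false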
Just define your own annotation to validate the value:
@Target({ FIELD, METHOD, PARAMETER, ANNOTATION_TYPE })
@Retention(RUNTIME)
@Constraint(validatedBy = MyDateFormatCheckValidator.class)
@Documented
public @interface MyDateFormatCheck {
    String pattern();
    ...
and the validator class:
public class MyDateFormatCheckValidator implements ConstraintValidator<MyDateFormatCheck, String> {

    private MyDateFormatCheck check;

    @Override
    public void initialize(MyDateFormatCheck constraintAnnotation) {
        this.check = constraintAnnotation;
    }

    @Override
    public boolean isValid(String object, ConstraintValidatorContext constraintContext) {
        if (object == null) {
            return true;
        }
        return isValidDate(object, check.pattern());
    }
}
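Applied to the string-based input class idea from the question, usage might look like this (the class and field names here are hypothetical; the constraint targets String fields because the validator is declared for String):

public class ParameterDateInput {

    // Hypothetical field: the raw string is validated before any Date conversion happens
    @MyDateFormatCheck(pattern = "yyyy-MM-dd")
    private String parameterDateAdjusted;
}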
