LocalDate deserialization & serialization error - jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of java.time.LocalDate - Spring

The birthdate of the employee stored in Elasticsearch seems to be stored with an incorrect datatype. I have set up the Elasticsearch client with an object mapper and registered the JavaTimeModule. When I try to pull the employees list from Elasticsearch, the call fails at deserialization.
Jar Versions:
org.elasticsearch.client:elasticsearch-rest-client -> 6.4.3 (*)
org.elasticsearch.client:elasticsearch-rest-high-level-client -> 6.4.3
org.elasticsearch:elasticsearch -> 6.4.3
org.springframework.data:spring-data-elasticsearch -> 3.1.4.RELEASE
com.fasterxml.jackson.datatype:jackson-datatype-hppc -> 2.9.8
com.fasterxml.jackson.core:jackson-core:2.9.8
com.fasterxml.jackson.core:jackson-databind:2.9.8 (*)
com.fasterxml.jackson.datatype:jackson-datatype-jsr310 -> 2.9.8 (*)
com.fasterxml.jackson.datatype:jackson-datatype-hibernate5 -> 2.9.8
com.fasterxml.jackson.core:jackson-annotations -> 2.9.0
com.fasterxml.jackson.module:jackson-module-afterburner -> 2.9.8
com.fasterxml.jackson.module:jackson-module-parameter-names:2.9.8 (*)
com.fasterxml.jackson.module:jackson-modules-java8:2.9.8
build.gradle:
compile "com.fasterxml.jackson.datatype:jackson-datatype-hppc"
compile "com.fasterxml.jackson.datatype:jackson-datatype-jsr310"
compile "com.fasterxml.jackson.datatype:jackson-datatype-hibernate5"
compile "com.fasterxml.jackson.core:jackson-annotations"
compile "com.fasterxml.jackson.core:jackson-databind"
compile "com.fasterxml.jackson.module:jackson-module-afterburner"
runtime group: 'com.fasterxml.jackson.module', name: 'jackson-modules-java8', version: '2.9.8', ext: 'pom'
// https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names
compile group: 'com.fasterxml.jackson.module', name: 'jackson-module-parameter-names', version: '2.9.8'
compile 'org.elasticsearch:elasticsearch'
compile 'org.elasticsearch.client:transport'
compile 'org.elasticsearch.client:elasticsearch-rest-client'
compile 'org.elasticsearch.client:elasticsearch-rest-high-level-client'
compile 'org.springframework.data:spring-data-elasticsearch'
application.yml
jackson:
  serialization:
    WRITE_DATES_AS_TIMESTAMPS: false
jpa:
  open-in-view: false
ElasticsearchConfiguration code:
@AutoConfigureAfter(value = { JacksonConfiguration.class })
public class ElasticsearchConfiguration {

    RestHighLevelClient client = null;

    @Bean
    public RestHighLevelClient buildClient() {
        return new RestHighLevelClient(
                RestClient.builder(
                        new HttpHost("localhost", 9200, "http"),
                        new HttpHost("localhost", 9201, "http")));
    }

    @Bean
    @Primary
    public ElasticsearchOperations elasticsearchTemplate(final ElasticsearchConverter elasticsearchConverter,
            EntityMapper mapper, Jackson2ObjectMapperBuilder jackson2ObjectMapperBuilder) {
        return new ElasticsearchTemplate((Client) buildClient(), new CustomEntityMapper(new ObjectMapper()));
    }
    //jackson2ObjectMapperBuilder.createXmlMapper(false).build()

    public class CustomEntityMapper implements EntityMapper {

        private ObjectMapper objectMapper;

        public CustomEntityMapper(ObjectMapper objectMapper) {
            this.objectMapper = objectMapper;
            objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
            objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true);
            objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
            objectMapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
            //objectMapper.registerModule(new JavaTimeModule());
            JavaTimeModule javaTimeModule = new JavaTimeModule();
            // Hack time module to allow 'Z' at the end of string (i.e. javascript json's)
            javaTimeModule.addDeserializer(LocalDateTime.class, new LocalDateTimeDeserializer(DateTimeFormatter.ISO_DATE_TIME));
            javaTimeModule.addDeserializer(LocalDate.class, new LocalDateDeserializer(DateTimeFormatter.ISO_DATE));
            objectMapper.registerModule(javaTimeModule);
            objectMapper.registerModule(new ParameterNamesModule());
            objectMapper.registerModule(new Jdk8Module());
            objectMapper.findAndRegisterModules();
        }

        @Override
        public String mapToString(Object object) throws IOException {
            return objectMapper.writeValueAsString(object);
        }

        @Override
        public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
            return objectMapper.readValue(source, clazz);
        }
    }
}
Error:
Caused by: org.springframework.data.elasticsearch.ElasticsearchException: failed to map source [ {"id":73,"name":"K****","birthDate":{"year":2019,"month":"MAY","dayOfYear":150,"leapYear":false,"dayOfMonth":30,"dayOfWeek":"THURSDAY","era":"CE","monthValue":5,"chronology":{"id":"ISO","calendarType":"iso8601"}},"role..}] to class Employee
at org.springframework.data.elasticsearch.core.AbstractResultMapper.mapEntity(AbstractResultMapper.java:45) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.core.DefaultResultMapper.mapResults(DefaultResultMapper.java:102) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.core.ElasticsearchTemplate.queryForPage(ElasticsearchTemplate.java:315) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.core.ElasticsearchTemplate.queryForPage(ElasticsearchTemplate.java:309) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.core.ElasticsearchTemplate.queryForPage(ElasticsearchTemplate.java:139) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.repository.support.AbstractElasticsearchRepository.findAll(AbstractElasticsearchRepository.java:126) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.repository.support.AbstractElasticsearchRepository.findAll(AbstractElasticsearchRepository.java:120) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_211]
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source) ~[na:1.8.0_211]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source) ~[na:1.8.0_211]
at java.lang.reflect.Method.invoke(Unknown Source) ~[na:1.8.0_211]
at org.springframework.data.repository.core.support.RepositoryComposition$RepositoryFragments.invoke(RepositoryComposition.java:359) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.data.repository.core.support.RepositoryComposition.invoke(RepositoryComposition.java:200) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$ImplementationMethodExecutionInterceptor.invoke(RepositoryFactorySupport.java:644) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.4.RELEASE.jar:5.1.4.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.doInvoke(RepositoryFactorySupport.java:608) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.lambda$invoke$3(RepositoryFactorySupport.java:595) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.invoke(RepositoryFactorySupport.java:595) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.4.RELEASE.jar:5.1.4.RELEASE]
at org.springframework.data.projection.DefaultMethodInvokingMethodInterceptor.invoke(DefaultMethodInvokingMethodInterceptor.java:59) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.4.RELEASE.jar:5.1.4.RELEASE]
at org.springframework.aop.interceptor.ExposeInvocationInterceptor.invoke(ExposeInvocationInterceptor.java:93) ~[spring-aop-5.1.4.RELEASE.jar:5.1.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.4.RELEASE.jar:5.1.4.RELEASE]
at org.springframework.data.repository.core.support.SurroundingTransactionDetectorMethodInterceptor.invoke(SurroundingTransactionDetectorMethodInterceptor.java:61) ~[spring-data-commons-2.1.4.RELEASE.jar:2.1.4.RELEASE]
Caused by: com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `java.time.LocalDate` (no Creators, like default construct, exist): cannot deserialize from Object value (no delegate- or property-based Creator)
at [Source: (String)"{"id":73,"name":"Kelly","birthDate":{"year":2019,"month":"MAY","dayOfYear":150,"leapYear":false,"dayOfMonth":30,"dayOfWeek":"THURSDAY","era":"CE","monthValue":5,"chronology":{"id":"ISO","calendarType":"iso8601"}},"role...}},"lastModifiedBy""[truncated 396 chars]; line: 1, column: 38] (through reference chain: Employee["birthDate"])
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.DeserializationContext.handleMissingInstantiator(DeserializationContext.java:1028) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromObjectUsingNonDefault(BeanDeserializerBase.java:1297) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:326) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:159) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.impl.MethodProperty.deserializeAndSet(MethodProperty.java:127) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:288) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:151) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4013) ~[jackson-databind-2.9.8.jar:2.9.8]
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004) ~[jackson-databind-2.9.8.jar:2.9.8]
at org.springframework.data.elasticsearch.core.DefaultEntityMapper.mapToObject(DefaultEntityMapper.java:82) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
at org.springframework.data.elasticsearch.core.AbstractResultMapper.mapEntity(AbstractResultMapper.java:43) ~[spring-data-elasticsearch-3.1.4.RELEASE.jar:3.1.4.RELEASE]
... 34 common frames omitted
Employee:
public class Employee extends AbstractAuditingEntity implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue
    private Long id;

    // Remaining fields are removed for brevity
}
AbstractAuditingEntity:
@MappedSuperclass
@Audited
@EntityListeners({AuditingEntityListener.class, EntityAuditEventListener.class})
public abstract class AbstractAuditingEntity implements Serializable {

    private static final long serialVersionUID = 1L;

    @CreatedBy
    @Column(name = "created_by", nullable = true, length = 50, updatable = false)
    private String createdBy;

    @CreatedDate
    @Column(name = "created_date")
    private LocalDateTime createdDate = LocalDateTime.now();

    @LastModifiedBy
    @Column(name = "last_modified_by", length = 50)
    private String lastModifiedBy;

    @LastModifiedDate
    @Column(name = "last_modified_date")
    private LocalDateTime lastModifiedDate = LocalDateTime.now();
}

Your problem is probably that Jackson does not support the Java 8 date/time API out of the box.
You can add the following library for that support (add in pom.xml):
<dependency>
    <groupId>com.fasterxml.jackson.datatype</groupId>
    <artifactId>jackson-datatype-jsr310</artifactId>
    <version>2.4.0</version>
</dependency>
and configure the ObjectMapper to register this library's modules, like:
ObjectMapper mapper = new ObjectMapper()
        .registerModule(new ParameterNamesModule())
        .registerModule(new Jdk8Module())
        .registerModule(new JavaTimeModule());
Use this link for reference:
https://github.com/FasterXML/jackson-modules-java8
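As a quick sanity check that the module is picked up, the following round trip (a sketch using only jackson-databind and jackson-datatype-jsr310) writes a LocalDate as "2019-05-30" instead of the expanded object seen in the error above:
ObjectMapper mapper = new ObjectMapper()
        .registerModule(new JavaTimeModule())
        .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
String json = mapper.writeValueAsString(LocalDate.of(2019, 5, 30)); // "2019-05-30"
LocalDate parsed = mapper.readValue(json, LocalDate.class);         // a LocalDate again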

I have successfully managed to use java.time types in Spring Data Elasticsearch with the following configuration:
@Configuration
@EnableElasticsearchRepositories(basePackages = "org.example.search.repository")
public class ElasticsearchConfig {

    @Bean
    public ElasticsearchTemplate elasticsearchTemplate(Client client) {
        return new ElasticsearchTemplate(client, new CustomEntityMapper());
    }

    static class CustomEntityMapper implements EntityMapper {

        private final ObjectMapper mapper;

        CustomEntityMapper() {
            mapper = new ObjectMapper()
                    .setDefaultPropertyInclusion(Include.NON_NULL)
                    .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
                    .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
                    .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY)
                    .findAndRegisterModules();
        }

        @Override
        public String mapToString(Object object) throws IOException {
            return mapper.writeValueAsString(object);
        }

        @Override
        public <T> T mapToObject(String source, Class<T> clazz) throws IOException {
            return mapper.readValue(source, clazz);
        }
    }
}

It is more of a workaround, but you can add the annotations below over the respective LocalDate/LocalDateTime fields.
@JsonDeserialize(using = LocalDateTimeDeserializer.class)
@JsonSerialize(using = LocalDateTimeSerializer.class)
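For example, on the Employee from the question it might look like this (a sketch; the serializers/deserializers come from jackson-datatype-jsr310, and for a LocalDate field the LocalDate variants are the ones to use):
public class Employee {
    @JsonDeserialize(using = LocalDateDeserializer.class)
    @JsonSerialize(using = LocalDateSerializer.class)
    private LocalDate birthDate;

    @JsonDeserialize(using = LocalDateTimeDeserializer.class)
    @JsonSerialize(using = LocalDateTimeSerializer.class)
    private LocalDateTime createdDate;
    // ...
}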

For those looking for a solution with Elasticsearch 8 and Spring Boot 3.0:
Create a configuration class extending ElasticsearchConfiguration and override the clientConfiguration and elasticsearchClient creation.
During elasticsearchClient creation, inject your own ObjectMapper configured with the Java 8 time module; it overrides the default ObjectMapper.
@Override
public ClientConfiguration clientConfiguration() {
    return ClientConfiguration.builder()
            .connectedTo(<Hostname> + ":" + <Port>)
            .usingSsl()
            .withBasicAuth(<Username>, <Password>)
            .build();
}

@Override
public ElasticsearchClient elasticsearchClient(RestClient restClient) {
    Assert.notNull(restClient, "restClient must not be null");
    // Create the Java 8 time module
    JavaTimeModule module = new JavaTimeModule();
    module.addSerializer(new LocalDateTimeSerializer(DateTimeFormatter.ofPattern(DateFormat.date_time_no_millis.getPattern())));
    // Register the module with the ObjectMapper
    ObjectMapper objectMapper = new ObjectMapper()
            .registerModule(module);
    // To convert datetime to ISO-8601
    objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
    // Create our own JsonpMapper
    JsonpMapper jsonpMapper = new JacksonJsonpMapper(objectMapper);
    // Create the transport with a Jackson mapper
    ElasticsearchTransport transport = new RestClientTransport(restClient, jsonpMapper);
    // And create the API client
    return new ElasticsearchClient(transport);
}
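For completeness, a sketch of the enclosing class (the class name is hypothetical; the two overrides above go inside it):
@Configuration
public class CustomElasticsearchConfig extends ElasticsearchConfiguration {
    // clientConfiguration() and elasticsearchClient(RestClient) overridden as shown above
}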
Maven dependency:
<dependency>
    <groupId>com.fasterxml.jackson.datatype</groupId>
    <artifactId>jackson-datatype-jsr310</artifactId>
    <version>2.14.0</version>
</dependency>

Related

Cannot convert from [java.lang.String] to [com.example.demo.User]

I'm working on Spring Boot and Apache Kafka, trying to have user-defined configurations -
org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method could not be invoked with the incoming message
Endpoint handler details:
Method [public void com.example.demo.Consumer.consume(com.example.demo.User) throws java.io.IOException]
Bean [com.example.demo.Consumer#7cd4a8cc]; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot handle message; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [java.lang.String] to [com.example.demo.User] for GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}], failedMessage=GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}]; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot handle message; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [java.lang.String] to [com.example.demo.User] for GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}], failedMessage=GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2110) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeErrorHandler(KafkaMessageListenerContainer.java:2098) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:1997) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:1924) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:1812) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1531) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1178) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1075) ~[spring-kafka-2.6.5.jar:2.6.5]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[na:1.8.0_171]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[na:1.8.0_171]
at java.lang.Thread.run(Thread.java:748) ~[na:1.8.0_171]
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot handle message; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [java.lang.String] to [com.example.demo.User] for GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}], failedMessage=GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:340) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:86) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:51) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2065) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2047) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:1984) ~[spring-kafka-2.6.5.jar:2.6.5]
... 8 common frames omitted
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot convert from [java.lang.String] to [com.example.demo.User] for GenericMessage [payload={"name":"Prateek","age":33}, headers={kafka_offset=7, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#32375775, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=0, kafka_receivedTopic=users, kafka_receivedTimestamp=1613708323636, __TypeId__=[B#165438c, kafka_groupId=group_id}]
at org.springframework.messaging.handler.annotation.support.PayloadMethodArgumentResolver.resolveArgument(PayloadMethodArgumentResolver.java:145) ~[spring-messaging-5.3.3.jar:5.3.3]
at org.springframework.kafka.annotation.KafkaListenerAnnotationBeanPostProcessor$KafkaNullAwarePayloadArgumentResolver.resolveArgument(KafkaListenerAnnotationBeanPostProcessor.java:926) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolverComposite.resolveArgument(HandlerMethodArgumentResolverComposite.java:117) ~[spring-messaging-5.3.3.jar:5.3.3]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.getMethodArgumentValues(InvocableHandlerMethod.java:148) ~[spring-messaging-5.3.3.jar:5.3.3]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:116) ~[spring-messaging-5.3.3.jar:5.3.3]
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:48) ~[spring-kafka-2.6.5.jar:2.6.5]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:329) ~[spring-kafka-2.6.5.jar:2.6.5]
... 13 common frames omitted
How can we solve this?
Below is my code -
User.java
@Builder
@Data
@NoArgsConstructor
@AllArgsConstructor
public class User {
    private String name;
    private int age;
}
KafkaProducerConfig.java
@Configuration
public class KafkaProducerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    // 1. Send string to Kafka
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    // 2. Send User objects to Kafka
    @Bean
    public ProducerFactory<String, User> userProducerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    @Bean
    public KafkaTemplate<String, User> userKafkaTemplate() {
        return new KafkaTemplate<>(userProducerFactory());
    }
}
Producer.java
@Service
public class Producer {

    private static final Logger logger = LoggerFactory.getLogger(Producer.class);
    private static final String TOPIC = "users";

    @Autowired
    private KafkaTemplate<String, User> kafkaTemplate;

    public void sendMessage(User user) {
        logger.info(String.format("#### -> Producing message -> %s", user.toString()));
        this.kafkaTemplate.send(TOPIC, user);
    }
}
KafkaConsumerConfig.java
@Configuration
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    @Value(value = "${general.topic.group.id}")
    private String groupId;

    @Value(value = "${user.topic.group.id}")
    private String userGroupId;

    // 1. Consume string data from Kafka
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
        return new DefaultKafkaConsumerFactory<>(props);
    }

    // 2. Consume user objects from Kafka
    public ConsumerFactory<String, User> userConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, userGroupId);
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new JsonDeserializer<>(User.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, User> userKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, User> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(userConsumerFactory());
        factory.setMessageConverter(new StringJsonMessageConverter());
        return factory;
    }
}
Consumer.java
@Service
public class Consumer {

    private final Logger logger = LoggerFactory.getLogger(Producer.class);

    @KafkaListener(topics = "users", groupId = "group_id")
    public void consume(User user) throws IOException {
        logger.info(String.format("#### -> Consumed message -> %s", user.toString()));
    }
}
KafkaController.java
@RestController
@RequestMapping(value = "/kafka")
public class KafkaController {

    private final Producer producer;

    @Autowired
    KafkaController(Producer producer) {
        this.producer = producer;
    }

    @PostMapping(value = "/publish")
    public void sendMessageToKafkaTopic(@RequestBody User user) {
        this.producer.sendMessage(user);
    }
}
KafkaExampleApplication.java
@SpringBootApplication
public class KafkaExampleApplication {
    public static void main(String[] args) {
        SpringApplication.run(KafkaExampleApplication.class, args);
    }
}
application.properties
server.port=9000
kafka.bootstrapAddress=localhost:9092
general.topic.group.id=group_id
user.topic.group.id=MyGrpId
Try adding this to your @KafkaListener annotation, as in the second part of @Lotzy's answer:
containerFactory = "userKafkaListenerContainerFactory"
In your Producer class, you need to rename the kafkaTemplate to userKafkaTemplate, which is wired to the User producer factory with the JSON serializer you want to use.
In fact, you should probably remove the other template and producer factory if you're not using them.
In the KafkaConsumerConfig.java configuration, the bean userConsumerFactory() uses userGroupId = "MyGrpId", wired via @Value from the property user.topic.group.id=MyGrpId. But then in Consumer.java, the @KafkaListener annotation specifies groupId = "group_id".
In the same annotation, it would be better to specify the containerFactory instead of the groupId, like this: @KafkaListener(topics = "users", containerFactory = "userKafkaListenerContainerFactory"). This way the annotation wires in exactly the wanted ConsumerFactory bean configuration via the ConcurrentKafkaListenerContainerFactory.
Also, I agree with @OneCricketeer that in the Producer you should qualify the autowired kafkaTemplate to the one that produces User objects, if you later want to consume JSON deserializable to User objects, like this:
@Autowired
@Qualifier("userKafkaTemplate")
private KafkaTemplate<String, User> kafkaTemplate;
More details here: Baeldung - Apache Kafka with Spring
You should add this to your properties to let the deserializer know what type to cast to:
properties.put(JsonDeserializer.VALUE_DEFAULT_TYPE, com.example.demo.User.class);
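For example, applied to the userConsumerFactory() from the question, configuring the deserializer by class so that the property is picked up (a sketch):
Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
props.put(ConsumerConfig.GROUP_ID_CONFIG, userGroupId);
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
props.put(JsonDeserializer.TRUSTED_PACKAGES, "*");
props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, com.example.demo.User.class);
return new DefaultKafkaConsumerFactory<>(props);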

InvalidFormatException for Date - fixing without using JsonFormat or modifying original class

Introduction
We are using a custom starter, hosted on a Nexus repository, that contains spring-cloud-feign clients making requests to microservices.
One of the microservices returns the dates as "dd-MM-yyyy HH:mm:ssZ" and this works in most of our applications. However, we have one application that is throwing the following error:
Caused by: com.fasterxml.jackson.databind.exc.InvalidFormatException: Cannot deserialize value of type `java.util.Date` from String "2019-10-16 14:23:17": not a valid representation (error: Failed to parse Date value '2019-10-16 14:23:17': Unparseable date: "2019-10-16 14:23:17")
Current work-around
My current work-around, as I don't want to pollute the starter, is to extend the class and create a local feign client and a local POJO with the proper JsonFormat:
public class DocumentMetaDataFix extends DocumentMetaData {
    @JsonFormat(
        shape = Shape.STRING,
        pattern = "yyyy-MM-dd HH:mm:ss"
    )
    private Date creationDate;
    @JsonFormat(
        shape = Shape.STRING,
        pattern = "yyyy-MM-dd HH:mm:ss"
    )
Failed Fixes
I have tried the following in my configuration class, in order to affect the deserialization from another path. However, the DocumentMetaDataDeserializer is never called. The ObjectMapper bean IS called.
@Configuration
@EnableSpringDataWebSupport
@RequiredArgsConstructor
public class MyConfig extends WebMvcConfigurerAdapter {

    @Bean
    public Jackson2ObjectMapperBuilderCustomizer addCustomBigDecimalDeserialization() {
        return new Jackson2ObjectMapperBuilderCustomizer() {
            @Override
            public void customize(Jackson2ObjectMapperBuilder jacksonObjectMapperBuilder) {
                jacksonObjectMapperBuilder.deserializerByType(DocumentMetaData.class, new DocumentMetaDataDeserializer());
            }
        };
    }

    @Primary
    @Bean
    public ObjectMapper objectMapper() {
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true);
        mapper.setDateFormat(new SimpleDateFormat("dd-MM-yyyy HH:mm:ss"));
        //mapper.configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true);
        return mapper;
    }

    @Bean
    public Module dynamoDemoEntityDeserializer() {
        SimpleModule module = new SimpleModule();
        module.addDeserializer(DocumentMetaData.class, new DocumentMetaDataDeserializer());
        return module;
    }

    public static class DocumentMetaDataDeserializer extends JsonDeserializer<DocumentMetaData> {
        @Override
        public DocumentMetaData deserialize(JsonParser jp, DeserializationContext ctxt)
                throws IOException, JsonProcessingException {
            // return DynamoDemoEntity instance;
            JsonNode node = jp.getCodec().readTree(jp);
            return null;
        }

        public DocumentMetaData deserializeWithType(JsonParser jp, DeserializationContext ctxt, TypeDeserializer t) throws IOException {
            JsonNode node = jp.getCodec().readTree(jp);
            return null;
        }
    }
}
Full Stacktrace
Caused by: com.fasterxml.jackson.databind.exc.InvalidFormatException: Cannot deserialize value of type `java.util.Date` from String "2019-10-16 14:23:17": not a valid representation (error: Failed to parse Date value '2019-10-16 14:23:17': Unparseable date: "2019-10-16 14:23:17")
at [Source: (ByteArrayInputStream); line: 1, column: 580] (through reference chain: eu.europa.ec.nova.documentstore.DocumentMetaData["creationDate"])
at com.fasterxml.jackson.databind.exc.InvalidFormatException.from(InvalidFormatException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.weirdStringException(DeserializationContext.java:1548)
at com.fasterxml.jackson.databind.DeserializationContext.handleWeirdStringValue(DeserializationContext.java:910)
at com.fasterxml.jackson.databind.deser.std.StdDeserializer._parseDate(StdDeserializer.java:524)
at com.fasterxml.jackson.databind.deser.std.StdDeserializer._parseDate(StdDeserializer.java:467)
at com.fasterxml.jackson.databind.deser.std.DateDeserializers$DateBasedDeserializer._parseDate(DateDeserializers.java:195)
at com.fasterxml.jackson.databind.deser.std.DateDeserializers$DateDeserializer.deserialize(DateDeserializers.java:285)
at com.fasterxml.jackson.databind.deser.std.DateDeserializers$DateDeserializer.deserialize(DateDeserializers.java:268)
at com.fasterxml.jackson.databind.deser.impl.MethodProperty.deserializeAndSet(MethodProperty.java:127)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:288)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:151)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4013)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3084)
at org.springframework.http.converter.json.AbstractJackson2HttpMessageConverter.readJavaType(AbstractJackson2HttpMessageConverter.java:237)
... 70 common frames omitted
So, any ideas?
I have searched through the project for references to Jackson in case there is anything else in my project causing this.
I will try to go inside the ObjectMapper and debug the current parameters/fields of the configuration at ObjectMapper.java:3084 from the stack trace:
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3084)
at org.springframework.http.converter.json.AbstractJackson2HttpMessageConverter.readJavaType(AbstractJackson2HttpMessageConverter.java:237)
... 67 common frames omitted
Update
I added a breakpoint in the ObjectMapper constructor and saw that it is being initialized from more than one location. This led me to suspect that Spring Boot is not using my ObjectMapper; instead it is using an internal Spring one, created from MappingJackson2HttpMessageConverter.
<init>:480, ObjectMapper
build:606, Jackson2ObjectMapperBuilder
<init>:59, MappingJackson2HttpMessageConverter
<init>:74, AllEncompassingFormHttpMessageConverter
I will therefore try to override this internal Spring one, based on results I found in: How to customise the Jackson JSON mapper implicitly used by Spring Boot?
However, this also failed.
References
Is it possible to configure Jackson custom deserializers at class level for different data types?
https://docs.spring.io/spring-boot/docs/current/reference/html/howto-spring-mvc.html#howto-customize-the-jackson-objectmapper
https://www.baeldung.com/jackson-deserialization
very useful: https://mostafa-asg.github.io/post/customize-json-xml-spring-mvc-output/
How to customise Jackson in Spring Boot 1.4
Update - final list of tries
It still fails with an error.
@Override
public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
    Jackson2ObjectMapperBuilder builder = new Jackson2ObjectMapperBuilder();
    builder.serializationInclusion(JsonInclude.Include.NON_NULL);
    builder.propertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
    builder.serializationInclusion(Include.NON_EMPTY);
    builder.indentOutput(true).dateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
    converters.add(new MappingJackson2HttpMessageConverter(builder.build()));
    converters.add(new MappingJackson2XmlHttpMessageConverter(builder.createXmlMapper(true).build()));
    //converters.add(cmsaMessageConverter());
    converters.add(new StringHttpMessageConverter());
    converters.add(new FormHttpMessageConverter());
    converters.add(new MappingJackson2HttpMessageConverter()); // note: this registers a second, default-configured JSON converter
}
@Bean
public Jackson2ObjectMapperBuilderCustomizer addCustomBigDecimalDeserialization() {
    return new Jackson2ObjectMapperBuilderCustomizer() {
        @Override
        public void customize(Jackson2ObjectMapperBuilder jacksonObjectMapperBuilder) {
            jacksonObjectMapperBuilder.deserializerByType(DocumentMetaData.class, new DocumentMetaDataDeserializer());
        }
    };
}

@Primary
@Bean
public ObjectMapper objectMapper() {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, true);
    mapper.setDateFormat(new SimpleDateFormat("dd-MM-yyyy HH:mm:ss"));
    //mapper.configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true);
    return mapper;
}

@Bean
public Module dynamoDemoEntityDeserializer() {
    SimpleModule module = new SimpleModule();
    module.addDeserializer(DocumentMetaData.class, new DocumentMetaDataDeserializer());
    return module;
}

public static class DocumentMetaDataDeserializer extends JsonDeserializer<DocumentMetaData> {
    @Override
    public DocumentMetaData deserialize(JsonParser jp, DeserializationContext ctxt)
            throws IOException, JsonProcessingException {
        // return DynamoDemoEntity instance;
        JsonNode node = jp.getCodec().readTree(jp);
        return null;
    }

    public DocumentMetaData deserializeWithType(JsonParser jp, DeserializationContext ctxt, TypeDeserializer t) throws IOException {
        JsonNode node = jp.getCodec().readTree(jp);
        return null;
    }
}
It still fails with an error.
Try using LocalDateTime; this is what I'm doing, and it works for me:
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private LocalDateTime date;
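A slightly fuller sketch of that approach on a local DTO (the class name is hypothetical; LocalDateTime support still requires jackson-datatype-jsr310 on the classpath):
public class DocumentMetaDataDto {
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private LocalDateTime creationDate;
    // getters/setters omitted
}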

Introducing Spring Batch fails Dao-Test

I am using Spring Batch in a Spring Boot application as below. Spring Batch and the application seem to work fine with that configuration.
However, with that configuration a simple DAO test (not for Spring Batch) fails with the following exception. Without the Spring Batch configuration the test runs fine. Update: The problem appears when I configure my own JobRepository with a transaction manager (below, in class MyBatchConfigurer).
I tried to provide another transaction manager for Spring Batch, but I am running from exception to exception.
org.springframework.dao.InvalidDataAccessApiUsageException: no transaction is in progress; nested exception is javax.persistence.TransactionRequiredException: no transaction is in progress
Caused by: javax.persistence.TransactionRequiredException: no transaction is in progress
at org.hibernate.internal.SessionImpl.checkTransactionNeeded(SessionImpl.java:3505)
at org.hibernate.internal.SessionImpl.doFlush(SessionImpl.java:1427)
at org.hibernate.internal.SessionImpl.flush(SessionImpl.java:1423)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.orm.jpa.ExtendedEntityManagerCreator$ExtendedEntityManagerInvocationHandler.invoke(ExtendedEntityManagerCreator.java:350)
at com.sun.proxy.$Proxy165.flush(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.orm.jpa.SharedEntityManagerCreator$SharedEntityManagerInvocationHandler.invoke(SharedEntityManagerCreator.java:305)
at com.sun.proxy.$Proxy165.flush(Unknown Source)
at com.foo.dao.GenericDao.save(GenericDao.java:60)
at com.foo.dao.GenericDao$$FastClassBySpringCGLIB$$71a0996b.invoke(<generated>)
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:204)
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:746)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163)
at org.springframework.dao.support.PersistenceExceptionTranslationInterceptor.invoke(PersistenceExceptionTranslationInterceptor.java:139)
... 37 more
Test Setup
@SpringBootTest
@RunWith(SpringRunner.class)
@EntityScan(basePackages = "com.foo.entity")
@DirtiesContext(classMode = DirtiesContext.ClassMode.BEFORE_EACH_TEST_METHOD)
@TestPropertySource("/mytest.properties")
@Sql(executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD, scripts = "classpath:/testdata.sql")
public class MyTest {

    @Inject
    private OrderDao sut;

    @Test
    public void test_findByAnotherFieldId() {
        final Order order = // build an order ...
        sut.save(order);
        final Order result = sut.findByAnotherFieldId("valueOfOtherField");
        assertThat(result).isEqualTo(order);
    }
}
Spring Batch Job configuration
@Configuration
@EnableBatchProcessing
@Import(OracleDatabaseConfig.class)
@ComponentScan(basePackageClasses = MyBatchConfigurer.class)
public class BatchJobConfig {

    // Injects...

    @Qualifier("analyseOrdersJob")
    @Bean
    public Job analyseOrdersJob() {
        return jobBuilderFactory.get("analyseOrdersJob").start(analyseOrdersStep()).build();
    }

    @Bean
    public Step analyseOrdersStep() {
        return stepBuilderFactory.get("analyseOrdersStep").<Order, String>chunk(4)
                .reader(orderItemReader) //
                .processor(orderItemProcessor) //
                .writer(orderItemWriter) //
                .build();
    }
}
Spring Batch configuration
@Component
public class MyBatchConfigurer extends DefaultBatchConfigurer {

    @Inject
    private DataSource dataSource;

    @Override
    public JobRepository getJobRepository() {
        try {
            return extractJobRepository();
        } catch (final Exception e) {
            throw new BatchConfigurationException(e);
        }
    }

    private JobRepository extractJobRepository() throws Exception {
        final JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setTransactionManager(getTransactionManager());
        factory.afterPropertiesSet();
        return factory.getObject();
    }
}
ItemReader
@Component
@StepScope
public class OrderItemReader implements ItemReader<Order> {

    @Inject
    private OrderDao orderdao;

    private int nextOrderIndex;
    private List<Order> orders;

    @PostConstruct
    public void postConstruct() {
        orders = orderdao.findAll();
    }

    @Override
    public Order read() {
        if (nextOrderIndex < orders.size()) {
            final Order order = orders.get(nextOrderIndex);
            nextOrderIndex++;
            return order;
        }
        return null;
    }
}
ItemWriter and ItemProcessor are similarly configured.

Spring MongoDB - org.bson.codecs.configuration.CodecConfigurationException: Can't find a codec for class java.lang.Class

I am evaluating MongoDB and Spring Data MongoDB in a Spring Boot application for one of our upcoming projects. I have a simple POJO which would be saved in MongoDB:
@Document(collection = "customers")
public class CustomerDoc {

    @org.springframework.data.annotation.Id
    private String Id;

    private String firstName;
    private String lastName;
    private MonetaryAmount amount;

    public CustomerDoc(String firstName, String lastName, MonetaryAmount amount) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.amount = amount;
    }
}
The Spring configuration is:
@Bean
public MongoDbFactory mongoDbFactory() throws Exception {
    MongoClient mongoClient = new MongoClient("localhost", 27017);
    return new SimpleMongoDbFactory(mongoClient, mongoDbName);
}

@Bean
public MongoTemplate mongoTemplate() throws Exception {
    return new MongoTemplate(mongoDbFactory(), mongoConverter());
}

@Bean
public MappingMongoConverter mongoConverter() throws Exception {
    MongoMappingContext mappingContext = new MongoMappingContext();
    DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory());
    MappingMongoConverter mongoConverter = new MappingMongoConverter(dbRefResolver, mappingContext);
    mongoConverter.setMapKeyDotReplacement("\\:");
    return mongoConverter;
}
In the configuration, I have to set the map key dot replacement, because without it I get this exception:
org.springframework.data.mapping.model.MappingException: Map key java.lang.Class contains dots but no replacement was configured! Make sure map keys don't contain dots in the first place or configure an appropriate replacement!
This happens because of the MonetaryAmount object in the Customer class.
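A commonly suggested way around this kind of reflective mapping (a sketch, not from the original question; the converter classes are hypothetical and assume the Moneta implementation of javax.money) is to register custom converters so the MonetaryAmount is stored as a plain string:
public class MonetaryAmountToStringConverter implements Converter<MonetaryAmount, String> {
    @Override
    public String convert(MonetaryAmount source) {
        return source.toString(); // e.g. "EUR 25.00"
    }
}

public class StringToMonetaryAmountConverter implements Converter<String, MonetaryAmount> {
    @Override
    public MonetaryAmount convert(String source) {
        return Money.parse(source); // Moneta can parse its own toString() output
    }
}

// Registered on the MappingMongoConverter from the configuration above:
mongoConverter.setCustomConversions(new CustomConversions(
        Arrays.asList(new MonetaryAmountToStringConverter(), new StringToMonetaryAmountConverter())));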
The customer repository is:
public interface CustomerDocRepository extends MongoRepository<CustomerDoc, String> {
    public List<CustomerDoc> findByFirstName(String firstName);
}
Now if I try to save a customer object, I get this exception:
Exception in thread "main" java.lang.IllegalStateException: Failed to execute CommandLineRunner
at org.springframework.boot.SpringApplication.runCommandLineRunners(SpringApplication.java:675)
at org.springframework.boot.SpringApplication.afterRefresh(SpringApplication.java:690)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:321)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:957)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:946)
at com.poc.mongodb.MongoDbApp.main(MongoDbApp.java:43)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: org.bson.codecs.configuration.CodecConfigurationException: Can't find a codec for class java.lang.Class.
at org.bson.codecs.configuration.CodecCache.getOrThrow(CodecCache.java:46)
at org.bson.codecs.configuration.ProvidersCodecRegistry.get(ProvidersCodecRegistry.java:63)
at org.bson.codecs.configuration.ProvidersCodecRegistry.get(ProvidersCodecRegistry.java:37)
at com.mongodb.DBObjectCodec.writeValue(DBObjectCodec.java:210)
at com.mongodb.DBObjectCodec.encodeMap(DBObjectCodec.java:220)
at com.mongodb.DBObjectCodec.writeValue(DBObjectCodec.java:196)
at com.mongodb.DBObjectCodec.encodeMap(DBObjectCodec.java:220)
at com.mongodb.DBObjectCodec.writeValue(DBObjectCodec.java:196)
at com.mongodb.DBObjectCodec.encodeMap(DBObjectCodec.java:220)
at com.mongodb.DBObjectCodec.writeValue(DBObjectCodec.java:196)
at com.mongodb.DBObjectCodec.encode(DBObjectCodec.java:128)
at com.mongodb.DBObjectCodec.encode(DBObjectCodec.java:61)
at com.mongodb.CompoundDBObjectCodec.encode(CompoundDBObjectCodec.java:48)
at com.mongodb.CompoundDBObjectCodec.encode(CompoundDBObjectCodec.java:27)
at org.bson.codecs.BsonDocumentWrapperCodec.encode(BsonDocumentWrapperCodec.java:63)
at org.bson.codecs.BsonDocumentWrapperCodec.encode(BsonDocumentWrapperCodec.java:29)
at com.mongodb.connection.InsertCommandMessage.writeTheWrites(InsertCommandMessage.java:101)
at com.mongodb.connection.InsertCommandMessage.writeTheWrites(InsertCommandMessage.java:43)
at com.mongodb.connection.BaseWriteCommandMessage.encodeMessageBodyWithMetadata(BaseWriteCommandMessage.java:129)
at com.mongodb.connection.RequestMessage.encodeWithMetadata(RequestMessage.java:160)
at com.mongodb.connection.WriteCommandProtocol.sendMessage(WriteCommandProtocol.java:212)
at com.mongodb.connection.WriteCommandProtocol.execute(WriteCommandProtocol.java:101)
at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:67)
at com.mongodb.connection.InsertCommandProtocol.execute(InsertCommandProtocol.java:37)
at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
at com.mongodb.connection.DefaultServerConnection.insertCommand(DefaultServerConnection.java:115)
at com.mongodb.operation.InsertOperation.executeCommandProtocol(InsertOperation.java:76)
at com.mongodb.operation.BaseWriteOperation$1.call(BaseWriteOperation.java:141)
at com.mongodb.operation.BaseWriteOperation$1.call(BaseWriteOperation.java:133)
at com.mongodb.operation.OperationHelper.withConnectionSource(OperationHelper.java:230)
at com.mongodb.operation.OperationHelper.withConnection(OperationHelper.java:221)
at com.mongodb.operation.BaseWriteOperation.execute(BaseWriteOperation.java:133)
at com.mongodb.operation.BaseWriteOperation.execute(BaseWriteOperation.java:60)
at com.mongodb.Mongo.execute(Mongo.java:781)
at com.mongodb.Mongo$2.execute(Mongo.java:764)
at com.mongodb.DBCollection.executeWriteOperation(DBCollection.java:333)
at com.mongodb.DBCollection.insert(DBCollection.java:328)
at com.mongodb.DBCollection.insert(DBCollection.java:319)
at com.mongodb.DBCollection.insert(DBCollection.java:289)
at com.mongodb.DBCollection.insert(DBCollection.java:255)
at com.mongodb.DBCollection.insert(DBCollection.java:239)
at org.springframework.data.mongodb.core.MongoTemplate$10.doInCollection(MongoTemplate.java:1054)
at org.springframework.data.mongodb.core.MongoTemplate$10.doInCollection(MongoTemplate.java:1049)
at org.springframework.data.mongodb.core.MongoTemplate.execute(MongoTemplate.java:462)
at org.springframework.data.mongodb.core.MongoTemplate.insertDBObjectList(MongoTemplate.java:1049)
at org.springframework.data.mongodb.core.MongoTemplate.doInsertBatch(MongoTemplate.java:935)
I am using Mongo driver 3.2.2 and Spring Data MongoDB 1.9.1.
Any help would be really appreciated.

Autowired JpaRepository Save Ignored in custom FieldSetMapper

In brief, I am trying to use JpaRepositories in Spring Batch related classes. But for some reason, only selects work; inserts/updates are ignored.
I have a custom FieldSetMapper as below:
@Component
@StepScope // I have not seen step scope used this way!
public class ItemFieldSetMapper implements FieldSetMapper<BasisCategoryItem> {

    private CSVSchema schema;
    private Task task;
    Logger logger = Logger.getLogger(LocalPersist.class.getName());

    @Autowired
    private CSVColumnDao csvColumnDao;

    @Autowired
    private BasisCategoryDao basisCategoryDao;

    @Override
    public BasisCategoryItem mapFieldSet(FieldSet fieldSet) throws BindException {
        // csvColumnDao.save(someobject) // This is ignored
    }
}
Since I have to start the job asynchronously (the job is started from a controller method), I created a custom DefaultBatchConfigurer as below:
@Component
@EnableBatchProcessing
public class ProductImportBatchConfig extends DefaultBatchConfigurer {

    @Autowired
    private TaskExecutor taskExecutor; // This is coming from another bean defined in another configuration class.

    /*@Autowired
    private DataSource dataSource;*/

    @Autowired
    private PlatformTransactionManager transactionManager;

    @Override
    protected JobLauncher createJobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(super.getJobRepository());
        jobLauncher.setTaskExecutor(taskExecutor);
        return jobLauncher;
    }

    @Override
    public PlatformTransactionManager getTransactionManager() {
        // TODO Auto-generated method stub
        return transactionManager;
    }

    @Override
    @Autowired
    public void setDataSource(DataSource dataSource) {
        DatabasePopulatorUtils.execute(dropDatabasePopulator(), dataSource);
        DatabasePopulatorUtils.execute(createDatabasePopulator(), dataSource);
        // TODO Auto-generated method stub
        super.setDataSource(dataSource);
    }

    private DatabasePopulator dropDatabasePopulator() {
        ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
        databasePopulator.setContinueOnError(true);
        databasePopulator.addScript(new ClassPathResource("org/springframework/batch/core/schema-drop-mysql.sql"));
        return databasePopulator;
    }

    private DatabasePopulator createDatabasePopulator() {
        ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
        databasePopulator.setContinueOnError(true);
        databasePopulator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        return databasePopulator;
    }
}
This is my Application class:
@Configuration
@ComponentScan
@EnableAutoConfiguration(exclude = { BatchAutoConfiguration.class })
public class Application extends SpringBootServletInitializer {

    private static Class<Application> applicationClass = Application.class;

    public static void main(String[] args) {
        SpringApplication.run(applicationClass, args);
    }

    @Override
    protected final SpringApplicationBuilder configure(final SpringApplicationBuilder application) {
        return application.sources(applicationClass);
    }

    @Bean
    public ThreadPoolTaskExecutor taskExecutor() {
        ThreadPoolTaskExecutor pool = new ThreadPoolTaskExecutor();
        pool.setCorePoolSize(6);
        pool.setMaxPoolSize(30);
        pool.setWaitForTasksToCompleteOnShutdown(false);
        return pool;
    }
}
What I have figured out so far:
I went through a lot of documentation to get my scenario working. This is the closest I got.
The issue has to do with @EnableBatchProcessing. It creates its own DataSourceTransactionManager, which has no clue about JPA/Hibernate and ignores inserts/updates.
As the suggested solution:
@Bean
public BatchConfigurer batchConfigurer(DataSource dataSource, EntityManagerFactory entityManagerFactory) {
    return new BasicBatchConfigurer(dataSource, entityManagerFactory);
}
This does not seem to work because the constructor changed in newer versions.
It seems I have to get rid of @EnableBatchProcessing and get the same transaction manager to work in both the application and batch contexts.
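One way to keep @EnableBatchProcessing and still hand Spring Batch the JPA-aware transaction manager is a custom BatchConfigurer that overrides getTransactionManager() (a minimal sketch; the class name is hypothetical):
@Component
public class JpaBatchConfigurer extends DefaultBatchConfigurer {

    @Autowired
    private EntityManagerFactory entityManagerFactory;

    // Return a JPA-aware transaction manager so chunk transactions
    // also flush Hibernate inserts/updates.
    @Override
    public PlatformTransactionManager getTransactionManager() {
        return new JpaTransactionManager(entityManagerFactory);
    }
}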
Here are some trials and errors:
If I keep @EnableBatchProcessing and use @EnableAutoConfiguration(exclude = { BatchAutoConfiguration.class }), the batch tables are created and the application runs correctly, but at runtime I get the following.
java.lang.StackOverflowError: null
at java.util.concurrent.ConcurrentHashMap.get(ConcurrentHashMap.java:936) ~[na:1.8.0_72]
at org.springframework.aop.framework.AdvisedSupport.getInterceptorsAndDynamicInterceptionAdvice(AdvisedSupport.java:487) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:193) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at com.sun.proxy.$Proxy126.getTransaction(Unknown Source) ~[na:na]
at sun.reflect.GeneratedMethodAccessor110.invoke(Unknown Source) ~[na:na]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_72]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_72]
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:302) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:190) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:157) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.batch.core.configuration.annotation.SimpleBatchConfiguration$PassthruAdvice.invoke(SimpleBatchConfiguration.java:127) ~[spring-batch-core-3.0.6.RELEASE.jar:3.0.6.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:208) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at com.sun.proxy.$Proxy126.getTransaction(Unknown Source) ~[na:na]
at sun.reflect.GeneratedMethodAccessor110.invoke(Unknown Source) ~[na:na]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_72]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_72]
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:302) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:190) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:157) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.batch.core.configuration.annotation.SimpleBatchConfiguration$PassthruAdvice.invoke(SimpleBatchConfiguration.java:127) ~[spring-batch-core-3.0.6.RELEASE.jar:3.0.6.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:208) ~[spring-aop-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at com.sun.proxy.$Proxy126.getTransaction(Unknown Source) ~[na:na]
at sun.reflect.GeneratedMethodAccessor110.invoke(Unknown Source) ~[na:na]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_72]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_72]
Here is the properties file I am using:
spring.datasource.url = jdbc:mysql://localhost:3306/db
spring.datasource.username =root
spring.datasource.password =root
spring.datasource.driverClassName = com.mysql.jdbc.Driver
spring.datasource.validationQuery = SELECT 1
spring.datasource.timeBetweenEvictionRunsMillis = 3600
spring.datasource.minEvictableIdleTimeMillis = 3600
spring.datasource.testWhileIdle = true
spring.jpa.database = MYSQL
spring.jpa.show-sql=false
spring.jpa.hibernate.ddl-auto =update
spring.jpa.hibernate.dialect = org.hibernate.dialect.MySQL5Dialect
spring.jpa.hibernate.naming_strategy = org.hibernate.cfg.ImprovedNamingStrategy
spring.batch.job.enabled=false
spring.view.prefix=/WEB-INF/jsp/
spring.view.suffix=.jsp
But when I comment out @EnableBatchProcessing, the application complains about the JobBuilderFactory bean, etc. It seems like using the same transaction manager is not common practice, and I have not seen autowiring of JPA repositories in batch-related classes (e.g., ItemWriters, ItemReaders, etc.). But I want this scenario to work, since this is a migration from a different implementation to Spring Batch.
I was confused about injecting the JPA transaction manager into Spring Batch. This is how I was able to do it:
@Configuration
@EnableBatchProcessing
public class ProductImportBatchConfig implements BatchConfigurer {

    private static final Log logger = LogFactory.getLog(DefaultBatchConfigurer.class);

    @Autowired
    private TaskExecutor taskExecutor;

    @Autowired
    private DataSource dataSource;

    private PlatformTransactionManager transactionManager;

    @Autowired
    private LocalContainerEntityManagerFactoryBean entityManagerFactory;

    private JobRepository jobRepository;
    private JobLauncher jobLauncher;
    private JobExplorer jobExplorer;

    protected ProductImportBatchConfig() {
    }

    @Override
    public JobRepository getJobRepository() {
        return jobRepository;
    }

    @Override
    public PlatformTransactionManager getTransactionManager() {
        return transactionManager;
    }

    @Override
    public JobLauncher getJobLauncher() {
        return jobLauncher;
    }

    @Override
    public JobExplorer getJobExplorer() {
        return jobExplorer;
    }

    @Autowired
    public void setDataSource(DataSource dataSource) {
        if (this.dataSource == null) {
            logger.error(null, new Throwable("This is not acceptable"));
        }
        DatabasePopulatorUtils.execute(dropDatabasePopulator(), this.dataSource);
        DatabasePopulatorUtils.execute(createDatabasePopulator(), this.dataSource);
        this.dataSource = dataSource;
        EntityManagerFactory object = entityManagerFactory.getObject();
        JpaTransactionManager jpaTransactionManager = new JpaTransactionManager(object);
        this.transactionManager = jpaTransactionManager;
    }

    @PostConstruct
    public void initialize() {
        try {
            if (dataSource == null) {
                logger.warn("No datasource was provided...using a Map based JobRepository");
                if (this.transactionManager == null) {
                    this.transactionManager = new ResourcelessTransactionManager();
                }
                MapJobRepositoryFactoryBean jobRepositoryFactory = new MapJobRepositoryFactoryBean(
                        this.transactionManager);
                jobRepositoryFactory.afterPropertiesSet();
                this.jobRepository = jobRepositoryFactory.getObject();
                MapJobExplorerFactoryBean jobExplorerFactory = new MapJobExplorerFactoryBean(jobRepositoryFactory);
                jobExplorerFactory.afterPropertiesSet();
                this.jobExplorer = jobExplorerFactory.getObject();
            } else {
                this.jobRepository = createJobRepository();
                JobExplorerFactoryBean jobExplorerFactoryBean = new JobExplorerFactoryBean();
                jobExplorerFactoryBean.setDataSource(this.dataSource);
                jobExplorerFactoryBean.afterPropertiesSet();
                this.jobExplorer = jobExplorerFactoryBean.getObject();
            }
            this.jobLauncher = createJobLauncher();
        } catch (Exception e) {
            throw new BatchConfigurationException(e);
        }
    }

    public JobLauncher createJobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository);
        jobLauncher.setTaskExecutor(taskExecutor);
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    public JobRepository createJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setTransactionManager(transactionManager);
        factory.afterPropertiesSet();
        return factory.getObject();
    }

    // Batch related scripts
    private DatabasePopulator dropDatabasePopulator() {
        ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
        databasePopulator.setContinueOnError(true);
        databasePopulator.addScript(new ClassPathResource("org/springframework/batch/core/schema-drop-mysql.sql"));
        return databasePopulator;
    }

    private DatabasePopulator createDatabasePopulator() {
        ResourceDatabasePopulator databasePopulator = new ResourceDatabasePopulator();
        databasePopulator.setContinueOnError(true);
        databasePopulator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        return databasePopulator;
    }
}
