Issue with SASL mechanism in Kafka - spring-boot

Use case -
Push data to Confluent Cloud Kafka from Spring Boot
Error -
java.lang.IllegalArgumentException: Could not find a 'KafkaClient' entry in the JAAS configuration. System property 'java.security.auth.login.config' is not set
I have gone through multiple discussions on SO and it seems like I am not missing anything. Below is my Kafka config; I am still not sure why it's unable to create the client.
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.security.plain.PlainLoginModule;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KakfaConfiguration {

    @Bean
    public ProducerFactory<String, User> producerFactory() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "pkc-l7pr2.ap-south-1.aws.confluent.cloud:9092");
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
        config.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
        config.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username='test' password='tes1/FRi/BNfpRhWwLlxb573NdgpQZ6SCTZ';");
        return new DefaultKafkaProducerFactory<>(config);
    }

    @Bean
    public KafkaTemplate<String, User> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
I am providing the JAAS config in the producer properties, so I am not sure why it is unable to pick up the value. Any help is highly appreciated.
It seems like the JAAS values are not being passed; I am not sure what the reason is. Here is the producer config that gets logged:
acks = 1
batch.size = 16384
block.on.buffer.full = false
bootstrap.servers = [pkc-l7pr2.ap-south-1.aws.confluent.cloud:9092]
buffer.memory = 33554432
client.id =
compression.type = none
connections.max.idle.ms = 540000
interceptor.classes = null
key.serializer = class org.apache.kafka.common.serialization.StringSerializer
linger.ms = 0
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.fetch.timeout.ms = 60000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 0
retry.backoff.ms = 100
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = PLAIN
security.protocol = SASL_PLAINTEXT
send.buffer.bytes = 131072
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
timeout.ms = 30000
value.serializer = class org.springframework.kafka.support.serializer.JsonSerializer
I was able to resolve the issue after upgrading the Kafka client library to the latest version. However, in another program, instead of adding the properties in the config class I am keeping them in application.properties, and it's failing.
application.properties
spring.kafka.properties.sasl.mechanism=PLAIN
spring.kafka.properties.bootstrap.servers=pkc-l7pr2.ap-south-1.aws.confluent.cloud:9092
spring.kafka.properties.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username='test' password='test/FRi/BNfpRhWwLlxb573NdgpQZ6SCTZ';
spring.kafka.properties.security.protocol=SASL_SSL
# Best practice for higher availability in Apache Kafka clients prior to 3.0
spring.kafka.properties.session.timeout.ms=45000
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.IntegerSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
server.port=9099
Error
java.lang.NullPointerException: null
at io.pranav.kafkatest.Producer.lambda$1(KafkaTestApplication.java:61) ~[classes/:na]
at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:106) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.publisher.FluxZip$ZipCoordinator.drain(FluxZip.java:756) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.publisher.FluxZip$ZipInner.onNext(FluxZip.java:915) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.publisher.FluxInterval$IntervalRunnable.run(FluxInterval.java:125) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.scheduler.PeriodicWorkerTask.call(PeriodicWorkerTask.java:59) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.scheduler.PeriodicWorkerTask.run(PeriodicWorkerTask.java:73) ~[reactor-core-3.4.16.jar:3.4.16]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[na:1.8.0_291]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) ~[na:1.8.0_291]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) ~[na:1.8.0_291]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) ~[na:1.8.0_291]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[na:1.8.0_291]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[na:1.8.0_291]
at java.lang.Thread.run(Thread.java:748) ~[na:1.8.0_291]
Suppressed: java.lang.Exception: #block terminated with an error
at reactor.core.publisher.BlockingSingleSubscriber.blockingGet(BlockingSingleSubscriber.java:99) ~[reactor-core-3.4.16.jar:3.4.16]
at reactor.core.publisher.Flux.blockFirst(Flux.java:2600) ~[reactor-core-3.4.16.jar:3.4.16]
at io.pranav.kafkatest.Producer.generate(KafkaTestApplication.java:61) ~[classes/:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_291]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_291]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_291]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_291]
at org.springframework.context.event.ApplicationListenerMethodAdapter.doInvoke(ApplicationListenerMethodAdapter.java:344) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.event.ApplicationListenerMethodAdapter.processEvent(ApplicationListenerMethodAdapter.java:229) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.event.ApplicationListenerMethodAdapter.onApplicationEvent(ApplicationListenerMethodAdapter.java:166) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.event.SimpleApplicationEventMulticaster.doInvokeListener(SimpleApplicationEventMulticaster.java:176) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.event.SimpleApplicationEventMulticaster.invokeListener(SimpleApplicationEventMulticaster.java:169) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:143) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:421) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:378) ~[spring-context-5.3.18.jar:5.3.18]
at org.springframework.boot.context.event.EventPublishingRunListener.started(EventPublishingRunListener.java:108) ~[spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplicationRunListeners.lambda$started$5(SpringApplicationRunListeners.java:78) ~[spring-boot-2.6.6.jar:2.6.6]
at java.util.ArrayList.forEach(ArrayList.java:1259) ~[na:1.8.0_291]
at org.springframework.boot.SpringApplicationRunListeners.doWithListeners(SpringApplicationRunListeners.java:120) ~[spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplicationRunListeners.doWithListeners(SpringApplicationRunListeners.java:114) ~[spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplicationRunListeners.started(SpringApplicationRunListeners.java:78) ~[spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:309) [spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1312) [spring-boot-2.6.6.jar:2.6.6]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1301) [spring-boot-2.6.6.jar:2.6.6]
at io.pranav.kafkatest.KafkaTestApplication.main(KafkaTestApplication.java:25) [classes/:na]
Producer
class Producer {

    private KafkaTemplate<Integer, String> template;
    Faker faker;

    @EventListener(ApplicationStartedEvent.class)
    public void generate() {
        faker = Faker.instance();
        System.out.println(faker.hobbit().quote());
        final Flux<Long> interval = Flux.interval(Duration.ofMillis(1_000));
        System.out.println(interval);
        System.out.println("Next Interval ..." + faker.random().nextInt(42));
        final Flux<String> quotes = Flux.fromStream(Stream.generate(() -> faker.hobbit().quote()));
        System.out.println("Quote is .." + quotes.toString());
        Flux.zip(interval, quotes).map(it -> template.send("hobbit", 11, "hi")).blockFirst();
        System.out.println("Data pushed..");
    }
}
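The NullPointerException at KafkaTestApplication.java:61 most likely comes from the template.send(...) call inside the zip: in the class above the KafkaTemplate field is declared but never injected. A minimal sketch of constructor injection that would avoid that particular NPE (assuming the rest of the class stays as shown; this does not address broker connectivity):
import java.time.Duration;
import java.util.stream.Stream;
import org.springframework.boot.context.event.ApplicationStartedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import com.github.javafaker.Faker;
import reactor.core.publisher.Flux;

@Component
class Producer {

    // Injected by Spring through the constructor instead of being left null
    private final KafkaTemplate<Integer, String> template;
    private final Faker faker = Faker.instance();

    Producer(KafkaTemplate<Integer, String> template) {
        this.template = template;
    }

    @EventListener(ApplicationStartedEvent.class)
    public void generate() {
        final Flux<Long> interval = Flux.interval(Duration.ofMillis(1_000));
        final Flux<String> quotes = Flux.fromStream(Stream.generate(() -> faker.hobbit().quote()));
        Flux.zip(interval, quotes)
                .map(it -> template.send("hobbit", faker.random().nextInt(42), it.getT2()))
                .blockFirst();
    }
}
The Integer/String generic types match the IntegerSerializer and StringSerializer configured in application.properties above.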

sasl.jaas.config was introduced in kafka-clients 0.10.2. Can you check that you are using a recent kafka-clients release? The SASL_JAAS_CONFIG constant is located in the org.apache.kafka.common.config.SaslConfigs class.
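For reference, a minimal sketch of that entry written with the SaslConfigs constant and the double-quoted form used in the Confluent examples (credentials are placeholders; this would go inside the producerFactory() bean shown above):
// SaslConfigs.SASL_JAAS_CONFIG is the constant for "sasl.jaas.config" (available since kafka-clients 0.10.2)
config.put(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.plain.PlainLoginModule required "
                + "username=\"<API_KEY>\" password=\"<API_SECRET>\";");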

Related

JSON array serialization issue after spring-cloud-stream update

I use spring-cloud-stream-binder-kafka to connect to the Kafka broker.
I updated the spring-cloud-dependencies version to 2021.0.1.
I went from this
[INFO] +- org.springframework.cloud:spring-cloud-stream-binder-kafka:jar:3.0.11.RELEASE:compile
[INFO] | +- org.springframework.cloud:spring-cloud-stream-binder-kafka-core:jar:3.0.11.RELEASE:compile
[INFO] | | \- org.springframework.integration:spring-integration-kafka:jar:3.3.1.RELEASE:compile
[INFO] | +- org.apache.kafka:kafka-clients:jar:2.5.1:compile
[INFO] | \- org.springframework.kafka:spring-kafka:jar:2.5.11.RELEASE:compile
To this
[INFO] +- org.springframework.cloud:spring-cloud-stream-binder-kafka:jar:3.2.2:compile
[INFO] | +- org.springframework.cloud:spring-cloud-stream-binder-kafka-core:jar:3.2.2:compile
[INFO] | | \- org.springframework.integration:spring-integration-kafka:jar:5.5.10:compile
[INFO] | +- org.apache.kafka:kafka-clients:jar:3.0.1:compile
[INFO] | \- org.springframework.kafka:spring-kafka:jar:2.8.4:compile
Before the update everything worked fine: I consume JSON array messages ([{}]) from the topic (each message is a JSON array) and, using ObjectMapper, I convert the Message<String> into a POJO List.
@Bean
public Function<Message<String>, Message<Map<String, Object>>> process(HandleMessage handleMessage) {
    return message -> handleMessage.onMessage(message);
}
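For context, a minimal sketch of what a handler like this typically does with the JSON array payload (the actual HandleMessage class is not shown in the question, so class, method, and field names here are purely illustrative):
import java.util.List;
import java.util.Map;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

@Component
public class HandleMessage {

    private final ObjectMapper objectMapper = new ObjectMapper();

    public Message<Map<String, Object>> onMessage(Message<String> message) {
        try {
            // The payload is a JSON array ([{}, {}]), deserialized into a list of maps
            List<Map<String, Object>> items = objectMapper.readValue(
                    message.getPayload(), new TypeReference<List<Map<String, Object>>>() {});
            // Illustrative only: forward the first element downstream with the original headers
            return MessageBuilder.withPayload(items.get(0))
                    .copyHeaders(message.getHeaders())
                    .build();
        } catch (Exception e) {
            throw new IllegalStateException("Failed to parse JSON array payload", e);
        }
    }
}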
But now, after the update, when it tries to consume messages I get these exceptions:
Caused by: org.apache.kafka.common.errors.SerializationException: Can't convert value of class java.lang.String to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
Caused by: java.lang.ClassCastException: class java.lang.String cannot be cast to class [B (java.lang.String and [B are in module java.base of loader 'bootstrap')
Caused by: org.apache.kafka.common.errors.SerializationException: Can't convert value of class reactor.core.publisher.FluxMapFuseable to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
Caused by: java.lang.ClassCastException: class reactor.core.publisher.FluxMapFuseable cannot be cast to class [B (reactor.core.publisher.FluxMapFuseable is in unnamed module of loader 'app'; [B is in module java.base of loader 'bootstrap')
Here is the complete stack trace:
org.springframework.messaging.MessageHandlingException: error occurred in message handler [org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder$ProducerConfigurationMessageHandler#8ff35b3]; nested exception is org.apache.kafka.common.errors.SerializationException: Can't convert value of class java.lang.String to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
at org.springframework.integration.support.utils.IntegrationUtils.wrapInHandlingExceptionIfNecessary(IntegrationUtils.java:191)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:65)
at org.springframework.cloud.stream.binder.AbstractMessageChannelBinder$SendingHandler.handleMessageInternal(AbstractMessageChannelBinder.java:1074)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
at org.springframework.integration.dispatcher.AbstractDispatcher.tryOptimizedDispatch(AbstractDispatcher.java:115)
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:133)
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:109)
at org.springframework.integration.handler.AbstractMessageProducingHandler.sendOutput(AbstractMessageProducingHandler.java:457)
at org.springframework.integration.handler.AbstractMessageProducingHandler.doProduceOutput(AbstractMessageProducingHandler.java:325)
at org.springframework.integration.handler.AbstractMessageProducingHandler.produceOutput(AbstractMessageProducingHandler.java:268)
at org.springframework.integration.handler.AbstractMessageProducingHandler.sendOutputs(AbstractMessageProducingHandler.java:232)
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:142)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
at org.springframework.integration.dispatcher.BroadcastingDispatcher.invokeHandler(BroadcastingDispatcher.java:222)
at org.springframework.integration.dispatcher.BroadcastingDispatcher.dispatch(BroadcastingDispatcher.java:178)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:109)
at org.springframework.integration.handler.AbstractMessageProducingHandler.sendOutput(AbstractMessageProducingHandler.java:457)
at org.springframework.integration.handler.AbstractMessageProducingHandler.doProduceOutput(AbstractMessageProducingHandler.java:325)
at org.springframework.integration.handler.AbstractMessageProducingHandler.produceOutput(AbstractMessageProducingHandler.java:268)
at org.springframework.integration.handler.AbstractMessageProducingHandler.sendOutputs(AbstractMessageProducingHandler.java:232)
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:142)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
at org.springframework.integration.dispatcher.BroadcastingDispatcher.invokeHandler(BroadcastingDispatcher.java:222)
at org.springframework.integration.dispatcher.BroadcastingDispatcher.dispatch(BroadcastingDispatcher.java:178)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:109)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:99)
at org.springframework.integration.core.ErrorMessagePublisher.publish(ErrorMessagePublisher.java:168)
at org.springframework.integration.handler.advice.ErrorMessageSendingRecoverer.recover(ErrorMessageSendingRecoverer.java:83)
at org.springframework.retry.support.RetryTemplate.handleRetryExhausted(RetryTemplate.java:539)
at org.springframework.retry.support.RetryTemplate.doExecute(RetryTemplate.java:387)
at org.springframework.retry.support.RetryTemplate.execute(RetryTemplate.java:255)
at org.springframework.kafka.listener.adapter.RetryingMessageListenerAdapter.onMessage(RetryingMessageListenerAdapter.java:119)
at org.springframework.kafka.listener.adapter.RetryingMessageListenerAdapter.onMessage(RetryingMessageListenerAdapter.java:42)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2629)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2609)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2536)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2427)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2305)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1979)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1364)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1355)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1247)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: org.apache.kafka.common.errors.SerializationException: Can't convert value of class java.lang.String to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:904)
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:861)
at org.springframework.kafka.core.DefaultKafkaProducerFactory$CloseSafeProducer.send(DefaultKafkaProducerFactory.java:993)
at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:649)
at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:429)
at org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler.handleRequestMessage(KafkaProducerMessageHandler.java:513)
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:136)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
... 62 common frames omitted
Caused by: java.lang.ClassCastException: class java.lang.String cannot be cast to class [B (java.lang.String and [B are in module java.base of loader 'bootstrap')
at org.apache.kafka.common.serialization.ByteArraySerializer.serialize(ByteArraySerializer.java:19)
at org.apache.kafka.common.serialization.Serializer.serialize(Serializer.java:62)
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:901)
... 69 common frames omitted
2022-04-20 04:36:34.107 ERROR 28644 --- [container-0-C-1] o.s.integration.handler.LoggingHandler : org.springframework.messaging.MessageHandlingException: error occurred in message handler [org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder$ProducerConfigurationMessageHandler#518e5340]; nested exception is org.apache.kafka.common.errors.SerializationException: Can't convert value of class reactor.core.publisher.FluxMapFuseable to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer, failedMessage=GenericMessage [payload=FluxMapFuseable, headers={aeg-delivery-count=[B#86b720c, aeg-event-type=[B#371ebea0, deliveryAttempt=3, aeg-output-event-id=[B#5b6b16cc, kafka_timestampType=CREATE_TIME, kafka_receivedTopic=deposit-assets, target-protocol=kafka, aeg-metadata-version=[B#76375b3f, kafka_offset=7813, aeg-subscription-name=[B#259fbce7, scst_nativeHeadersPresent=true, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#a629a5c, aeg-data-version=[B#6736ed4a, id=09a642f8-1a9b-0ce1-f8a9-331038295adc, kafka_receivedPartitionId=0, kafka_receivedTimestamp=1650429388041, contentType=application/json, app.name=[B#64684a16, kafka_groupId=foo, timestamp=1650429391672}]
at org.springframework.integration.support.utils.IntegrationUtils.wrapInHandlingExceptionIfNecessary(IntegrationUtils.java:191)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:65)
at org.springframework.cloud.stream.binder.AbstractMessageChannelBinder$SendingHandler.handleMessageInternal(AbstractMessageChannelBinder.java:1074)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
at org.springframework.integration.dispatcher.AbstractDispatcher.tryOptimizedDispatch(AbstractDispatcher.java:115)
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:133)
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractDestinationResolvingMessagingTemplate.send(AbstractDestinationResolvingMessagingTemplate.java:72)
at org.springframework.cloud.stream.function.FunctionConfiguration$FunctionToDestinationBinder$1.doSendMessage(FunctionConfiguration.java:619)
at org.springframework.cloud.stream.function.FunctionConfiguration$FunctionToDestinationBinder$1.handleMessageInternal(FunctionConfiguration.java:602)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
at org.springframework.integration.dispatcher.AbstractDispatcher.tryOptimizedDispatch(AbstractDispatcher.java:115)
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:133)
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:317)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:272)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:187)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:166)
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:109)
at org.springframework.integration.endpoint.MessageProducerSupport.sendMessage(MessageProducerSupport.java:216)
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter.sendMessageIfAny(KafkaMessageDrivenChannelAdapter.java:397)
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter.access$300(KafkaMessageDrivenChannelAdapter.java:83)
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter$IntegrationRecordMessageListener.onMessage(KafkaMessageDrivenChannelAdapter.java:454)
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter$IntegrationRecordMessageListener.onMessage(KafkaMessageDrivenChannelAdapter.java:428)
at org.springframework.kafka.listener.adapter.RetryingMessageListenerAdapter.lambda$onMessage$0(RetryingMessageListenerAdapter.java:125)
at org.springframework.retry.support.RetryTemplate.doExecute(RetryTemplate.java:329)
at org.springframework.retry.support.RetryTemplate.execute(RetryTemplate.java:255)
at org.springframework.kafka.listener.adapter.RetryingMessageListenerAdapter.onMessage(RetryingMessageListenerAdapter.java:119)
at org.springframework.kafka.listener.adapter.RetryingMessageListenerAdapter.onMessage(RetryingMessageListenerAdapter.java:42)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2629)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2609)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2536)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2427)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2305)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1979)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1364)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1355)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1247)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: org.apache.kafka.common.errors.SerializationException: Can't convert value of class reactor.core.publisher.FluxMapFuseable to class org.apache.kafka.common.serialization.ByteArraySerializer specified in value.serializer
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:904)
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:861)
at org.springframework.kafka.core.DefaultKafkaProducerFactory$CloseSafeProducer.send(DefaultKafkaProducerFactory.java:993)
at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:649)
at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:429)
at org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler.handleRequestMessage(KafkaProducerMessageHandler.java:513)
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:136)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:56)
... 47 more
Caused by: java.lang.ClassCastException: class reactor.core.publisher.FluxMapFuseable cannot be cast to class [B (reactor.core.publisher.FluxMapFuseable is in unnamed module of loader 'app'; [B is in module java.base of loader 'bootstrap')
at org.apache.kafka.common.serialization.ByteArraySerializer.serialize(ByteArraySerializer.java:19)
at org.apache.kafka.common.serialization.Serializer.serialize(Serializer.java:62)
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:901)
... 54 more
I have checked some of the Spring Cloud source code, in particular the class org.springframework.cloud.function.context.catalog.SimpleFunctionRegistry, and I found that a method named fluxifyInputIfNecessary has been added. I guess some streaming is happening and the message is wrapped so that it is sent as a FluxMapFuseable.
And why is the process function not even called? Because the error occurs at the producer level.
So, any idea how to solve this problem?
ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = latest
bootstrap.servers = [foo-server:9093]
check.crcs = true
client.dns.lookup = use_all_dns_ips
client.id = consumer-null-4
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = true
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = null
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = true
internal.throw.on.fetch.stable.offset.unsupported = false
isolation.level = read_uncommitted
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 500
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = [hidden]
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = PLAIN
security.protocol = SASL_SSL
security.providers = null
send.buffer.bytes = 131072
session.timeout.ms = 45000
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.3
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
ProducerConfig values:
acks = 1
batch.size = 16384
bootstrap.servers = [foo-server:9093]
buffer.memory = 33554432
client.dns.lookup = use_all_dns_ips
client.id = producer-1
compression.type = none
connections.max.idle.ms = 540000
delivery.timeout.ms = 120000
enable.idempotence = false
interceptor.classes = []
key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
linger.ms = 0
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.max.age.ms = 300000
metadata.max.idle.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = [hidden]
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = PLAIN
security.protocol = SASL_SSL
security.providers = null
send.buffer.bytes = 131072
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.3
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
transaction.timeout.ms = 60000
transactional.id = null
value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer

Spring Batch: Resource must not be null

I am new to the Spring Batch framework. I am writing a Spring Boot program to read from an XML file and store the data in a MySQL database.
But the application is throwing the following error.
org.springframework.batch.item.ItemStreamException: Failed to initialize the reader
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.open(AbstractItemCountingItemStreamItemReader.java:153) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader$$FastClassBySpringCGLIB$$ebb633d0.invoke(<generated>) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) ~[spring-core-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:771) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:749) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.support.DelegatingIntroductionInterceptor.doProceed(DelegatingIntroductionInterceptor.java:136) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.support.DelegatingIntroductionInterceptor.invoke(DelegatingIntroductionInterceptor.java:124) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:749) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:691) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.batch.item.xml.StaxEventItemReader$$EnhancerBySpringCGLIB$$1a0f0ba5.open(<generated>) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.item.support.CompositeItemStream.open(CompositeItemStream.java:103) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.step.tasklet.TaskletStep.open(TaskletStep.java:311) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:205) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.job.SimpleStepHandler.handleStep(SimpleStepHandler.java:148) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.job.AbstractJob.handleStep(AbstractJob.java:410) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.job.SimpleJob.doExecute(SimpleJob.java:136) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.job.AbstractJob.execute(AbstractJob.java:319) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.core.launch.support.SimpleJobLauncher$1.run(SimpleJobLauncher.java:147) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:50) ~[spring-core-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:140) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:na]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:64) ~[na:na]
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:na]
at java.base/java.lang.reflect.Method.invoke(Method.java:564) ~[na:na]
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:344) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:198) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.batch.core.configuration.annotation.SimpleBatchConfiguration$PassthruAdvice.invoke(SimpleBatchConfiguration.java:127) ~[spring-batch-core-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:212) ~[spring-aop-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at com.sun.proxy.$Proxy39.run(Unknown Source) ~[na:na]
at org.springframework.boot.autoconfigure.batch.JobLauncherApplicationRunner.execute(JobLauncherApplicationRunner.java:199) ~[spring-boot-autoconfigure-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherApplicationRunner.executeLocalJobs(JobLauncherApplicationRunner.java:173) ~[spring-boot-autoconfigure-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherApplicationRunner.launchJobFromProperties(JobLauncherApplicationRunner.java:160) ~[spring-boot-autoconfigure-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherApplicationRunner.run(JobLauncherApplicationRunner.java:155) ~[spring-boot-autoconfigure-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.autoconfigure.batch.JobLauncherApplicationRunner.run(JobLauncherApplicationRunner.java:150) ~[spring-boot-autoconfigure-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:786) ~[spring-boot-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.SpringApplication.callRunners(SpringApplication.java:776) ~[spring-boot-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:322) ~[spring-boot-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1237) ~[spring-boot-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1226) ~[spring-boot-2.3.2.RELEASE.jar:2.3.2.RELEASE]
at com.swt.helloworld.HelloworldApplication.main(HelloworldApplication.java:41) ~[classes/:na]
Caused by: java.lang.IllegalArgumentException: The Resource must not be null.
at org.springframework.util.Assert.notNull(Assert.java:201) ~[spring-core-5.2.8.RELEASE.jar:5.2.8.RELEASE]
at org.springframework.batch.item.xml.StaxEventItemReader.doOpen(StaxEventItemReader.java:205) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.open(AbstractItemCountingItemStreamItemReader.java:150) ~[spring-batch-infrastructure-4.2.4.RELEASE.jar:4.2.4.RELEASE]
... 43 common frames omitted
I am not able to understand why this "Resource must not be null" error is occurring. I have also attached my BatchConfiguration.java file below:
package com.swt.helloworld.config;
import com.swt.helloworld.model.Product;
import com.swt.helloworld.writer.ConsoleItemWriter;
import com.swt.helloworld.listener.HwJobExecutionListener;
import com.swt.helloworld.listener.HwStepExecutionListener;
import com.swt.helloworld.processor.InMemeItemProcessor;
import com.swt.helloworld.reader.InMemReader;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.item.file.transform.LineTokenizer;
import org.springframework.batch.item.xml.StaxEventItemReader;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.FileSystemResource;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;
@EnableBatchProcessing
@Configuration
public class BatchCondifguration {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Autowired
    private HwJobExecutionListener hwJobExecutionListener;

    @Autowired
    private HwStepExecutionListener hwStepExecutionListener;

    @Autowired
    private InMemeItemProcessor inMemeItemProcessor;

    public Tasklet helloWorldTasklet() {
        return (new Tasklet() {
            @Override
            public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
                System.out.println("Hello world ");
                return RepeatStatus.FINISHED;
            }
        });
    }

    @Bean
    public Step step1() {
        return steps.get("step1")
                .listener(hwStepExecutionListener)
                .tasklet(helloWorldTasklet())
                .build();
    }

    @Bean
    public InMemReader reader() {
        return new InMemReader();
    }

    @StepScope
    @Bean
    public StaxEventItemReader xmlItemReader(
            @Value("#{jobParameters['fileInput']}")
            FileSystemResource inputFile
    ) {
        // where to read the xml file
        StaxEventItemReader reader = new StaxEventItemReader();
        reader.setResource(inputFile);
        // need to let reader know which tags describe the domain object
        reader.setFragmentRootElementName("product");
        // tell reader how to parse XML and which domain object to be mapped
        reader.setUnmarshaller(new Jaxb2Marshaller() {
            {
                setClassesToBeBound(Product.class);
            }
        });
        return reader;
    }

    @StepScope
    @Bean
    public FlatFileItemReader flatFileItemReader(
            @Value("#{jobParameters['fileInput']}")
            FileSystemResource inputFile) {
        FlatFileItemReader reader = new FlatFileItemReader();
        // step 1: let reader know where the file is
        reader.setResource(inputFile);
        // step 2: create the line mapper
        reader.setLineMapper(
                new DefaultLineMapper<Product>() {
                    {
                        setLineTokenizer(new DelimitedLineTokenizer() {
                            {
                                setNames(new String[]{"prodId", "productName", "prodDesc", "price", "unit"});
                                setDelimiter("|");
                            }
                        });
                        setFieldSetMapper(new BeanWrapperFieldSetMapper<Product>() {
                            {
                                setTargetType(Product.class);
                            }
                        });
                    }
                }
        );
        // step 3: tell reader to skip the header
        reader.setLinesToSkip(1);
        return reader;
    }

    @Bean
    public Step step2() {
        return steps.get("step2")
                .<Integer, Integer>chunk(3)
                // .reader(flatFileItemReader(null))
                .reader(xmlItemReader(null))
                .writer(new ConsoleItemWriter())
                .build();
    }

    @Bean
    public Job helloWorldJob() {
        return jobs.get("helloWorldJob")
                .incrementer(new RunIdIncrementer())
                .listener(hwJobExecutionListener)
                .start(step1())
                .next(step2())
                .build();
    }
}
Following the stack trace, I think you need to check your job parameter named 'fileInput'. Pass the file path via the 'fileInput' program parameter.
It should work if you add this to your parameters when you run your batch application:
fileInput=/path/your/input/file
See the Spring Batch documentation for further details.
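If the job is launched programmatically instead of through Spring Boot's command-line argument handling, the same parameter can be supplied with a JobParametersBuilder. A minimal sketch (the launcher class name and file path are placeholders; the job bean is the helloWorldJob defined above):
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class JobRunner {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job helloWorldJob;

    public void launch() throws Exception {
        JobParameters params = new JobParametersBuilder()
                .addString("fileInput", "/path/your/input/file.xml")
                // a changing parameter so each run creates a new job instance
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        jobLauncher.run(helloWorldJob, params);
    }
}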

SimpleAsyncUncaughtExceptionHandler : Unexpected exception occurred invoking async method during kafka junit tests

Scenario: I want to test my Kafka producer against a Kafka Docker container instead of KafkaEmbedded.
Goal: I want to start the Kafka container from my docker-compose file before executing a simple producer test.
Searching around, I found a plugin that can run docker-compose (docker-compose-rule) and I am following its guidance.
My straight question: does this exception give a clue about what is going wrong?
2020-03-04 18:14:39.420 INFO 15372 --- [extShutdownHook] o.s.s.concurrent.ThreadPoolTaskExecutor : Shutting down ExecutorService 'taskExecutor'
2020-03-04 18:14:39.421 INFO 15372 --- [extShutdownHook] o.a.k.clients.producer.KafkaProducer : [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 30000 ms.
2020-03-04 18:14:39.428 ERROR 15372 --- [kaMsgExecutor-2] .a.i.SimpleAsyncUncaughtExceptionHandler : Unexpected exception occurred invoking async method: public void com.arquiteturareferencia.kafkaassyncsend.producer.Producer.send(java.lang.String,java.lang.String)
org.apache.kafka.common.errors.InterruptException: java.lang.InterruptedException
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:937) ~[kafka-clients-2.3.1.jar:na]
Surrounding doubt: in these logs I read first "Shutting down ExecutorService 'taskExecutor'", then "Closing the Kafka producer", and finally "[kaMsgExecutor-2] .a.i.SimpleAsyncUncaughtExceptionHandler". Does this mean the root cause is that either the ExecutorService or the Kafka producer isn't available at the moment of the test? If so, what do I have to set up in order to make it available while running the test?
Here is my JUnit test:
import org.junit.ClassRule;
import org.junit.jupiter.api.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import com.arquiteturareferencia.kafkaassyncsend.producer.Producer;
import com.palantir.docker.compose.DockerComposeRule;
import com.palantir.docker.compose.connection.waiting.HealthChecks;

@RunWith(SpringRunner.class)
@SpringBootTest
class KafkaAssyncSendApplicationTests {

    @Autowired
    private Producer p;

    @ClassRule
    public static DockerComposeRule docker = DockerComposeRule.builder()
            .file("src/test/resources/docker-compose.yml")
            .waitingForService("kafka", HealthChecks.toHaveAllPortsOpen())
            .build();

    @Test
    public void testSend() throws Exception {
        p.send("test", "Messagem demonstrativa assincrona 2");
    }
}
My producer config
@EnableAsync
@Configuration
public class KafkaProducerConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProducerConfig.class);

    @Value("${kafka.brokers}")
    private String servers;

    @Bean
    public Executor taskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(2);
        executor.setMaxPoolSize(2);
        executor.setQueueCapacity(500);
        executor.setThreadNamePrefix("KafkaMsgExecutor-");
        executor.initialize();
        return executor;
    }

    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
        return props;
    }
}
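For completeness, the KafkaTemplate that the Producer below autowires is normally built from this map through a ProducerFactory. That wiring is not shown in the question, so the two beans below are only an assumption about the missing piece (note also that value.serializer in producerConfigs() points at JsonDeserializer, while the startup log further down shows StringSerializer, so the pasted config may not be the one actually running):
@Bean
public ProducerFactory<String, String> producerFactory() {
    // Assumed wiring: builds the producer from the map defined in producerConfigs()
    return new DefaultKafkaProducerFactory<>(producerConfigs());
}

@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
    return new KafkaTemplate<>(producerFactory());
}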
Producer:
@Service
public class Producer {

    private static final Logger LOGGER = LoggerFactory.getLogger(Producer.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Async
    public void send(String topic, String message) {
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send(topic, message);
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(final SendResult<String, String> message) {
                LOGGER.info("sent message= " + message + " with offset= " + message.getRecordMetadata().offset());
            }
            @Override
            public void onFailure(final Throwable throwable) {
            }
        });
    }
}
My docker-compose.yml in case it matters
version: "3.2"
services:
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
  kafka:
    image: wurstmeister/kafka
    ports:
      - "9092:9092"
    environment:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENERS:
        "INTERNAL://kafka:9090,\
        EXTERNAL://:9092"
      KAFKA_ADVERTISED_LISTENERS:
        "INTERNAL://kafka:9090,\
        EXTERNAL://192.168.99.100:9092"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP:
        "INTERNAL:PLAINTEXT,\
        EXTERNAL:PLAINTEXT"
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
Full Exception logs while testing:
2020-03-04 18:14:28.132 INFO 15372 --- [ main] c.a.k.KafkaAssyncSendApplicationTests : Starting KafkaAssyncSendApplicationTests on SPANOT149 with PID 15372 (started by Cast in C:\WSs\pockafka\kafka-assync-send)
2020-03-04 18:14:28.135 INFO 15372 --- [ main] c.a.k.KafkaAssyncSendApplicationTests : No active profile set, falling back to default profiles: default
2020-03-04 18:14:29.072 INFO 15372 --- [ main] o.s.s.concurrent.ThreadPoolTaskExecutor : Initializing ExecutorService
2020-03-04 18:14:29.089 INFO 15372 --- [ main] o.s.s.concurrent.ThreadPoolTaskExecutor : Initializing ExecutorService 'taskExecutor'
2020-03-04 18:14:29.281 INFO 15372 --- [ main] c.a.k.KafkaAssyncSendApplicationTests : Started KafkaAssyncSendApplicationTests in 1.424 seconds (JVM running for 3.408)
2020-03-04 18:14:29.339 INFO 15372 --- [kaMsgExecutor-1] o.a.k.clients.producer.ProducerConfig : ProducerConfig values:
acks = 1
batch.size = 16384
bootstrap.servers = [192.168.99.100:9092]
buffer.memory = 33554432
client.dns.lookup = default
client.id =
compression.type = none
connections.max.idle.ms = 540000
delivery.timeout.ms = 120000
enable.idempotence = false
interceptor.classes = []
key.serializer = class org.apache.kafka.common.serialization.StringSerializer
linger.ms = 0
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
transaction.timeout.ms = 60000
transactional.id = null
value.serializer = class org.apache.kafka.common.serialization.StringSerializer
2020-03-04 18:14:29.527 INFO 15372 --- [kaMsgExecutor-1] o.a.kafka.common.utils.AppInfoParser : Kafka version: 2.3.1
2020-03-04 18:14:29.529 INFO 15372 --- [kaMsgExecutor-1] o.a.kafka.common.utils.AppInfoParser : Kafka commitId: 18a913733fb71c01
2020-03-04 18:14:29.529 INFO 15372 --- [kaMsgExecutor-1] o.a.kafka.common.utils.AppInfoParser : Kafka startTimeMs: 1583356469521
2020-03-04 18:14:39.420 INFO 15372 --- [extShutdownHook] o.s.s.concurrent.ThreadPoolTaskExecutor : Shutting down ExecutorService 'taskExecutor'
2020-03-04 18:14:39.421 INFO 15372 --- [extShutdownHook] o.a.k.clients.producer.KafkaProducer : [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 30000 ms.
2020-03-04 18:14:39.428 ERROR 15372 --- [kaMsgExecutor-2] .a.i.SimpleAsyncUncaughtExceptionHandler : Unexpected exception occurred invoking async method: public void com.arquiteturareferencia.kafkaassyncsend.producer.Producer.send(java.lang.String,java.lang.String)
org.apache.kafka.common.errors.InterruptException: java.lang.InterruptedException
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:937) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:856) ~[kafka-clients-2.3.1.jar:na]
at org.springframework.kafka.core.DefaultKafkaProducerFactory$CloseSafeProducer.send(DefaultKafkaProducerFactory.java:592) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:404) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:216) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at com.arquiteturareferencia.kafkaassyncsend.producer.Producer.send(Producer.java:23) ~[classes/:na]
at com.arquiteturareferencia.kafkaassyncsend.producer.Producer$$FastClassBySpringCGLIB$$78adf947.invoke(<generated>) ~[classes/:na]
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) ~[spring-core-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:769) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:747) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[na:na]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[na:na]
at java.base/java.lang.Thread.run(Thread.java:834) ~[na:na]
Caused by: java.lang.InterruptedException: null
at java.base/java.lang.Object.wait(Native Method) ~[na:na]
at org.apache.kafka.common.utils.SystemTime.waitObject(SystemTime.java:60) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.internals.ProducerMetadata.awaitUpdate(ProducerMetadata.java:97) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.waitOnMetadata(KafkaProducer.java:999) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:876) ~[kafka-clients-2.3.1.jar:na]
... 15 common frames omitted
2020-03-04 18:14:39.428 ERROR 15372 --- [kaMsgExecutor-1] .a.i.SimpleAsyncUncaughtExceptionHandler : Unexpected exception occurred invoking async method: public void com.arquiteturareferencia.kafkaassyncsend.producer.Producer.send(java.lang.String,java.lang.String)
org.apache.kafka.common.errors.InterruptException: java.lang.InterruptedException
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:937) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:856) ~[kafka-clients-2.3.1.jar:na]
at org.springframework.kafka.core.DefaultKafkaProducerFactory$CloseSafeProducer.send(DefaultKafkaProducerFactory.java:592) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at org.springframework.kafka.core.KafkaTemplate.doSend(KafkaTemplate.java:404) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at org.springframework.kafka.core.KafkaTemplate.send(KafkaTemplate.java:216) ~[spring-kafka-2.3.6.RELEASE.jar:2.3.6.RELEASE]
at com.arquiteturareferencia.kafkaassyncsend.producer.Producer.send(Producer.java:23) ~[classes/:na]
at com.arquiteturareferencia.kafkaassyncsend.producer.Producer$$FastClassBySpringCGLIB$$78adf947.invoke(<generated>) ~[classes/:na]
at org.springframework.cglib.proxy.MethodProxy.invoke(MethodProxy.java:218) ~[spring-core-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.invokeJoinpoint(CglibAopProxy.java:769) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:747) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at org.springframework.aop.interceptor.AsyncExecutionInterceptor.lambda$invoke$0(AsyncExecutionInterceptor.java:115) ~[spring-aop-5.2.4.RELEASE.jar:5.2.4.RELEASE]
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) ~[na:na]
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) ~[na:na]
at java.base/java.lang.Thread.run(Thread.java:834) ~[na:na]
Caused by: java.lang.InterruptedException: null
at java.base/java.lang.Object.wait(Native Method) ~[na:na]
at org.apache.kafka.common.utils.SystemTime.waitObject(SystemTime.java:60) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.internals.ProducerMetadata.awaitUpdate(ProducerMetadata.java:97) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.waitOnMetadata(KafkaProducer.java:999) ~[kafka-clients-2.3.1.jar:na]
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:876) ~[kafka-clients-2.3.1.jar:na]
... 15 common frames omitted

Demo Spring Boot application to connect to a PostgreSQL database throws a nested runtime exception

I am trying to write a demo program that connects to a PostgreSQL database and performs basic CRUD operations. I'm getting an "org.springframework.beans.factory.UnsatisfiedDependencyException" while running the application. I'm very new to Spring. Thank you so much for your help in advance.
TestApplication.java
package com.ranjana.test;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;

@SpringBootApplication
@EntityScan(basePackages = {"model"})
public class TestApplication {
    public static void main(String[] args) {
        SpringApplication.run(TestApplication.class, args);
    }
}
Following is the model:
Employee.java
package com.ranjana.test.model;

import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import javax.persistence.*;
import java.util.Date;

@Entity
public class Employee {

    //Employee Id
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long id;

    //Employee First Name
    @Column(name = "first_name", nullable = false)
    private String firstName;

    //Employee Last Name
    @Column(name = "last_name", nullable = false)
    private String lastName;

    //Employee Email Address
    @Column(name = "email", nullable = false)
    private String emailId;

    //Employee Contact Number
    @Column(name = "contact_number", nullable = false)
    private String contactNumber;

    //Creation Timestamp
    @CreationTimestamp
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "create_date_time", nullable = false)
    private Date createDateTime;

    //Update Timestamp
    @UpdateTimestamp
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "update_date_time", nullable = false)
    private Date updateDateTime;

    // Getters and setters

    @Override
    public String toString() {
        return "Employee{"
                + "id = " + id + '\''
                + "firstName" + firstName + '\''
                + "lastName" + lastName + '\''
                + "email" + emailId + '\''
                + "phone" + contactNumber + '\''
                + "createDateTime" + createDateTime + '\''
                + "updateDateTime" + updateDateTime + '\''
                + "}";
    }
}
This is the interface that extends the JpaRepository:
package com.ranjana.test.repositoy;

import com.ranjana.test.model.Employee;
import org.springframework.data.jpa.repository.JpaRepository;

public interface EmployeeRepo extends JpaRepository<Employee, Long> { }
Finally the Controller:
package com.ranjana.test.controller;

import com.ranjana.test.repositoy.EmployeeRepo;
import com.ranjana.test.exception.ResourceNotFoundException;
import com.ranjana.test.model.Employee;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@RestController
@RequestMapping("/api/v1")
public class EmployeeController {

    @Autowired
    private EmployeeRepo empRepo;

    @GetMapping("/employees")
    public List<Employee> getAllEmployees(){
        return empRepo.findAll();
    }

    @GetMapping("/employee/{id}")
    public ResponseEntity<Employee> getUsersById(@PathVariable(value = "id") Long emplId)
            throws ResourceNotFoundException {
        Employee employee = empRepo.findById(emplId).orElseThrow(() -> new ResourceNotFoundException("Employee not found for id: " + emplId));
        return ResponseEntity.ok().body(employee);
    }

    @PostMapping("/employees")
    public Employee createEmployee(@Valid @RequestBody Employee employee){
        return empRepo.save(employee);
    }

    @PutMapping("/employee/{id}")
    public ResponseEntity<Employee> updateEmployee(@PathVariable(value = "id") Long emplId, @Valid @RequestBody Employee empDetails)
            throws ResourceNotFoundException {
        Employee employee = empRepo.findById(emplId).orElseThrow(() -> new ResourceNotFoundException("Employee not found for id: " + emplId));
        employee.setFirstName(empDetails.getFirstName());
        employee.setLastName(empDetails.getLastName());
        employee.setEmailId(empDetails.getEmailId());
        employee.setContactNumber(empDetails.getContactNumber());
        employee.setUpdateDateTime(new Date());
        final Employee updatedEmployee = empRepo.save(employee);
        return ResponseEntity.ok(updatedEmployee);
    }

    @DeleteMapping("/employee/{id}")
    public Map<String, Boolean> deleteEmployee(@PathVariable(value = "id") Long emplId)
            throws Exception {
        Employee employee = empRepo.findById(emplId).orElseThrow(() -> new ResourceNotFoundException("Employee not found for id: " + emplId));
        empRepo.delete(employee);
        Map<String, Boolean> response = new HashMap<>();
        response.put("Deleted", Boolean.TRUE);
        return response;
    }
}
The error log is as follows:
2019-08-03 20:21:53.210 ERROR 7151 --- [ restartedMain] o.s.boot.SpringApplication : Application run failed
org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'employeeController': Unsatisfied dependency expressed through field 'empRepo'; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'employeeRepo': Invocation of init method failed; nested exception is java.lang.IllegalArgumentException: Not a managed type: class com.egen.test.model.Employee
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:596) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.annotation.InjectionMetadata.inject(InjectionMetadata.java:90) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor.postProcessProperties(AutowiredAnnotationBeanPostProcessor.java:374) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.populateBean(AbstractAutowireCapableBeanFactory.java:1411) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:592) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:515) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:320) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:222) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:318) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:199) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:845) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:877) ~[spring-context-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:549) ~[spring-context-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:140) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:742) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:389) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:311) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1213) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1202) ~[spring-boot-2.1.6.RELEASE.jar:2.1.6.RELEASE]
at com.egen.test.TestApplication.main(TestApplication.java:12) ~[classes/:na]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:na]
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:na]
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:na]
at java.base/java.lang.reflect.Method.invoke(Method.java:566) ~[na:na]
at org.springframework.boot.devtools.restart.RestartLauncher.run(RestartLauncher.java:49) ~[spring-boot-devtools-2.1.6.RELEASE.jar:2.1.6.RELEASE]
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'employeeRepo': Invocation of init method failed; nested exception is java.lang.IllegalArgumentException: Not a managed type: class com.egen.test.model.Employee
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1778) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:593) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:515) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:320) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:222) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:318) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:199) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:277) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1251) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1171) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.annotation.AutowiredAnnotationBeanPostProcessor$AutowiredFieldElement.inject(AutowiredAnnotationBeanPostProcessor.java:593) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
... 24 common frames omitted
Caused by: java.lang.IllegalArgumentException: Not a managed type: class com.egen.test.model.Employee
at org.hibernate.metamodel.internal.MetamodelImpl.managedType(MetamodelImpl.java:552) ~[hibernate-core-5.3.10.Final.jar:5.3.10.Final]
at org.springframework.data.jpa.repository.support.JpaMetamodelEntityInformation.<init>(JpaMetamodelEntityInformation.java:74) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaEntityInformationSupport.getEntityInformation(JpaEntityInformationSupport.java:66) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaRepositoryFactory.getEntityInformation(JpaRepositoryFactory.java:201) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaRepositoryFactory.getTargetRepository(JpaRepositoryFactory.java:151) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaRepositoryFactory.getTargetRepository(JpaRepositoryFactory.java:134) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaRepositoryFactory.getTargetRepository(JpaRepositoryFactory.java:65) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport.getRepository(RepositoryFactorySupport.java:305) ~[spring-data-commons-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport.lambda$afterPropertiesSet$5(RepositoryFactoryBeanSupport.java:297) ~[spring-data-commons-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.util.Lazy.getNullable(Lazy.java:211) ~[spring-data-commons-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.util.Lazy.get(Lazy.java:94) ~[spring-data-commons-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport.afterPropertiesSet(RepositoryFactoryBeanSupport.java:300) ~[spring-data-commons-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.data.jpa.repository.support.JpaRepositoryFactoryBean.afterPropertiesSet(JpaRepositoryFactoryBean.java:121) ~[spring-data-jpa-2.1.9.RELEASE.jar:2.1.9.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1837) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1774) ~[spring-beans-5.1.8.RELEASE.jar:5.1.8.RELEASE]
... 34 common frames omitted
Process finished with exit code 0
You need to update the line @EntityScan(basePackages = {"model"}) to @EntityScan(basePackages = {"com.ranjana.test.model"}) so that the application scans your entity classes from the correct package, as shown below.
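For reference, here is the corrected application class; the code is taken from the question above, with only the basePackages value changed:

package com.ranjana.test;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;

@SpringBootApplication
// Scan the fully qualified package that actually contains the Employee entity.
@EntityScan(basePackages = {"com.ranjana.test.model"})
public class TestApplication {
    public static void main(String[] args) {
        SpringApplication.run(TestApplication.class, args);
    }
}

Alternatively, because com.ranjana.test.model is a sub-package of the package holding the @SpringBootApplication class, removing the @EntityScan annotation altogether lets Spring Boot's default entity scanning pick up Employee as well.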

Turning off exception logging in HikariPool

I have two databases connected to my application, so I have two connector configurations like this:
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(entityManagerFactoryRef = DMP_SAP_ENTITY_MANAGER_FACTORY_BEAN_NAME)
class SapHanaConnectionConfiguration @Autowired constructor(private val environment: Environment) {

    private val driverClassName by lazy { environment.getProperty(DMP_SAP_DRIVER_CLASS_NAME) }
    private val jdbcString by lazy { environment.getProperty(DMP_SAP_JDBC_STRING) }
    private val username by lazy { environment.getProperty(DMP_SAP_USERNAME) }
    private val password by lazy { environment.getProperty(DMP_SAP_PASSWORD) }

    @Bean(name = [DMP_SAP_DATA_SOURCE_BEAN_NAME])
    fun dataSource(): DataSource = DataSourceBuilder.create()
        .driverClassName(driverClassName)
        .url(jdbcString)
        .username(username)
        .password(password)
        .build()

    @Bean(name = [DMP_SAP_ENTITY_MANAGER_FACTORY_BEAN_NAME])
    fun entityManagerFactory(
        builder: EntityManagerFactoryBuilder,
        @Qualifier(DMP_SAP_DATA_SOURCE_BEAN_NAME) dataSource: DataSource
    ): LocalContainerEntityManagerFactoryBean = builder
        .dataSource(dataSource)
        .packages(DMP_SAP_MODEL_PACKAGE)
        .persistenceUnit(DMP_SAP_PERSISTENCE_UNIT_NAME)
        .build()

    @Bean(name = [DMP_SAP_TRANSACTION_MANAGER_BEAN_NAME])
    fun transactionManager(@Qualifier(DMP_SAP_ENTITY_MANAGER_FACTORY_BEAN_NAME) entityManagerFactory: EntityManagerFactory): PlatformTransactionManager =
        JpaTransactionManager(entityManagerFactory)
}
The problem is that this particular connector should only be active in the production environment, because it connects to a database containing client data (which sits inside the client's local network and is not accessible from outside it). Right now, while developing locally, both connectors start, and this SapHanaConnectionConfiguration therefore throws an exception because it cannot establish a connection to the given DB.
2018-10-25 14:04:09.187 ERROR 13680 --- [n(14)-127.0.0.1] com.zaxxer.hikari.pool.HikariPool : HikariPool-2 - Exception during pool initialization.
com.sap.db.jdbc.exceptions.JDBCDriverException: SAP DBTech JDBC: Cannot connect to jdbc:sap://CLIENTS_MACHINE/ [Unknown host CLIENTS_MACHINE [CLIENTS_MACHINE], -709].
at com.sap.db.jdbc.exceptions.SQLExceptionSapDB._newInstance(SQLExceptionSapDB.java:126)
at com.sap.db.jdbc.exceptions.SQLExceptionSapDB._newInstance(SQLExceptionSapDB.java:203)
at com.sap.db.jdbc.exceptions.SQLExceptionSapDB.newInstance(SQLExceptionSapDB.java:50)
at com.sap.db.jdbc.DriverSapDB._connect(DriverSapDB.java:1404)
at com.sap.db.jdbc.DriverSapDB.connect(DriverSapDB.java:1102)
at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:117)
at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:123)
at com.zaxxer.hikari.pool.PoolBase.newConnection(PoolBase.java:365)
at com.zaxxer.hikari.pool.PoolBase.newPoolEntry(PoolBase.java:194)
at com.zaxxer.hikari.pool.HikariPool.createPoolEntry(HikariPool.java:460)
at com.zaxxer.hikari.pool.HikariPool.checkFailFast(HikariPool.java:534)
at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:115)
at com.zaxxer.hikari.HikariDataSource.getConnection(HikariDataSource.java:112)
at org.springframework.jdbc.datasource.DataSourceUtils.fetchConnection(DataSourceUtils.java:151)
at org.springframework.jdbc.datasource.DataSourceUtils.doGetConnection(DataSourceUtils.java:115)
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:78)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:324)
at org.springframework.boot.actuate.jdbc.DataSourceHealthIndicator.getProduct(DataSourceHealthIndicator.java:122)
at org.springframework.boot.actuate.jdbc.DataSourceHealthIndicator.doDataSourceHealthCheck(DataSourceHealthIndicator.java:109)
at org.springframework.boot.actuate.jdbc.DataSourceHealthIndicator.doHealthCheck(DataSourceHealthIndicator.java:104)
at org.springframework.boot.actuate.health.AbstractHealthIndicator.health(AbstractHealthIndicator.java:84)
at org.springframework.boot.actuate.health.CompositeHealthIndicator.health(CompositeHealthIndicator.java:68)
at org.springframework.boot.actuate.health.CompositeHealthIndicator.health(CompositeHealthIndicator.java:68)
at org.springframework.boot.actuate.health.HealthEndpoint.health(HealthEndpoint.java:47)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:223)
at org.springframework.boot.actuate.endpoint.invoke.reflect.ReflectiveOperationInvoker.invoke(ReflectiveOperationInvoker.java:76)
at org.springframework.boot.actuate.endpoint.annotation.AbstractDiscoveredOperation.invoke(AbstractDiscoveredOperation.java:61)
at org.springframework.boot.actuate.endpoint.jmx.EndpointMBean.invoke(EndpointMBean.java:104)
at org.springframework.boot.actuate.endpoint.jmx.EndpointMBean.invoke(EndpointMBean.java:93)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.invoke(DefaultMBeanServerInterceptor.java:819)
at com.sun.jmx.mbeanserver.JmxMBeanServer.invoke(JmxMBeanServer.java:801)
at javax.management.remote.rmi.RMIConnectionImpl.doOperation(RMIConnectionImpl.java:1468)
at javax.management.remote.rmi.RMIConnectionImpl.access$300(RMIConnectionImpl.java:76)
at javax.management.remote.rmi.RMIConnectionImpl$PrivilegedOperation.run(RMIConnectionImpl.java:1309)
at javax.management.remote.rmi.RMIConnectionImpl.doPrivilegedOperation(RMIConnectionImpl.java:1401)
at javax.management.remote.rmi.RMIConnectionImpl.invoke(RMIConnectionImpl.java:829)
at sun.reflect.GeneratedMethodAccessor193.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at sun.rmi.server.UnicastServerRef.dispatch(UnicastServerRef.java:357)
at sun.rmi.transport.Transport$1.run(Transport.java:200)
at sun.rmi.transport.Transport$1.run(Transport.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at sun.rmi.transport.Transport.serviceCall(Transport.java:196)
at sun.rmi.transport.tcp.TCPTransport.handleMessages(TCPTransport.java:568)
at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run0(TCPTransport.java:826)
at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.lambda$run$0(TCPTransport.java:683)
at java.security.AccessController.doPrivileged(Native Method)
at sun.rmi.transport.tcp.TCPTransport$ConnectionHandler.run(TCPTransport.java:682)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
I would like to specify that this connector should run only while a particular profile is active. Is that possible?
I'm using Spring Boot with Kotlin.
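One way to achieve this (a minimal sketch, not from the original post) is Spring's @Profile annotation: when placed on a @Configuration class, the beans declared in that class are only registered while the named profile is active, so the SAP HANA pool is never created in other environments. The profile name "production" used below is an assumption; in the Kotlin class above the same idea is simply @Profile("production") added to SapHanaConnectionConfiguration. A Java sketch of the approach:

import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;

// Illustrative only: the profile name "production" is an assumption.
// Activate it with spring.profiles.active=production (property, env variable, or launch argument).
@Configuration
@Profile("production")
public class SapHanaConnectionConfigurationSketch {
    // The dataSource(), entityManagerFactory() and transactionManager() beans
    // from the Kotlin configuration would be declared here. When the
    // "production" profile is not active, none of them are created, so
    // HikariCP never attempts (and never fails) to open a connection to the SAP HANA DB.
}

Note that any repositories enabled through @EnableJpaRepositories for this entity manager would also need to be unused or profile-gated in non-production runs, since skipping the configuration class means those repository beans are never registered.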
