Spring Boot Kafka Manual ack fails - spring

I have a Spring Boot Kafka consumer with the configuration below. I was trying manual acknowledgment instead of auto commit, and with manual acknowledgment I started getting an error.
The Spring Boot version is 2.7.2.
kafka.consumer.groupId=mcs-ccp-event
message.topic.name=mcs_ccp_test
kafka.bootstrapAddress=kafka-dev-app-a1.com:9092
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.listener.ack-mode=MANUAL_IMMEDIATE
and this consumer configuration:
@EnableKafka
@Configuration
@Slf4j
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    @Value(value = "${kafka.consumer.groupId}")
    private String groupId;

    public ConsumerFactory<String, Event> eventConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        //props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, "com.xxx.mcsccpkafkaconsumer.vo.Event");
        props.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, false);
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new JsonDeserializer<>(Event.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Event> eventKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Event> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(eventConsumerFactory());
        return factory;
    }
}
This is my listener
@KafkaListener(topics = "${message.topic.name}", containerFactory = "eventKafkaListenerContainerFactory", groupId = "${kafka.consumer.groupId}")
public void eventListener(@Payload Event event, @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition,
        Acknowledgment acknowledgment) {
    log.info("Received event message: {} from partition : {}", event, partition);
    persistEventToDB(event);
    acknowledgment.acknowledge();
    this.eventLatch.countDown();
}
Whenever the consumer receives a message from the producer, it always throws this error:
2022-08-06 11:16:11.749 ERROR 37700 --- [ntainer#0-0-C-1] o.s.kafka.listener.DefaultErrorHandler : Backoff none exhausted for mcs__ccp-1#122
org.springframework.kafka.listener.ListenerExecutionFailedException: invokeHandler Failed; nested exception is java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.; nested exception is java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2713) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2683) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2643) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2570) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2451) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2329) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:2000) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1373) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1364) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1255) ~[spring-kafka-2.8.8.jar:2.8.8]
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[na:na]
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
Suppressed: org.springframework.kafka.listener.ListenerExecutionFailedException: Restored Stack Trace
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.checkAckArg(MessagingMessageListenerAdapter.java:369) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:352) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:92) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:53) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2663) ~[spring-kafka-2.8.8.jar:2.8.8]
Caused by: java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.checkAckArg(MessagingMessageListenerAdapter.java:369) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:352) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:92) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:53) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2663) ~[spring-kafka-2.8.8.jar:2.8.8]
... 11 common frames omitted
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot handle message; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [com.xxx.mcsccpkafkaconsumer.vo.Event] to [org.springframework.kafka.support.Acknowledgment] for GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}], failedMessage=GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}]
... 15 common frames omitted
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot convert from [com.xxx.mcsccpkafkaconsumer.vo.Event] to [org.springframework.kafka.support.Acknowledgment] for GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}]
at org.springframework.messaging.handler.annotation.support.PayloadMethodArgumentResolver.resolveArgument(PayloadMethodArgumentResolver.java:145) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.kafka.annotation.KafkaNullAwarePayloadArgumentResolver.resolveArgument(KafkaNullAwarePayloadArgumentResolver.java:46) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolverComposite.resolveArgument(HandlerMethodArgumentResolverComposite.java:118) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.getMethodArgumentValues(InvocableHandlerMethod.java:147) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:115) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:56) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:347) ~[spring-kafka-2.8.8.jar:2.8.8]
... 14 common frames omitted

You're setting the ack-mode and auto-offset-reset in the properties file, which is used by Spring Boot's auto-configuration to set up its own KafkaListenerContainerFactory.
But since you declare your own KafkaListenerContainerFactory bean, auto-configuration backs off and your programmatic configuration is used instead.
Either set the properties for your consumer factory directly in the properties file and let Spring Boot create the beans; then there's no need for the KafkaConsumerConfig class at all.
Or set the ack mode and auto-offset-reset directly on the factory bean you're declaring, instead of in the properties file.
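A minimal sketch of the second option, assuming the rest of KafkaConsumerConfig stays as it is, would set the ack mode on the container properties of the factory bean (import org.springframework.kafka.listener.ContainerProperties):

@Bean
public ConcurrentKafkaListenerContainerFactory<String, Event> eventKafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, Event> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(eventConsumerFactory());
    // The ack mode belongs to the listener container, not the consumer; without it the
    // container falls back to the default (BATCH) and never populates an Acknowledgment
    // argument for the listener, which is exactly the IllegalStateException above.
    factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
    return factory;
}

For the same reason, spring.kafka.consumer.auto-offset-reset no longer applies to this factory; ConsumerConfig.AUTO_OFFSET_RESET_CONFIG would have to be added to the props map in eventConsumerFactory(), and the commented-out ENABLE_AUTO_COMMIT_CONFIG=false line is worth restoring, since manual acks assume the consumer's auto commit is off.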

Related

Spring boot sftp-integration with Apache mina throws Error - No such file

private static SessionFactory<DirEntry> sftpSessionFactory(SftpProperties sftpProps, String publickeyIterativePwd) {
    DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
    factory.setHost("testhost");
    factory.setPort("port");
    factory.setUser("username");
    factory.setPassword("password");
    factory.setAllowUnknownKeys(true);
    return new CachingSessionFactory<>(factory, 200);
}
SftpRemoteFileTemplate template = new SftpRemoteFileTemplate(sftpSessionFactory());
template.list("path on the server");
The template.list call throws the exception below:
Caused by: java.lang.RuntimeException: SFTP error (SSH_FX_NO_SUCH_FILE): No such file
at org.apache.sshd.sftp.client.impl.SftpIterableDirEntry.iterator(SftpIterableDirEntry.java:66) ~[sshd-sftp-2.9.2.jar:2.9.2]
at org.apache.sshd.sftp.client.impl.SftpIterableDirEntry.iterator(SftpIterableDirEntry.java:34) ~[sshd-sftp-2.9.2.jar:2.9.2]
at java.base/java.lang.Iterable.spliterator(Iterable.java:101) ~[na:na]
at org.springframework.integration.sftp.session.SftpSession.doList(SftpSession.java:99) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:69) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:52) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.session.CachingSessionFactory$CachedSession.list(CachingSessionFactory.java:227) ~[spring-integration-file-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.RemoteFileTemplate.lambda$list$5(RemoteFileTemplate.java:422) ~[spring-integration-file-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:452) ~[spring-integration-file-6.0.0.jar:6.0.0]
... 95 common frames omitted
Caused by: org.apache.sshd.sftp.common.SftpException: No such file

Spring for Apache Kafka JSON Deserialization Exception Class Not Found

I'm trying to get messages from a Kafka topic, but for some reason I get the following error:
2022-06-28 14:17:52.044 INFO 1 --- [ntainer#0-0-C-1] o.a.k.clients.consumer.KafkaConsumer : [Consumer clientId=consumer-api1-1, groupId=api1] Seeking to offset 1957 for partition ActiveProxySources-0
2022-06-28T14:17:52.688451744Z 2022-06-28 14:17:52.687 ERROR 1 --- [ntainer#0-0-C-1] o.s.kafka.listener.DefaultErrorHandler : Backoff none exhausted for ActiveProxySources-0#1957
2022-06-28T14:17:52.688499949Z
2022-06-28T14:17:52.688511943Z org.springframework.kafka.listener.ListenerExecutionFailedException: Listener failed; nested exception is org.springframework.kafka.support.serializer.DeserializationException: failed to deserialize; nested exception is org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688544511Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2691) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688555996Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.checkDeser(KafkaMessageListenerContainer.java:2738) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688564633Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2612) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688573552Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2544) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688582961Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2429) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688591538Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2307) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688600362Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1981) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688610882Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1365) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688620353Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1356) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688629357Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1251) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688637662Z at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[na:na]
2022-06-28T14:17:52.688646009Z at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
2022-06-28T14:17:52.688655783Z at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
2022-06-28T14:17:52.688664349Z Caused by: org.springframework.kafka.support.serializer.DeserializationException: failed to deserialize; nested exception is org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688674537Z at org.springframework.kafka.support.serializer.SerializationUtils.deserializationException(SerializationUtils.java:150) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688683348Z at org.springframework.kafka.support.serializer.ErrorHandlingDeserializer.deserialize(ErrorHandlingDeserializer.java:204) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688699174Z at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1420) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688707618Z at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:134) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688718316Z at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1652) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688728359Z at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1800(Fetcher.java:1488) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688736716Z at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:721) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688748228Z at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:672) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688758573Z at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1304) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688768278Z at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1238) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688776576Z at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1211) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688785598Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollConsumer(KafkaMessageListenerContainer.java:1521) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688793960Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1511) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688802367Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1339) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688811023Z ... 4 common frames omitted
2022-06-28T14:17:52.688819230Z Caused by: org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688828306Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.getClassIdType(DefaultJackson2JavaTypeMapper.java:142) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688837754Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.toJavaType(DefaultJackson2JavaTypeMapper.java:103) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688846335Z at org.springframework.kafka.support.serializer.JsonDeserializer.deserialize(JsonDeserializer.java:572) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688854685Z at org.springframework.kafka.support.serializer.ErrorHandlingDeserializer.deserialize(ErrorHandlingDeserializer.java:201) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688862907Z ... 16 common frames omitted
2022-06-28T14:17:52.688870692Z Caused by: java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688888550Z at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:476) ~[na:na]
2022-06-28T14:17:52.688898662Z at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:589) ~[na:na]
2022-06-28T14:17:52.688907289Z at org.springframework.boot.loader.LaunchedURLClassLoader.loadClass(LaunchedURLClassLoader.java:151) ~[java.jar:na]
2022-06-28T14:17:52.688915418Z at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522) ~[na:na]
2022-06-28T14:17:52.688923583Z at java.base/java.lang.Class.forName0(Native Method) ~[na:na]
2022-06-28T14:17:52.688931577Z at java.base/java.lang.Class.forName(Class.java:398) ~[na:na]
2022-06-28T14:17:52.688939753Z at org.springframework.util.ClassUtils.forName(ClassUtils.java:284) ~[spring-core-5.3.19.jar!/:5.3.19]
2022-06-28T14:17:52.688948555Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.getClassIdType(DefaultJackson2JavaTypeMapper.java:138) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688957079Z ... 19 common frames omitted
2022-06-28T14:17:52.688964715Z
I have other applications that send and read messages on Kafka topics with the same settings and they all work fine, but not this application. Ideally, I want to read messages from two Kafka topics (messages in both topics look the same and contain the same objects), but even when I try to read messages from one topic, I get the error shown above.
The settings are as follows:
class KafkaMessage {
    String id
    IdStatus status
}
@Service
@Slf4j
class ConsumerService {
    Set<String> activeProxies = []
    int getActiveProxiesNumber() {
        activeProxies.size()
    }
    Set<String> activeProxySources = []
    int getActiveProxySourcesNumber() {
        activeProxySources.size()
    }
    @KafkaListener(topics = "ActiveProxies"/*, containerFactory = "KafkaListenerContainerFactoryActiveProxies"*/)
    public void consumeProxyId(KafkaMessage message) {
        log.info("Consuming ${message.id}: ${message.status}")
        if (message.status == IdStatus.ADD) {
            activeProxies.add(message.id)
        }
        if (message.status == IdStatus.DELETE) {
            activeProxies.remove(message.id)
        }
    }
    @KafkaListener(topics = "ActiveProxySources"/*, containerFactory = "KafkaListenerContainerFactoryActiveProxySources"*/)
    public void consumeProxySourceId(KafkaMessage message) {
        log.info("Consuming ${message.id}: ${message.status}")
        if (message.status == IdStatus.ADD) {
            activeProxySources.add(message.id)
        }
        if (message.status == IdStatus.DELETE) {
            activeProxySources.remove(message.id)
        }
    }
}
TopicConfig:
@Configuration
public class TopicConfig {
    @Value(value = "kafka:9092")
    private String bootstrapAddress
    @Value(value = "ActiveProxies")
    private String activeProxies
    @Value(value = "ActiveProxySources")
    private String activeProxySources
    // @Bean
    // public KafkaAdmin kafkaAdmin() {
    //     Map<String, Object> configs = new HashMap<>();
    //     configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
    //     return new KafkaAdmin(configs);
    // }
    @Bean
    public NewTopic ActiveProxiesTopic() {
        return TopicBuilder.name(activeProxies)
                .partitions(1)
                .replicas(1)
                .config(org.apache.kafka.common.config.TopicConfig.RETENTION_MS_CONFIG, "60000")
                .build()
    }
    @Bean
    public NewTopic ActiveProxySourcesTopic() {
        return TopicBuilder.name(activeProxySources)
                .partitions(1)
                .replicas(1)
                .config(org.apache.kafka.common.config.TopicConfig.RETENTION_MS_CONFIG, "60000")
                .build()
    }
}
application.properties file:
server.port=30329
spring.data.mongodb.database=free-proxy-engine
spring.kafka.bootstrap-servers=kafka:9092
spring.kafka.consumer.group-id=consumer-Api1
spring.kafka.consumer.properties.spring.json.trusted.packages=*
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
spring.kafka.consumer.properties.spring.deserializer.value.delegate.class=org.springframework.kafka.support.serializer.JsonDeserializer
I use Docker Compose to run Kafka and all the other applications.
docker-compose.yaml:
version: '2'
services:
  mongodb:
    image: mongo:5.0.9
    restart: unless-stopped
  api:
    image: openjdk:11
    depends_on:
      - mongodb
      - kafka
    restart: unless-stopped
    volumes:
      - ./libs/api-0.0.1-SNAPSHOT.jar:/gjava/java.jar
    environment:
      spring_data_mongodb_host: mongodb
      spring_kafka_consumer_group-id: api1
    command: /bin/bash -c "cd /gjava && chmod +x /gjava/*.jar && java -jar /gjava/java.jar"
    ports:
      - 30329:30329
  zookeeper:
    image: confluentinc/cp-zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  kafka:
    image: confluentinc/cp-kafka
    restart: always
    hostname: kafka
    depends_on:
      - zookeeper
    container_name: kafka
    ports:
      - "9092:9092"
    environment:
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
I created my own consumer configuration class for Kafka, but the error remained whether I read messages from two topics or from just one.
@EnableKafka
@Configuration
class KafkaConsumerConfig {
    @Value(value = "kafka:9092")
    private String bootstrapAddress
    @Bean
    public ConsumerFactory<String, KafkaMessage> ConsumerFactoryActiveProxies() {
        Map<String, Object> props = new HashMap<>()
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress)
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "Api-1")
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*")
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, JsonDeserializer.class)
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class.getName())
        return new DefaultKafkaConsumerFactory<>(props/*,
                new StringDeserializer(),
                new JsonDeserializer<>(KafkaMessage.class)*/)
    }
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> KafkaListenerContainerFactoryActiveProxies() {
        ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> factory = new ConcurrentKafkaListenerContainerFactory<>()
        factory.setConsumerFactory(ConsumerFactoryActiveProxies())
        factory.setMessageConverter(new StringJsonMessageConverter())
        return factory
    }
    @Bean
    public ConsumerFactory<String, KafkaMessage> ConsumerFactoryActiveProxySources() {
        Map<String, Object> props = new HashMap<>()
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress)
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "Api-2")
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*")
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, JsonDeserializer.class)
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class.getName())
        return new DefaultKafkaConsumerFactory<>(props/*,
                new StringDeserializer(),
                new JsonDeserializer<>(KafkaMessage.class)*/)
    }
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> KafkaListenerContainerFactoryActiveProxySources() {
        ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> factory = new ConcurrentKafkaListenerContainerFactory<>()
        factory.setConsumerFactory(ConsumerFactoryActiveProxySources())
        factory.setMessageConverter(new StringJsonMessageConverter())
        return factory
    }
}
I will be grateful for your help.
By default, the deserializer will use type information in headers to determine which type to create.
Caused by: java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
Most likely, KafkaMessage is in a different package on the sending side.
There are a couple of solutions:
https://docs.spring.io/spring-kafka/docs/current/reference/html/#serdes-json-config
Set JsonDeserializer.USE_TYPE_INFO_HEADERS to false and JsonDeserializer.VALUE_DEFAULT_TYPE to com.new.package.kafka.KafkaMessage (the fully qualified name of KafkaMessage on the receiving side).
Use type mapping: https://docs.spring.io/spring-kafka/docs/current/reference/html/#serdes-mapping-types
I suggest you read this whole section https://docs.spring.io/spring-kafka/docs/current/reference/html/#json-serde
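For example, here is a minimal sketch of the first option applied to the programmatic consumer factory above; the target class name is a placeholder for wherever KafkaMessage lives in the consuming application:

// inside ConsumerFactoryActiveProxies() / ConsumerFactoryActiveProxySources()
props.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, false);   // ignore the sender's type headers
// placeholder FQN: use the actual package of KafkaMessage in this consuming application
props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, "com.your.consumer.app.KafkaMessage");

With the pure application.properties setup, the equivalent keys would be spring.kafka.consumer.properties.spring.json.use.type.headers=false and spring.kafka.consumer.properties.spring.json.value.default.type set to that same class name.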

SpelEvaluationException when initiating GET on a SftpOutboundGateway

I have a Spring Integration flow starting with an SftpOutboundGateway. It should get a file from an SFTP server. The file contains Event objects in JSON format. My configuration is:
@Bean
public DirectChannelSpec sftpGetInputChannel() {
    return MessageChannels.direct();
}
@Bean
public QueueChannelSpec remoteFileOutputChannel() {
    return MessageChannels.queue();
}
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
    PollerMetadata pollerMetadata = new PollerMetadata();
    pollerMetadata.setTrigger(new PeriodicTrigger(5000));
    return pollerMetadata;
}
@Bean
public IntegrationFlow sftpGetFlow(TransferContext context) {
    return IntegrationFlows.from("sftpGetInputChannel")
            .handle(Sftp.outboundGateway(sftpSessionFactory(context.getChannel()),
                    AbstractRemoteFileOutboundGateway.Command.GET, "payload")
                    .remoteDirectoryExpression(context.getRemoteDir())
                    .localDirectory(new File(context.getLocalDir()))
                    .localFilenameExpression(context.getLocalFilename())
            )
            .channel("remoteFileOutputChannel")
            .transform(Transformers.fromJson(Event[].class))
            .get();
}
To get a file from the SFTP server I send a message to the gateway's input channel "sftpGetInputChannel":
boolean sent = sftpGetInputChannel.send(new GenericMessage<>(env.getRemoteDir() + "/" + env.getRemoteFilename()));
A SpelEvaluationException is thrown when the gateway tries to interpret the given filename. Both fields, remoteDir and remoteFilename, are of type String:
org.springframework.messaging.MessageHandlingException: error occurred in message handler [bean 'sftpGetFlow.sftp:outbound-gateway#0' for component 'sftpGetFlow.org.springframework.integration.config.ConsumerEndpointFactoryBean#0'; defined in: 'class path resource [com/harry/potter/job/config/SftpConfiguration.class]'; from source: 'bean method sftpGetFlow']; nested exception is org.springframework.messaging.MessagingException: Failed to execute on session; nested exception is org.springframework.expression.spel.SpelEvaluationException: EL1001E: Type conversion problem, cannot convert from org.springframework.messaging.support.GenericMessage<?> to java.lang.String
at org.springframework.integration.support.utils.IntegrationUtils.wrapInHandlingExceptionIfNecessary(IntegrationUtils.java:192)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:79)
at org.springframework.integration.dispatcher.AbstractDispatcher.tryOptimizedDispatch(AbstractDispatcher.java:115)
at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:133)
at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:106)
at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:72)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:570)
at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:520)
at com.harry.potter.job.MyTask.getFile(MyEventsTask.java:170)
at com.harry.potter.job.myTask.runAsTask(MyEventsTask.java:112)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84)
at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54)
at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:93)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:836)
Caused by: org.springframework.messaging.MessagingException: Failed to execute on session; nested exception is org.springframework.expression.spel.SpelEvaluationException: EL1001E: Type conversion problem, cannot convert from org.springframework.messaging.support.GenericMessage<?> to java.lang.String
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:448)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway.doGet(AbstractRemoteFileOutboundGateway.java:680)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway.handleRequestMessage(AbstractRemoteFileOutboundGateway.java:584)
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:134)
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:62)
... 21 common frames omitted
Caused by: org.springframework.expression.spel.SpelEvaluationException: EL1001E: Type conversion problem, cannot convert from org.springframework.messaging.support.GenericMessage<?> to java.lang.String
at org.springframework.expression.spel.support.StandardTypeConverter.convertValue(StandardTypeConverter.java:75)
at org.springframework.expression.common.ExpressionUtils.convertTypedValue(ExpressionUtils.java:57)
at org.springframework.expression.spel.standard.SpelExpression.getValue(SpelExpression.java:377)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway.generateLocalFileName(AbstractRemoteFileOutboundGateway.java:1316)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway.get(AbstractRemoteFileOutboundGateway.java:1081)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway.lambda$doGet$6(AbstractRemoteFileOutboundGateway.java:681)
at org.springframework.integration.file.remote.gateway.AbstractRemoteFileOutboundGateway$$Lambda$30183/0x0000000000000000.doInSession(Unknown Source)
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:439)
... 25 common frames omitted
Caused by: org.springframework.core.convert.ConverterNotFoundException: No converter found capable of converting from type [org.springframework.messaging.support.GenericMessage<?>] to type [java.lang.String]
at org.springframework.core.convert.support.GenericConversionService.handleConverterNotFound(GenericConversionService.java:322)
at org.springframework.core.convert.support.GenericConversionService.convert(GenericConversionService.java:195)
at org.springframework.expression.spel.support.StandardTypeConverter.convertValue(StandardTypeConverter.java:70)
... 32 common frames omitted
What am I doing wrong?
Okay, the problem is not in the message used to request the file but in the configuration of the gateway:
.localFilenameExpression(context.getLocalFilename())
Setting the local filename directly as an expression is not allowed, because dots, which are often part of a filename, are SpEL delimiters separating a bean name from a method name. Hence the expression becomes invalid.
A possible expression would be:
.localFilenameExpression("#remoteFileName")
See its JavaDocs:
/**
 * Specify a SpEL expression for local files renaming after downloading.
 * @param localFilenameExpression the SpEL expression to use.
 * @return the Spec.
 */
public S localFilenameExpression(String localFilenameExpression) {
And here is a code snippet showing how it works:
private String generateLocalFileName(Message<?> message, String remoteFileName) {
    if (this.localFilenameGeneratorExpression != null) {
        EvaluationContext evaluationContext = ExpressionUtils.createStandardEvaluationContext(getBeanFactory());
        evaluationContext.setVariable("remoteFileName", remoteFileName);
        return this.localFilenameGeneratorExpression.getValue(evaluationContext, message, String.class);
    }
    return remoteFileName;
}
The expression you provide is evaluated in the context of the remote file currently being downloaded. It is not clear what your static context.getLocalFilename() returns; you can build a local file name based on the request message and the #remoteFileName variable instead.
See more in docs: https://docs.spring.io/spring-integration/docs/current/reference/html/sftp.html#configuring-with-the-java-dsl-3
The sample there looks like this:
.localDirectoryExpression("'myDir/' + #remoteDirectory")
.localFilenameExpression("#remoteFileName.replaceFirst('sftpSource', 'localTarget')"))

Spring integration / GCP PubSub : channel subscriber lost

Everything is mostly in the title.
I have a specific channel to send data to PubSub, using Spring integration and this information about GCP PubSub
I don't have any problem locally or in the QA environment.
But in prod, I get the following error:
org.springframework.messaging.MessageDeliveryException: failed to send Message to channel
'pubSubFlow.channel#1'; nested exception is java.lang.IllegalStateException: The [bean
'pubSubFlow.channel#1'; defined in: 'class path resource [fr/auchan/lark/tracking/api/v1
/pubsub/PubSubRequestIntegration.class]'; from source: 'bean method pubSubFlow'] doesn't
have subscribers to accept messages
org.springframework.integration.support.utils.IntegrationUtils.wrapInDeliveryExceptionIfNecessary(IntegrationUtils.java:167) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:600) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:520) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.FluxMessageChannel.lambda$subscribeTo$2(FluxMessageChannel.java:83) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:189) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.runAsync(FluxPublishOn.java:439) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.run(FluxPublishOn.java:526) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.scheduler.WorkerTask.call(WorkerTask.java:84) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.scheduler.WorkerTask.call(WorkerTask.java:37) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
java.base/java.util.concurrent.FutureTask.run(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source) ~[na:na]
java.base/java.lang.Thread.run(Unknown Source) ~[na:na]
Caused by: java.lang.IllegalStateException: The [bean 'pubSubFlow.channel#1'; defined in: 'class path resource [PubSubRequestIntegration.class]'; from source: 'bean method pubSubFlow'] doesn't have subscribers to accept messages
org.springframework.util.Assert.state(Assert.java:97) ~[spring-core-5.2.12.RELEASE.jar:5.2.12.RELEASE]
org.springframework.integration.channel.FluxMessageChannel.doSend(FluxMessageChannel.java:61) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:570) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
12 common frames omitted
Below is my channel declaration and the use of a ServiceActivator as described in the Pub/Sub guidelines (see the link above).
@Bean
public MessageChannel dataChannel() {
    return MessageChannels.publishSubscribe(Executors.newCachedThreadPool()).get();
}
@Bean
public MessageChannel pubSubChannel() {
    return MessageChannels.publishSubscribe(Executors.newCachedThreadPool()).get();
}
@Bean
public IntegrationFlow pubSubFlow(
        MessageChannel dataChannel,
        MessageChannel pubSubChannel) {
    return IntegrationFlows
            .from(dataChannel)
            .fluxTransform(this::toPubSubFormat)
            .channel(pubSubChannel)
            .get();
}
@Bean
@ServiceActivator(inputChannel = "pubSubChannel")
public PubSubMessageHandler sendToPubSub(PubSubTemplate pubSubTemplate) {
    PubSubMessageHandler adapter = new PubSubMessageHandler(pubSubTemplate,
            pubSubIntegrationProperties.getTopic());
    adapter.setPublishCallback(
            new ListenableFutureCallback<>() {
                @Override
                public void onFailure(Throwable throwable) {
                    log.warn("There was the following error sending the message. " + throwable);
                }
                @Override
                public void onSuccess(String result) {
                    log.debug("Message was sent via the outbound channel adapter to {} : {}", pubSubIntegrationProperties.getTopic(), result);
                }
            });
    return adapter;
}
Did I miss something? Why is the pubSubChannel marked as having no subscribers?
Thanks for the help

#RabbitListener is not working with queue names pattern

I'm using Spring Boot for my project, where I need to listen to a few queues at runtime with a pattern enabled in @RabbitListener.
I have tried @RabbitListener(queues = "queue*"),
but it throws an exception saying a constant should be used for the queue name.
Any help would be appreciated.
I also have a question about the JSON converter with Spring Boot RabbitMQ:
@Bean
SimpleMessageListenerContainer container(ConnectionFactory connectionFactory, MessageListenerAdapter listenerAdapter, Queue notificationQueue) {
    SimpleMessageListenerContainer container = new SimpleMessageListenerContainer();
    container.setConnectionFactory(connectionFactory);
    container.setQueueNames(notificationQueue.getName());
    container.setMessageListener(listenerAdapter);
    return container;
}
@Bean
Reciver receiver() {
    return new Reciver();
}
@Bean
MessageListenerAdapter listenerAdapter(Reciver receiver) {
    return new MessageListenerAdapter(receiver, "receiveMethod");
}
And in the Reciver class:
public void receiveMethod(Message msg) {
    // Code
}
Here I get an exception that says:
org.springframework.amqp.rabbit.listener.exception.ListenerExecutionFailedException: Failed to invoke target method 'onMessage' with argument type = [class [B], value = [{[B#4ed663ad}]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.invokeListenerMethod(MessageListenerAdapter.java:408) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.onMessage(MessageListenerAdapter.java:298) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.doInvokeListener(AbstractMessageListenerContainer.java:757) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.invokeListener(AbstractMessageListenerContainer.java:680) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.access$001(SimpleMessageListenerContainer.java:93) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer$1.invokeListener(SimpleMessageListenerContainer.java:183) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.invokeListener(SimpleMessageListenerContainer.java:1358) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.executeListener(AbstractMessageListenerContainer.java:661) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.doReceiveAndExecute(SimpleMessageListenerContainer.java:1102) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.receiveAndExecute(SimpleMessageListenerContainer.java:1086) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.access$1100(SimpleMessageListenerContainer.java:93) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer$AsyncMessageProcessingConsumer.run(SimpleMessageListenerContainer.java:1203) [spring-rabbit-1.5.5.RELEASE.jar:na]
at java.lang.Thread.run(Unknown Source) [na:1.8.0_77]
Caused by: java.lang.NoSuchMethodException: com.mdm.amqp.MDMNotificationListener.onMessage([B)
at java.lang.Class.getMethod(Unknown Source) ~[na:1.8.0_77]
at org.springframework.util.MethodInvoker.prepare(MethodInvoker.java:174) ~[spring-core-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.invokeListenerMethod(MessageListenerAdapter.java:386) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
... 12 common frames omitted
