Spring for Apache Kafka JSON Deserialization Exception Class Not Found - spring-boot

I'm trying to get messages from a Kafka topic, but for some reason I get the following error:
2022-06-28 14:17:52.044 INFO 1 --- [ntainer#0-0-C-1] o.a.k.clients.consumer.KafkaConsumer : [Consumer clientId=consumer-api1-1, groupId=api1] Seeking to offset 1957 for partition ActiveProxySources-0
2022-06-28T14:17:52.688451744Z 2022-06-28 14:17:52.687 ERROR 1 --- [ntainer#0-0-C-1] o.s.kafka.listener.DefaultErrorHandler : Backoff none exhausted for ActiveProxySources-0#1957
2022-06-28T14:17:52.688499949Z
2022-06-28T14:17:52.688511943Z org.springframework.kafka.listener.ListenerExecutionFailedException: Listener failed; nested exception is org.springframework.kafka.support.serializer.DeserializationException: failed to deserialize; nested exception is org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688544511Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2691) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688555996Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.checkDeser(KafkaMessageListenerContainer.java:2738) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688564633Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2612) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688573552Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2544) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688582961Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2429) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688591538Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2307) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688600362Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1981) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688610882Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1365) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688620353Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1356) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688629357Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1251) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688637662Z at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[na:na]
2022-06-28T14:17:52.688646009Z at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
2022-06-28T14:17:52.688655783Z at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
2022-06-28T14:17:52.688664349Z Caused by: org.springframework.kafka.support.serializer.DeserializationException: failed to deserialize; nested exception is org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688674537Z at org.springframework.kafka.support.serializer.SerializationUtils.deserializationException(SerializationUtils.java:150) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688683348Z at org.springframework.kafka.support.serializer.ErrorHandlingDeserializer.deserialize(ErrorHandlingDeserializer.java:204) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688699174Z at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1420) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688707618Z at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:134) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688718316Z at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1652) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688728359Z at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1800(Fetcher.java:1488) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688736716Z at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:721) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688748228Z at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:672) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688758573Z at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1304) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688768278Z at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1238) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688776576Z at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1211) ~[kafka-clients-3.0.1.jar!/:na]
2022-06-28T14:17:52.688785598Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollConsumer(KafkaMessageListenerContainer.java:1521) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688793960Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1511) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688802367Z at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1339) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688811023Z ... 4 common frames omitted
2022-06-28T14:17:52.688819230Z Caused by: org.springframework.messaging.converter.MessageConversionException: failed to resolve class name. Class not found [com.freeproxy.parser.model.kafka.KafkaMessage]; nested exception is java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688828306Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.getClassIdType(DefaultJackson2JavaTypeMapper.java:142) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688837754Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.toJavaType(DefaultJackson2JavaTypeMapper.java:103) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688846335Z at org.springframework.kafka.support.serializer.JsonDeserializer.deserialize(JsonDeserializer.java:572) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688854685Z at org.springframework.kafka.support.serializer.ErrorHandlingDeserializer.deserialize(ErrorHandlingDeserializer.java:201) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688862907Z ... 16 common frames omitted
2022-06-28T14:17:52.688870692Z Caused by: java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
2022-06-28T14:17:52.688888550Z at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:476) ~[na:na]
2022-06-28T14:17:52.688898662Z at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:589) ~[na:na]
2022-06-28T14:17:52.688907289Z at org.springframework.boot.loader.LaunchedURLClassLoader.loadClass(LaunchedURLClassLoader.java:151) ~[java.jar:na]
2022-06-28T14:17:52.688915418Z at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522) ~[na:na]
2022-06-28T14:17:52.688923583Z at java.base/java.lang.Class.forName0(Native Method) ~[na:na]
2022-06-28T14:17:52.688931577Z at java.base/java.lang.Class.forName(Class.java:398) ~[na:na]
2022-06-28T14:17:52.688939753Z at org.springframework.util.ClassUtils.forName(ClassUtils.java:284) ~[spring-core-5.3.19.jar!/:5.3.19]
2022-06-28T14:17:52.688948555Z at org.springframework.kafka.support.mapping.DefaultJackson2JavaTypeMapper.getClassIdType(DefaultJackson2JavaTypeMapper.java:138) ~[spring-kafka-2.8.5.jar!/:2.8.5]
2022-06-28T14:17:52.688957079Z ... 19 common frames omitted
2022-06-28T14:17:52.688964715Z
I have other applications that send and read messages on Kafka topics with the same settings and they all work fine, but not this application. Ideally, I want to read messages from two Kafka topics (messages in both topics look the same and contain the same objects), but even when I try to read messages from one topic, I get the error shown above.
The settings are as follows:
class KafkaMessage {
    String id
    IdStatus status
}

@Service
@Slf4j
class ConsumerService {

    Set<String> activeProxies = []

    int getActiveProxiesNumber() {
        activeProxies.size()
    }

    Set<String> activeProxySources = []

    int getActiveProxySourcesNumber() {
        activeProxySources.size()
    }

    @KafkaListener(topics = "ActiveProxies"/*, containerFactory = "KafkaListenerContainerFactoryActiveProxies"*/)
    public void consumeProxyId(KafkaMessage message) {
        log.info("Consuming ${message.id}: ${message.status}")
        if (message.status == IdStatus.ADD) {
            activeProxies.add(message.id)
        }
        if (message.status == IdStatus.DELETE) {
            activeProxies.remove(message.id)
        }
    }

    @KafkaListener(topics = "ActiveProxySources"/*, containerFactory = "KafkaListenerContainerFactoryActiveProxySources"*/)
    public void consumeProxySourceId(KafkaMessage message) {
        log.info("Consuming ${message.id}: ${message.status}")
        if (message.status == IdStatus.ADD) {
            activeProxySources.add(message.id)
        }
        if (message.status == IdStatus.DELETE) {
            activeProxySources.remove(message.id)
        }
    }
}
TopicConfig:
@Configuration
public class TopicConfig {

    @Value(value = "kafka:9092")
    private String bootstrapAddress

    @Value(value = "ActiveProxies")
    private String activeProxies

    @Value(value = "ActiveProxySources")
    private String activeProxySources

    // @Bean
    // public KafkaAdmin kafkaAdmin() {
    //     Map<String, Object> configs = new HashMap<>();
    //     configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
    //     return new KafkaAdmin(configs);
    // }

    @Bean
    public NewTopic ActiveProxiesTopic() {
        return TopicBuilder.name(activeProxies)
                .partitions(1)
                .replicas(1)
                .config(org.apache.kafka.common.config.TopicConfig.RETENTION_MS_CONFIG, "60000")
                .build()
    }

    @Bean
    public NewTopic ActiveProxySourcesTopic() {
        return TopicBuilder.name(activeProxySources)
                .partitions(1)
                .replicas(1)
                .config(org.apache.kafka.common.config.TopicConfig.RETENTION_MS_CONFIG, "60000")
                .build()
    }
}
application.properties file:
server.port=30329
spring.data.mongodb.database=free-proxy-engine
spring.kafka.bootstrap-servers=kafka:9092
spring.kafka.consumer.group-id=consumer-Api1
spring.kafka.consumer.properties.spring.json.trusted.packages=*
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.ErrorHandlingDeserializer
spring.kafka.consumer.properties.spring.deserializer.value.delegate.class=org.springframework.kafka.support.serializer.JsonDeserializer
I use Docker Compose to run Kafka and all the other applications.
docker-compose.yaml:
version: '2'
services:
  mongodb:
    image: mongo:5.0.9
    restart: unless-stopped
  api:
    image: openjdk:11
    depends_on:
      - mongodb
      - kafka
    restart: unless-stopped
    volumes:
      - ./libs/api-0.0.1-SNAPSHOT.jar:/gjava/java.jar
    environment:
      spring_data_mongodb_host: mongodb
      spring_kafka_consumer_group-id: api1
    command: /bin/bash -c "cd /gjava && chmod +x /gjava/*.jar && java -jar /gjava/java.jar"
    ports:
      - 30329:30329
  zookeeper:
    image: confluentinc/cp-zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  kafka:
    image: confluentinc/cp-kafka
    restart: always
    hostname: kafka
    depends_on:
      - zookeeper
    container_name: kafka
    ports:
      - "9092:9092"
    environment:
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092
      KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
I also created my own consumer configuration for Kafka, but the error remained whether I read messages from two topics or from one.
@EnableKafka
@Configuration
class KafkaConsumerConfig {

    @Value(value = "kafka:9092")
    private String bootstrapAddress

    @Bean
    public ConsumerFactory<String, KafkaMessage> ConsumerFactoryActiveProxies() {
        Map<String, Object> props = new HashMap<>()
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress)
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "Api-1")
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*")
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, JsonDeserializer.class)
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class.getName())
        return new DefaultKafkaConsumerFactory<>(props/*,
                new StringDeserializer(),
                new JsonDeserializer<>(KafkaMessage.class)*/)
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> KafkaListenerContainerFactoryActiveProxies() {
        ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> factory
                = new ConcurrentKafkaListenerContainerFactory<>()
        factory.setConsumerFactory(ConsumerFactoryActiveProxies())
        factory.setMessageConverter(new StringJsonMessageConverter())
        return factory
    }

    @Bean
    public ConsumerFactory<String, KafkaMessage> ConsumerFactoryActiveProxySources() {
        Map<String, Object> props = new HashMap<>()
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress)
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "Api-2")
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
        props.put(JsonDeserializer.TRUSTED_PACKAGES, "*")
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ErrorHandlingDeserializer.class)
        props.put(ErrorHandlingDeserializer.KEY_DESERIALIZER_CLASS, JsonDeserializer.class)
        props.put(ErrorHandlingDeserializer.VALUE_DESERIALIZER_CLASS, JsonDeserializer.class.getName())
        return new DefaultKafkaConsumerFactory<>(props/*,
                new StringDeserializer(),
                new JsonDeserializer<>(KafkaMessage.class)*/)
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> KafkaListenerContainerFactoryActiveProxySources() {
        ConcurrentKafkaListenerContainerFactory<String, KafkaMessage> factory
                = new ConcurrentKafkaListenerContainerFactory<>()
        factory.setConsumerFactory(ConsumerFactoryActiveProxySources())
        factory.setMessageConverter(new StringJsonMessageConverter())
        return factory
    }
}
I will be grateful for your help.

By default, the deserializer will use type information in headers to determine which type to create.
Caused by: java.lang.ClassNotFoundException: com.freeproxy.parser.model.kafka.KafkaMessage
Most likely, KafkaMessage is in a different package on the sending side.
There are a couple of solutions:
Set JsonDeserializer.USE_TYPE_INFO_HEADERS to false and JsonDeserializer.VALUE_DEFAULT_TYPE to com.new.package.kafka.KafkaMessage (the fully qualified name of KafkaMessage on the receiving side): https://docs.spring.io/spring-kafka/docs/current/reference/html/#serdes-json-config
Or use type mapping: https://docs.spring.io/spring-kafka/docs/current/reference/html/#serdes-mapping-types
I suggest you read this whole section: https://docs.spring.io/spring-kafka/docs/current/reference/html/#json-serde
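Since your consumer is configured through application.properties with ErrorHandlingDeserializer delegating to JsonDeserializer, the first option could look roughly like this (a sketch; com.yourapp.model.kafka.KafkaMessage is a placeholder for whatever the fully qualified name of KafkaMessage is in the consuming application):
# Stop using the producer's type headers...
spring.kafka.consumer.properties.spring.json.use.type.headers=false
# ...and deserialize every record into the consumer-side class instead
spring.kafka.consumer.properties.spring.json.value.default.type=com.yourapp.model.kafka.KafkaMessage
For the type-mapping option, both sides share a token instead, e.g. spring.kafka.consumer.properties.spring.json.type.mapping=message:com.yourapp.model.kafka.KafkaMessage on the consumer, with a matching message:... mapping configured on the producer.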

Related

Spring boot sftp-integration with Apache mina throws Error - No such file

private static SessionFactory<DirEntry> sftpSessionFactory(SftpProperties sftpProps, String publickeyIterativePwd) {
    DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
    factory.setHost("testhost");
    factory.setPort("port");
    factory.setUser("username");
    factory.setPassword("password");
    factory.setAllowUnknownKeys(true);
    return new CachingSessionFactory<>(factory, 200);
}

SftpRemoteFileTemplate template = new SftpRemoteFileTemplate(sftpSessionFactory());
template.list('path on the server');
The template.list call throws the exception below:
Caused by: java.lang.RuntimeException: SFTP error (SSH_FX_NO_SUCH_FILE): No such file
at org.apache.sshd.sftp.client.impl.SftpIterableDirEntry.iterator(SftpIterableDirEntry.java:66) ~[sshd-sftp-2.9.2.jar:2.9.2]
at org.apache.sshd.sftp.client.impl.SftpIterableDirEntry.iterator(SftpIterableDirEntry.java:34) ~[sshd-sftp-2.9.2.jar:2.9.2]
at java.base/java.lang.Iterable.spliterator(Iterable.java:101) ~[na:na]
at org.springframework.integration.sftp.session.SftpSession.doList(SftpSession.java:99) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:69) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.sftp.session.SftpSession.list(SftpSession.java:52) ~[spring-integration-sftp-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.session.CachingSessionFactory$CachedSession.list(CachingSessionFactory.java:227) ~[spring-integration-file-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.RemoteFileTemplate.lambda$list$5(RemoteFileTemplate.java:422) ~[spring-integration-file-6.0.0.jar:6.0.0]
at org.springframework.integration.file.remote.RemoteFileTemplate.execute(RemoteFileTemplate.java:452) ~[spring-integration-file-6.0.0.jar:6.0.0]
... 95 common frames omitted
Caused by: org.apache.sshd.sftp.common.SftpException: No such file

Spring Boot Kafka Manual ack fails

I have a Spring Boot Kafka consumer with the configuration below. I was trying manual acknowledgment instead of auto-commit, and with manual acknowledgment I started getting an error.
Spring Boot version is 2.7.2.
kafka.consumer.groupId=mcs-ccp-event
message.topic.name=mcs_ccp_test
kafka.bootstrapAddress=kafka-dev-app-a1.com:9092
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.listener.ack-mode=MANUAL_IMMEDIATE
and these consumer configurations:
@EnableKafka
@Configuration
@Slf4j
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    @Value(value = "${kafka.consumer.groupId}")
    private String groupId;

    public ConsumerFactory<String, Event> eventConsumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        //props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(JsonDeserializer.VALUE_DEFAULT_TYPE, "com.xxx.mcsccpkafkaconsumer.vo.Event");
        props.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, false);
        return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new JsonDeserializer<>(Event.class));
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Event> eventKafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Event> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(eventConsumerFactory());
        return factory;
    }
}
This is my listener
@KafkaListener(topics = "${message.topic.name}", containerFactory = "eventKafkaListenerContainerFactory", groupId = "${kafka.consumer.groupId}")
public void eventListener(@Payload Event event, @Header(KafkaHeaders.RECEIVED_PARTITION_ID) int partition,
        Acknowledgment acknowledgment) {
    log.info("Received event message: {} from partition : {}", event, partition);
    persistEventToDB(event);
    acknowledgment.acknowledge();
    this.eventLatch.countDown();
}
Whenever the consumer receives a message from the producer, it always throws this error:
2022-08-06 11:16:11.749 ERROR 37700 --- [ntainer#0-0-C-1] o.s.kafka.listener.DefaultErrorHandler : Backoff none exhausted for mcs__ccp-1#122
org.springframework.kafka.listener.ListenerExecutionFailedException: invokeHandler Failed; nested exception is java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.; nested exception is java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2713) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2683) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:2643) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:2570) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:2451) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:2329) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:2000) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1373) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1364) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1255) ~[spring-kafka-2.8.8.jar:2.8.8]
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) ~[na:na]
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) ~[na:na]
at java.base/java.lang.Thread.run(Thread.java:829) ~[na:na]
Suppressed: org.springframework.kafka.listener.ListenerExecutionFailedException: Restored Stack Trace
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.checkAckArg(MessagingMessageListenerAdapter.java:369) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:352) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:92) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:53) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2663) ~[spring-kafka-2.8.8.jar:2.8.8]
Caused by: java.lang.IllegalStateException: No Acknowledgment available as an argument, the listener container must have a MANUAL AckMode to populate the Acknowledgment.
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.checkAckArg(MessagingMessageListenerAdapter.java:369) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:352) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:92) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:53) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:2663) ~[spring-kafka-2.8.8.jar:2.8.8]
... 11 common frames omitted
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot handle message; nested exception is org.springframework.messaging.converter.MessageConversionException: Cannot convert from [com.xxx.mcsccpkafkaconsumer.vo.Event] to [org.springframework.kafka.support.Acknowledgment] for GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}], failedMessage=GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}]
... 15 common frames omitted
Caused by: org.springframework.messaging.converter.MessageConversionException: Cannot convert from [com.xxx.mcsccpkafkaconsumer.vo.Event] to [org.springframework.kafka.support.Acknowledgment] for GenericMessage [payload=Event(eventType=Download, timestamp=2022-08-05 19:11:12, username=xxxxxx, browser=Chrome, eventDetails=EventDetails(objectName=VW_Attachment, recordType=ELA,EULA, agreementStatus=null, searchCategory=null, searchKeyword=null, downloadType=PDF, templateId=null, fileName=null, agreementNumber=null)), headers={kafka_offset=122, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#5c5b32a5, kafka_timestampType=CREATE_TIME, kafka_receivedPartitionId=1, kafka_receivedTopic=mcs_ccp_test, kafka_receivedTimestamp=1659764771420, kafka_groupId=mcs-ccp-event}]
at org.springframework.messaging.handler.annotation.support.PayloadMethodArgumentResolver.resolveArgument(PayloadMethodArgumentResolver.java:145) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.kafka.annotation.KafkaNullAwarePayloadArgumentResolver.resolveArgument(KafkaNullAwarePayloadArgumentResolver.java:46) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolverComposite.resolveArgument(HandlerMethodArgumentResolverComposite.java:118) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.getMethodArgumentValues(InvocableHandlerMethod.java:147) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:115) ~[spring-messaging-5.3.22.jar:5.3.22]
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:56) ~[spring-kafka-2.8.8.jar:2.8.8]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:347) ~[spring-kafka-2.8.8.jar:2.8.8]
... 14 common frames omitted
You're setting the ack-mode and auto-offset-reset in the properties file, which is used by Spring Boot's auto-configuration to set up its own KafkaListenerContainerFactory.
But since you declare your own KafkaListenerContainerFactory bean, auto-configuration backs off and your programmatic configuration is used instead.
You can set the properties for your consumer factory directly in the properties file and let Spring Boot create the beans - then there's no need for this KafkaConsumerConfig class.
Or you can set the ack mode and auto-offset-reset directly in the factory bean you're declaring instead of in the properties file.
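For the second option, a minimal sketch (assuming the same eventConsumerFactory() and Event type you already declare) could look like this:
@Bean
public ConcurrentKafkaListenerContainerFactory<String, Event> eventKafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, Event> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(eventConsumerFactory());
    // Does programmatically what spring.kafka.listener.ack-mode=MANUAL_IMMEDIATE does for the
    // auto-configured factory (requires org.springframework.kafka.listener.ContainerProperties)
    factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
    return factory;
}
Likewise, auto-offset-reset would move into the consumer factory's props map, e.g. props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest").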

Why spring-boot with Kafka failed to start?

There is a spring-boot application with the kafka dependency.
There are two Kafka topics, and I need to read messages from both of them:
tacocloud.orders.topic
tacocloud.tacos.topic
Messages have already been sent to them successfully.
I configured the Kafka config to listen to these topics like this:
package com.example.tacocloud.config;

import com.example.tacocloud.model.jpa.Order;
import com.example.tacocloud.model.jpa.Taco;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import java.util.HashMap;
import java.util.Map;

@Slf4j
@Configuration
@EnableKafka
@EnableConfigurationProperties
public class KafkaConfig {

    // Order
    @Bean
    @ConfigurationProperties("spring.kafka.consumer.order")
    public Map<String, Object> orderConsumerConfig() {
        return new HashMap<>();
    }

    @Bean
    public DefaultKafkaConsumerFactory<Object, Order> orderConsumerFactory(
            @Qualifier("orderConsumerConfig") Map<String, Object> orderConsumerConfig) {
        return new DefaultKafkaConsumerFactory<>(orderConsumerConfig);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Order> order1KafkaMessageListenerContainer(
            @Qualifier("orderConsumerConfig") Map<String, Object> orderConsumerConfig,
            @Qualifier("orderConsumerFactory") DefaultKafkaConsumerFactory orderConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory();
        factory.setConsumerFactory(orderConsumerFactory);
        return factory;
    }

    // Taco
    @Bean
    @ConfigurationProperties("spring.kafka.consumer.taco")
    public Map<String, Object> tacoConsumerConfig() {
        return new HashMap<>();
    }

    @Bean
    public DefaultKafkaConsumerFactory tacoConsumerFactory(
            @Qualifier("tacoConsumerConfig") Map<String, Object> tacoConsumerConfig) {
        return new DefaultKafkaConsumerFactory<>(tacoConsumerConfig);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory tacoConcurrentMessageListenerContainer(
            @Qualifier("tacoConsumerConfig") Map<String, Object> tacoConsumerConfig,
            @Qualifier("tacoConsumerFactory") DefaultKafkaConsumerFactory tacoConsumerFactory) {
        ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory();
        factory.setConsumerFactory(tacoConsumerFactory);
        return factory;
    }
}
So there are two DefaultKafkaConsumerFactory beans and two ConcurrentKafkaListenerContainerFactory beans.
After that, I created a service with @KafkaListener methods that log messages:
package com.example.tacocloud.service;

import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;

@Service
@EnableKafka
public class KafkaService {

    @KafkaListener(topics = "tacocloud.orders.topic", groupId = "one")
    public void order() {
        System.out.println("Order");
    }

    @KafkaListener(topics = "tacocloud.tacos.topic", groupId = "two")
    public void taco() {
        System.out.println("Taco");
    }
}
application.yml file
spring:
  kafka:
    consumer:
      order:
        topic: tacocloud.orders.topic
        "[bootstrap.servers]": localhost:29888
        "[key.deserializer]": org.apache.kafka.common.serialization.StringDeserializer
        "[value.deserializer]": com.example.tacocloud.model.serialization.OrderDeserializer
        template:
          "[default.topic]": tacocloud.orders.topic
      taco:
        topic: tacocloud.tacos.topic
        "[bootstrap.servers]": localhost:29888
        "[key.deserializer]": org.apache.kafka.common.serialization.StringDeserializer
        "[value.deserializer]": com.example.tacocloud.model.serialization.TacoDeserializer
        template:
          "[default.topic]": tacocloud.tacos.topic
But when I start my spring-boot application, I see this error message:
2022-04-15 21:38:35.918 ERROR 13888 --- [ restartedMain] o.s.boot.SpringApplication : Application run failed

org.springframework.context.ApplicationContextException: Failed to start bean 'org.springframework.kafka.config.internalKafkaListenerEndpointRegistry'; nested exception is org.apache.kafka.common.config.ConfigException: Missing required configuration "key.deserializer" which has no default value.
	at org.springframework.context.support.DefaultLifecycleProcessor.doStart(DefaultLifecycleProcessor.java:181) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.context.support.DefaultLifecycleProcessor.access$200(DefaultLifecycleProcessor.java:54) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.context.support.DefaultLifecycleProcessor$LifecycleGroup.start(DefaultLifecycleProcessor.java:356) ~[spring-context-5.3.16.jar:5.3.16]
	at java.base/java.lang.Iterable.forEach(Iterable.java:75) ~[na:na]
	at org.springframework.context.support.DefaultLifecycleProcessor.startBeans(DefaultLifecycleProcessor.java:155) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.context.support.DefaultLifecycleProcessor.onRefresh(DefaultLifecycleProcessor.java:123) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.context.support.AbstractApplicationContext.finishRefresh(AbstractApplicationContext.java:935) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:586) ~[spring-context-5.3.16.jar:5.3.16]
	at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:145) ~[spring-boot-2.6.4.jar:2.6.4]
	at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:740) ~[spring-boot-2.6.4.jar:2.6.4]
	at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:415) ~[spring-boot-2.6.4.jar:2.6.4]
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:303) ~[spring-boot-2.6.4.jar:2.6.4]
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1312) ~[spring-boot-2.6.4.jar:2.6.4]
	at org.springframework.boot.SpringApplication.run(SpringApplication.java:1301) ~[spring-boot-2.6.4.jar:2.6.4]
	at com.example.tacocloud.TacoCloudApplication.main(TacoCloudApplication.java:10) ~[classes/:na]
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:na]
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:na]
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:na]
	at java.base/java.lang.reflect.Method.invoke(Method.java:566) ~[na:na]
	at org.springframework.boot.devtools.restart.RestartLauncher.run(RestartLauncher.java:49) ~[spring-boot-devtools-2.6.4.jar:2.6.4]
Caused by: org.apache.kafka.common.config.ConfigException: Missing required configuration "key.deserializer" which has no default value.
	at org.apache.kafka.common.config.ConfigDef.parseValue(ConfigDef.java:493) ~[kafka-clients-2.8.0.jar:na]
	at org.apache.kafka.common.config.ConfigDef.parse(ConfigDef.java:483) ~[kafka-clients-2.8.0.jar:na]
	at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:108) ~[kafka-clients-2.8.0.jar:na]
	at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:129) ~[kafka-clients-2.8.0.jar:na]
	at org.apache.kafka.clients.consumer.ConsumerConfig.<init>(ConsumerConfig.java:640) ~[kafka-clients-2.8.0.jar:na]
	at org.apache.kafka.clients.consumer.KafkaConsumer.<init>(KafkaConsumer.java:665) ~[kafka-clients-2.8.0.jar:na]
	at org.springframework.kafka.core.DefaultKafkaConsumerFactory.createRawConsumer(DefaultKafkaConsumerFactory.java:416) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.core.DefaultKafkaConsumerFactory.createKafkaConsumer(DefaultKafkaConsumerFactory.java:384) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.core.DefaultKafkaConsumerFactory.createConsumerWithAdjustedProperties(DefaultKafkaConsumerFactory.java:360) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.core.DefaultKafkaConsumerFactory.createKafkaConsumer(DefaultKafkaConsumerFactory.java:327) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.core.DefaultKafkaConsumerFactory.createConsumer(DefaultKafkaConsumerFactory.java:304) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.<init>(KafkaMessageListenerContainer.java:758) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.listener.KafkaMessageListenerContainer.doStart(KafkaMessageListenerContainer.java:344) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.listener.AbstractMessageListenerContainer.start(AbstractMessageListenerContainer.java:442) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.listener.ConcurrentMessageListenerContainer.doStart(ConcurrentMessageListenerContainer.java:209) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.listener.AbstractMessageListenerContainer.start(AbstractMessageListenerContainer.java:442) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.config.KafkaListenerEndpointRegistry.startIfNecessary(KafkaListenerEndpointRegistry.java:331) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.kafka.config.KafkaListenerEndpointRegistry.start(KafkaListenerEndpointRegistry.java:276) ~[spring-kafka-2.8.3.jar:2.8.3]
	at org.springframework.context.support.DefaultLifecycleProcessor.doStart(DefaultLifecycleProcessor.java:178) ~[spring-context-5.3.16.jar:5.3.16]
	... 19 common frames omitted

Process finished with exit code 0
Thank you for the sample.
So, I opened it locally and placed a breakpoint in this bean definition:
@Bean
public DefaultKafkaConsumerFactory<Object, Order> orderConsumerFactory(
        @Qualifier("orderConsumerConfig") Map<String, Object> orderConsumerConfig) {
    return new DefaultKafkaConsumerFactory<Object, Order>(orderConsumerConfig);
}
That orderConsumerConfig map looks like this:
orderConsumerConfig = {LinkedHashMap#8587} size = 1
"orderConsumerConfig" -> {HashMap#8600} size = 5
key = "orderConsumerConfig"
value = {HashMap#8600} size = 5
"key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer"
"template" -> {LinkedHashMap#8611} size = 1
"topic" -> "tacocloud.orders.topic"
"bootstrap.servers" -> "localhost:29888"
"value.deserializer" -> "sample.kafka.serializer.OrderDeserializer"
So it's indeed no surprise that your KafkaConsumer is not initialized properly: your target map config is hidden under that orderConsumerConfig entry of the injected map.
Would you mind sharing where you got the idea of putting @ConfigurationProperties on the Map bean, and how that is supposed to be used for dependency injection?
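In other words, DefaultKafkaConsumerFactory expects the flat Kafka config itself, roughly like this (a sketch built from the values in the debug dump above):
Map<String, Object> orderConsumerConfig = new HashMap<>();
orderConsumerConfig.put("bootstrap.servers", "localhost:29888");
orderConsumerConfig.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
orderConsumerConfig.put("value.deserializer", "sample.kafka.serializer.OrderDeserializer");
// not a map with a single "orderConsumerConfig" entry wrapping those keys, which is what was actually injected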
I wanted to do something similar (configure multiple ConsumerFactories) based on properties.
I used @ConfigurationProperties to create a Map<String,String> instead of a Map<String,Object> and then added the items of that map into a new Map<String,Object>. I'm not sure why Spring Boot loaded the Map<String,Object> that way.
@Bean
@ConfigurationProperties("taco-cart.kafka")
public Map<String, String> tacoCartKafkaProperties() {
    return new HashMap<>();
}

@Bean
public ConsumerFactory<String, TacoCart> tacoCartConsumerFactory(
        @Qualifier("tacoCartKafkaProperties") Map<String, String> tacoCartKafkaProperties) {
    // Convert the map.
    Map<String, Object> config = new HashMap<>();
    config.putAll(tacoCartKafkaProperties);
    return new DefaultKafkaConsumerFactory<>(config);
}

Spring integration / GCP PubSub : channel subscriber lost

Everything is mostly in the title.
I have a specific channel to send data to PubSub, using Spring Integration and this information about GCP PubSub.
I don't have any problem locally or in the QA environment.
But in prod, I get the following error:
org.springframework.messaging.MessageDeliveryException: failed to send Message to channel 'pubSubFlow.channel#1'; nested exception is java.lang.IllegalStateException: The [bean 'pubSubFlow.channel#1'; defined in: 'class path resource [fr/auchan/lark/tracking/api/v1/pubsub/PubSubRequestIntegration.class]'; from source: 'bean method pubSubFlow'] doesn't have subscribers to accept messages
org.springframework.integration.support.utils.IntegrationUtils.wrapInDeliveryExceptionIfNecessary(IntegrationUtils.java:167) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:600) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:520) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.FluxMessageChannel.lambda$subscribeTo$2(FluxMessageChannel.java:83) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:189) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.runAsync(FluxPublishOn.java:439) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.publisher.FluxPublishOn$PublishOnSubscriber.run(FluxPublishOn.java:526) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.scheduler.WorkerTask.call(WorkerTask.java:84) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
reactor.core.scheduler.WorkerTask.call(WorkerTask.java:37) ~[reactor-core-3.3.13.RELEASE.jar:3.3.13.RELEASE]
java.base/java.util.concurrent.FutureTask.run(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) ~[na:na]
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source) ~[na:na]
java.base/java.lang.Thread.run(Unknown Source) ~[na:na]
Caused by: java.lang.IllegalStateException: The [bean 'pubSubFlow.channel#1'; defined in: 'class path resource [PubSubRequestIntegration.class]'; from source: 'bean method pubSubFlow'] doesn't have subscribers to accept messages
org.springframework.util.Assert.state(Assert.java:97) ~[spring-core-5.2.12.RELEASE.jar:5.2.12.RELEASE]
org.springframework.integration.channel.FluxMessageChannel.doSend(FluxMessageChannel.java:61) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:570) ~[spring-integration-core-5.3.5.RELEASE.jar:5.3.5.RELEASE]
12 common frames omitted
Below are my channel declarations and the use of ServiceActivator as written in the PubSub guidelines (see the link above).
@Bean
public MessageChannel dataChannel() {
    return MessageChannels.publishSubscribe(Executors.newCachedThreadPool()).get();
}

@Bean
public MessageChannel pubSubChannel() {
    return MessageChannels.publishSubscribe(Executors.newCachedThreadPool()).get();
}

@Bean
public IntegrationFlow pubSubFlow(
        MessageChannel dataChannel,
        MessageChannel pubSubChannel) {
    return IntegrationFlows
            .from(dataChannel)
            .fluxTransform(this::toPubSubFormat)
            .channel(pubSubChannel)
            .get();
}

@Bean
@ServiceActivator(inputChannel = "pubSubChannel")
public PubSubMessageHandler sendToPubSub(PubSubTemplate pubSubTemplate) {
    PubSubMessageHandler adapter = new PubSubMessageHandler(pubSubTemplate,
            pubSubIntegrationProperties.getTopic());
    adapter.setPublishCallback(
            new ListenableFutureCallback<>() {
                @Override
                public void onFailure(Throwable throwable) {
                    log.warn("There was the following error sending the message. " + throwable);
                }

                @Override
                public void onSuccess(String result) {
                    log.debug("Message was sent via the outbound channel adapter to {} : {}", pubSubIntegrationProperties.getTopic(), result);
                }
            });
    return adapter;
}
Did I miss something? Why is the pubSubChannel marked as having no subscribers?
Thanks for the help

#RabbitListener is not working with queue names pattern

I'm using Spring Boot for my project, where I need to listen to a few queues at run time with a name pattern enabled in the RabbitListener.
I have tried @RabbitListener(queues = "queue*"),
but it throws an exception saying that a constant should be used for the queue name.
Any help would be appreciated.
I also have a question about the JSON converter in Spring Boot RabbitMQ:
@Bean
SimpleMessageListenerContainer container(ConnectionFactory connectionFactory, MessageListenerAdapter listenerAdapter, Queue notificationQueue) {
    SimpleMessageListenerContainer container = new SimpleMessageListenerContainer();
    container.setConnectionFactory(connectionFactory);
    container.setQueueNames(notificationQueue.getName());
    container.setMessageListener(listenerAdapter);
    return container;
}

@Bean
Reciver receiver() {
    return new Reciver();
}

@Bean
MessageListenerAdapter listenerAdapter(Reciver receiver) {
    return new MessageListenerAdapter(receiver, "receiveMethod");
}
And in the Reciver:
public void receiveMethod(Message msg) {
    // Code
}
Here I get an exception that says:
org.springframework.amqp.rabbit.listener.exception.ListenerExecutionFailedException: Failed to invoke target method 'onMessage' with argument type = [class [B], value = [{[B#4ed663ad}]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.invokeListenerMethod(MessageListenerAdapter.java:408) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.onMessage(MessageListenerAdapter.java:298) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.doInvokeListener(AbstractMessageListenerContainer.java:757) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.invokeListener(AbstractMessageListenerContainer.java:680) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.access$001(SimpleMessageListenerContainer.java:93) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer$1.invokeListener(SimpleMessageListenerContainer.java:183) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.invokeListener(SimpleMessageListenerContainer.java:1358) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.AbstractMessageListenerContainer.executeListener(AbstractMessageListenerContainer.java:661) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.doReceiveAndExecute(SimpleMessageListenerContainer.java:1102) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.receiveAndExecute(SimpleMessageListenerContainer.java:1086) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer.access$1100(SimpleMessageListenerContainer.java:93) [spring-rabbit-1.5.5.RELEASE.jar:na]
at org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer$AsyncMessageProcessingConsumer.run(SimpleMessageListenerContainer.java:1203) [spring-rabbit-1.5.5.RELEASE.jar:na]
at java.lang.Thread.run(Unknown Source) [na:1.8.0_77]
Caused by: java.lang.NoSuchMethodException: com.mdm.amqp.MDMNotificationListener.onMessage([B)
at java.lang.Class.getMethod(Unknown Source) ~[na:1.8.0_77]
at org.springframework.util.MethodInvoker.prepare(MethodInvoker.java:174) ~[spring-core-4.2.5.RELEASE.jar:4.2.5.RELEASE]
at org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter.invokeListenerMethod(MessageListenerAdapter.java:386) ~[spring-rabbit-1.5.5.RELEASE.jar:na]
... 12 common frames omitted
