kafka listener container in spring boot with annotations does not consume message

Not sure if I am doing something wrong, but I could not manage to make it work. Below is my code:
public class EventsApp {

    private static final Logger log = LoggerFactory.getLogger(EventsApp.class);

    @Value("${kafka.topic:test}")
    private String topic;

    @Value("${kafka.messageKey:si.key}")
    private String messageKey;

    @Value("${kafka.broker.address:localhost:9092}")
    private String brokerAddress;

    @Value("${kafka.zookeeper.connect:localhost:2181}")
    private String zookeeperConnect;

    /**
     * Main method, used to run the application.
     *
     * @param args the command line arguments
     * @throws UnknownHostException if the local host name could not be resolved into an address
     */
    public static void main(String[] args) throws UnknownHostException, Exception {
        ConfigurableApplicationContext context
                = new SpringApplicationBuilder(EventsApp.class)
                        .web(false)
                        .run(args);
        MessageChannel toKafka = context.getBean("toKafka", MessageChannel.class);
        for (int i = 0; i < 10; i++) {
            System.out.println("sending.." + toKafka.toString());
            toKafka.send(new GenericMessage<>("foo" + i));
        }
        Thread.sleep(115000);
        context.close();
        System.exit(0);
    }

    @KafkaListener(id = "baz", topics = "test",
            containerFactory = "kafkaListenerContainerFactory")
    public void listen(String data, Acknowledgment ack) {
        System.out.println("----- " + data);
        ack.acknowledge();
    }

    @Bean
    KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Integer, String>>
            kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<Integer, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(1);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    @ServiceActivator(inputChannel = "toKafka")
    @Bean
    public MessageHandler handler() throws Exception {
        KafkaProducerMessageHandler<String, String> handler =
                new KafkaProducerMessageHandler<>(kafkaTemplate());
        handler.setTopicExpression(new LiteralExpression(this.topic));
        handler.setMessageKeyExpression(new LiteralExpression(this.messageKey));
        return handler;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public ConsumerFactory<Integer, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
        //props.put("zookeeper.connect", this.zookeeperConnect);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "siTestGroup");
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public TopicCreator topicCreator() {
        return new TopicCreator(this.topic, this.zookeeperConnect);
    }

    public static class TopicCreator implements SmartLifecycle {

        private final String topic;

        private final String zkConnect;

        private volatile boolean running;

        public TopicCreator(String topic, String zkConnect) {
            this.topic = topic;
            this.zkConnect = zkConnect;
        }

        @Override
        public void start() {
            ZkUtils zkUtils = new ZkUtils(new ZkClient(this.zkConnect, 6000, 6000,
                    ZKStringSerializer$.MODULE$), null, false);
            try {
                AdminUtils.createTopic(zkUtils, topic, 1, 1, new Properties());
            }
            catch (TopicExistsException e) {
                // no-op
            }
            this.running = true;
        }

        @Override
        public void stop() {
        }

        @Override
        public boolean isRunning() {
            return this.running;
        }

        @Override
        public int getPhase() {
            return Integer.MIN_VALUE;
        }

        @Override
        public boolean isAutoStartup() {
            return true;
        }

        @Override
        public void stop(Runnable callback) {
            callback.run();
        }
    }
}
I am able to produce messages, but the listener never consumes them. I am using Spring Boot 1.4.0.RELEASE and spring-integration-kafka 2.0.1.RELEASE.

Try adding
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
to the consumer config.
By default, Kafka starts a consumer group with no committed offsets at the end of the topic, so messages produced before the consumer is assigned its partitions are never seen.
If that doesn't work, turn on DEBUG logging to figure out what's going on.
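For reference, this is the question's consumerFactory() with that one property added (a sketch; apart from the added line and its comment, the properties are unchanged from the question):
@Bean
public ConsumerFactory<Integer, String> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.brokerAddress);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "siTestGroup");
    // A new consumer group has no committed offsets; without this setting it
    // starts at the log end and misses records sent before partition assignment.
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
    props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 100);
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(props);
}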

Related

Spring Boot Kafka Configure DefaultErrorHandler?

I created a batch-consumer following the Spring Kafka docs:
@SpringBootApplication
public class ApplicationConsumer {

    private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationConsumer.class);

    private static final String TOPIC = "foo";

    public static void main(String[] args) {
        ConfigurableApplicationContext context = SpringApplication.run(ApplicationConsumer.class, args);
    }

    @Bean
    public RecordMessageConverter converter() {
        return new JsonMessageConverter();
    }

    @Bean
    public BatchMessagingMessageConverter batchConverter() {
        return new BatchMessagingMessageConverter(converter());
    }

    @KafkaListener(topics = TOPIC)
    public void listen(List<Name> ps) {
        LOGGER.info("received name beans: {}", Arrays.toString(ps.toArray()));
    }
}
I was able to get the consumer running by defining the following additional environment variables, which Spring automatically picks up:
export SPRING_KAFKA_BOOTSTRAP-SERVERS=...
export SPRING_KAFKA_CONSUMER_GROUP-ID=...
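These map to the standard Boot properties, so the same configuration could go in application.properties instead:
spring.kafka.bootstrap-servers=...
spring.kafka.consumer.group-id=...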
So the above code works. But now I want to customize the default error handler to use exponential backoff. From the reference docs I tried adding the following to the ApplicationConsumer class:
@Bean
public ConcurrentKafkaListenerContainerFactory<?, ?> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setCommonErrorHandler(new DefaultErrorHandler(new ExponentialBackOffWithMaxRetries(10)));
    factory.setConsumerFactory(consumerFactory());
    return factory;
}

@Bean
public ConsumerFactory<String, Object> consumerFactory() {
    return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}

@Bean
public Map<String, Object> consumerConfigs() {
    Map<String, Object> props = new HashMap<>();
    return props;
}
But now I get errors saying that it can't find some of the configuration. It looks like I'm stuck having to redefine all of the properties in consumerConfigs() that were already being defined automatically before, everything from the bootstrap server URIs to the JSON deserialization config.
Is there a good way to update my first version of the code to just override the default-error handler?
Just define the error handler as a @Bean and Boot will automatically wire it into its auto-configured container factory.
EDIT
This works as expected for me:
@SpringBootApplication
public class So70884203Application {

    public static void main(String[] args) {
        SpringApplication.run(So70884203Application.class, args);
    }

    @Bean
    DefaultErrorHandler eh() {
        return new DefaultErrorHandler((rec, ex) -> {
            System.out.println("Recovered: " + rec);
        }, new FixedBackOff(0L, 0L));
    }

    @KafkaListener(id = "so70884203", topics = "so70884203")
    void listen(String in) {
        System.out.println(in);
        throw new RuntimeException("test");
    }

    @Bean
    NewTopic topic() {
        return TopicBuilder.name("so70884203").partitions(1).replicas(1).build();
    }
}
foo
Recovered: ConsumerRecord(topic = so70884203, partition = 0, leaderEpoch = 0, offset = 0, CreateTime = 1643316625291, serialized key size = -1, serialized value size = 3, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = foo)
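The question asked for exponential backoff specifically; the same @Bean approach works there too. A sketch, with illustrative interval values that are assumptions rather than anything from the original answer:
@Bean
DefaultErrorHandler eh() {
    ExponentialBackOffWithMaxRetries backOff = new ExponentialBackOffWithMaxRetries(10);
    backOff.setInitialInterval(1_000L); // first retry after 1s
    backOff.setMultiplier(2.0);         // double the delay on each attempt
    backOff.setMaxInterval(10_000L);    // cap the delay at 10s
    return new DefaultErrorHandler((rec, ex) -> {
        System.out.println("Recovered: " + rec);
    }, backOff);
}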

EmbeddedKafka does not invoke listener when using avro schema

I have been trying to write a simple Kafka listener unit test using KafkaEmbedded, but my listener never gets invoked. I have been using this link for inspiration, since I also need an Avro serializer/deserializer.
Below is what my test class looks like.
@ExtendWith(SpringExtension.class)
public class KafkaTest {

    public static final String TOPIC_2 = "topic";

    @Autowired
    private Service listener;

    @Autowired
    private SomeClient someClient;

    @Autowired
    private KafkaTemplate<String, Avro> template;

    @Test
    void testSimple() {
        template.send(TOPIC_2, "test", Avro.newBuilder()
                .setGameProvider("abcd")
                .setMessageName("someMessageName")
                .setRequestId(UUID.randomUUID().toString())
                .setBody(UUID.randomUUID().toString())
                .build());
        verify(someClient).register(anyString(), anyString(), anyString(), anyString());
        template.flush();
    }

    @Configuration
    @EnableKafka
    public static class Config {

        @Bean
        public EmbeddedKafkaBroker kafkaEmbedded() {
            return new EmbeddedKafkaBroker(1, true, 1, TOPIC_2);
        }

        @Bean
        public ConsumerFactory<String, Avro> createConsumerFactory() {
            Map<String, Object> props = new HashMap<>();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "group-1");
            props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CustomKafkaAvroDeserializer.class);
            props.put("schema.registry.url", "not-used");
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            return new DefaultKafkaConsumerFactory<>(props);
        }

        @Bean
        public ConcurrentKafkaListenerContainerFactory<String, Avro> kafkaListenerContainerFactory() {
            ConcurrentKafkaListenerContainerFactory<String, Avro> factory = new ConcurrentKafkaListenerContainerFactory<>();
            factory.setConsumerFactory(createConsumerFactory());
            return factory;
        }

        @MockBean
        private SomeClient client;

        @MockBean
        private Marshaller marshaller;

        @Bean
        public SomeService listener() {
            return new SomeService(client, marshaller, kafkaTemplate());
        }

        @Bean
        public ProducerFactory<String, Avro> producerFactory() {
            Map<String, Object> props = new HashMap<>();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaEmbedded().getBrokersAsString());
            props.put(ProducerConfig.RETRIES_CONFIG, 1);
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, CustomKafkaAvroSerializer.class);
            props.put("schema.registry.url", "not-used");
            return new DefaultKafkaProducerFactory<>(props);
        }

        @Bean
        public KafkaTemplate<String, Avro> kafkaTemplate() {
            return new KafkaTemplate<>(producerFactory());
        }
    }
}
public class CustomKafkaAvroDeserializer extends KafkaAvroDeserializer {

    @Override
    public Object deserialize(String topic, byte[] bytes) {
        if (topic.equals("topic")) {
            this.schemaRegistry = getMockClient(SafeReport.SCHEMA$);
        }
        return super.deserialize(topic, bytes);
    }

    private static SchemaRegistryClient getMockClient(final Schema schema$) {
        return new MockSchemaRegistryClient() {
            @Override
            public synchronized Schema getById(int id) {
                return schema$;
            }
        };
    }
}
public class CustomKafkaAvroSerializer extends KafkaAvroSerializer {

    public CustomKafkaAvroSerializer() {
        super();
        super.schemaRegistry = new MockSchemaRegistryClient();
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client) {
        super(new MockSchemaRegistryClient());
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client, Map<String, ?> props) {
        super(new MockSchemaRegistryClient(), props);
    }
}
@Service
@EnableBinding(Sink.class)
public class SomeService {

    @KafkaListener(topics = "topic", groupId = "group-1")
    public void listen(ConsumerRecord<String, Avro> cr) {
        System.out.println(String.format("#### -> Consumed message -> %s", cr.toString()));
    }
}
My listener is never invoked when I run this test.

How to access MessageHeaders in Kafka batch listener

I have the below Autoconfiguration class for Kafka:
@Configuration
@EnableKafka
@ConditionalOnClass(KafkaReceiver.class)
@ConditionalOnProperty({"spring.kafka.bootstrap-servers"})
public class KafkaAutoConfiguration<T> {

    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    private KafkaProperties kafkaConfig;

    private String groupId;

    @Autowired
    public void setKafkaProperties(KafkaProperties properties) {
        this.kafkaConfig = properties;
    }

    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                org.apache.kafka.common.serialization.ByteArrayDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaConfig.getGroupId());
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return props;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaBatchListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setBatchListener(true);
        return factory;
    }

    @Bean
    public DefaultKafkaHeaderMapper headerMapper() {
        return new DefaultKafkaHeaderMapper();
    }

    @Bean("simpleReceiver")
    @ConditionalOnMissingBean(name = "simpleReceiver")
    @ConditionalOnProperty({"service.kafka.consumer.topics"})
    public KafkaReceiver simpleReceiver() {
        return new KafkaSimpleReceiver();
    }

    @Bean("batchReceiver")
    @ConditionalOnMissingBean(name = "batchReceiver")
    @ConditionalOnProperty({"service.kafka.consumer.batch-topics"})
    public KafkaReceiver batchReceiver() {
        return new KafkaBatchReceiver();
    }
}
Simple listener bean:
public class KafkaSimpleReceiver implements KafkaReceiver {

    @KafkaListener(topics = "#{'${service.kafka.consumer.topics}'.split(',')}", containerFactory = "kafkaListenerContainerFactory")
    public void receive(ConsumerRecord record, @Headers MessageHeaders headers) throws KafkaException {
    }
}
Batch listener bean:
public class KafkaBatchReceiver implements KafkaReceiver {

    @KafkaListener(topics = "#{'${service.kafka.consumer.batch-topics}'.split(',')}", containerFactory = "kafkaBatchListenerContainerFactory")
    public void receive(List<ConsumerRecord> records, @Headers MessageHeaders headers) throws KafkaException {
    }
}
The simple listener is working fine, but I am getting the following error for the batch listener. How can we access MessageHeaders in this case?
A parameter of type 'List<ConsumerRecord>' must be the only parameter (except for an optional 'Acknowledgment' and/or 'Consumer')
EDIT
This is what I did to convert the ConsumerRecord headers to MessageHeaders:
public void receive(List<ConsumerRecord> records) {
    for (ConsumerRecord record : records) {
        Map<String, Object> headersList = new HashMap<>();
        for (final Header h : record.headers()) {
            headersList.put(h.key(), new String(h.value()));
        }
        MessageHeaders headers = new MessageHeaders(headersList);
    }
}
You can get a list of Message<?>.
@KafkaListener(topics = "so54086076", id = "so54086076")
public void listen(List<Message<?>> records) {
    System.out.println(records.size() + ":" + records);
}
The message payloads will be the ConsumerRecord.value(); the other ConsumerRecord properties will be in the headers.
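For example, individual Kafka headers can be read from each Message via the standard KafkaHeaders constants (a minimal sketch extending the listener above):
@KafkaListener(topics = "so54086076", id = "so54086076")
public void listen(List<Message<?>> records) {
    for (Message<?> message : records) {
        // record metadata is mapped into the message headers
        String topic = (String) message.getHeaders().get(KafkaHeaders.RECEIVED_TOPIC);
        Integer partition = (Integer) message.getHeaders().get(KafkaHeaders.RECEIVED_PARTITION_ID);
        System.out.println(topic + "-" + partition + ": " + message.getPayload());
    }
}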

Get String from Spring FTP streaming Inbound Channel Adapter

I have the following code, which works OK retrieving files from the FTP server into a stream, but I need the content of each file as a String. It seems I need to use a Transformer and pass a charset, but what am I missing? How exactly do I get the String content of each transferred file?
Thanks a lot in advance
@SpringBootApplication
@EnableIntegration
public class FtpinboundApp extends SpringBootServletInitializer implements WebApplicationInitializer {

    final static Logger logger = Logger.getLogger(FtpinboundApp.class);

    public static void main(String[] args) {
        SpringApplication.run(FtpinboundApp.class, args);
    }

    @Bean
    public SessionFactory<FTPFile> ftpSessionFactory() {
        DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
        sf.setHost("X.X.X.X");
        sf.setPort(21);
        sf.setUsername("xxx");
        sf.setPassword("XXX");
        return new CachingSessionFactory<FTPFile>(sf);
    }

    @Bean
    @ServiceActivator(inputChannel = "stream")
    public MessageHandler handler() {
        return new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                System.out.println("transferred file:" + message.getPayload());
            }

        };
    }

    @Bean
    @InboundChannelAdapter(value = "stream", poller = @Poller(fixedRate = "1000"))
    public MessageSource<InputStream> ftpMessageSource() {
        FtpStreamingMessageSource messageSource = new FtpStreamingMessageSource(template(), null);
        messageSource.setRemoteDirectory("/X/X/X");
        messageSource.setFilter(new FtpPersistentAcceptOnceFileListFilter(new SimpleMetadataStore(), "streaming"));
        return messageSource;
    }

    @Bean
    @Transformer(inputChannel = "stream", outputChannel = "data")
    public org.springframework.integration.transformer.Transformer transformer() {
        return new StreamTransformer("UTF-8");
    }

    @Bean
    public FtpRemoteFileTemplate template() {
        return new FtpRemoteFileTemplate(ftpSessionFactory());
    }

    @Bean(name = PollerMetadata.DEFAULT_POLLER)
    public PollerMetadata defaultPoller() {
        PollerMetadata pollerMetadata = new PollerMetadata();
        pollerMetadata.setTrigger(new PeriodicTrigger(5000));
        return pollerMetadata;
    }
}
Use a StreamTransformer to get the whole file as a single string, or a FileSplitter to get a message for each line.
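Since the question already wires in a StreamTransformer, here is what the FileSplitter alternative could look like (a sketch; the "lines" output channel name is hypothetical):
@Bean
@Splitter(inputChannel = "stream")
public FileSplitter fileSplitter() {
    FileSplitter splitter = new FileSplitter();
    splitter.setCharset(StandardCharsets.UTF_8); // decode each line of the stream
    splitter.setOutputChannelName("lines");      // hypothetical downstream channel
    return splitter;
}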
EDIT (filter config)
@Bean
@InboundChannelAdapter(channel = "stream")
public MessageSource<InputStream> ftpMessageSource() {
    FtpStreamingMessageSource messageSource = new FtpStreamingMessageSource(template(), null);
    messageSource.setRemoteDirectory("ftpSource/");
    messageSource.setFilter(filter());
    return messageSource;
}

public FileListFilter<FTPFile> filter() {
    CompositeFileListFilter<FTPFile> filter = new CompositeFileListFilter<>();
    filter.addFilter(new FtpSimplePatternFileListFilter("*.txt"));
    filter.addFilter(acceptOnceFilter());
    return filter;
}

@Bean
public FtpPersistentAcceptOnceFileListFilter acceptOnceFilter() {
    FtpPersistentAcceptOnceFileListFilter filter = new FtpPersistentAcceptOnceFileListFilter(meta(),
            "streaming"); // keys will be, e.g. "streamingfoo.txt"
    filter.setFlushOnUpdate(true);
    return filter;
}

@Bean
public ConcurrentMetadataStore meta() {
    PropertiesPersistingMetadataStore meta = new PropertiesPersistingMetadataStore();
    meta.setBaseDirectory("/tmp/foo");
    meta.setFileName("ftpStream.properties");
    return meta;
}
EDIT2 - remove remote file with an advice
@ServiceActivator(inputChannel = "data", adviceChain = "after")
@Bean
public MessageHandler handle() {
    return System.out::println;
}

@Bean
public ExpressionEvaluatingRequestHandlerAdvice after() {
    ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
    advice.setOnSuccessExpression(
            "@template.remove(headers['file_remoteDirectory'] + headers['file_remoteFile'])");
    advice.setPropagateEvaluationFailures(true);
    return advice;
}
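In that expression, @template is a SpEL reference to the FtpRemoteFileTemplate bean, and the file_remoteDirectory and file_remoteFile headers are populated by the FtpStreamingMessageSource for each streamed file.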

HTTP-based metadata provider with SSL - Using MethodInvokingFactoryBean for Java-based configuration

I'm studying the example at https://github.com/vdenotaris/spring-boot-security-saml-sample and I don't understand why the author uses both MethodInvokingFactoryBean and TLSProtocolConfigurer in WebSecurityConfig. What is the difference between them? Essentially the same result is obtained in each separate case of creating the beans, and the MethodInvokingFactoryBean result is then overridden by TLSProtocolConfigurer.
com.vdenotaris.spring.boot.security.saml.web.config.WebSecurityConfig
@Bean
public KeyManager keyManager() {
    DefaultResourceLoader loader = new DefaultResourceLoader();
    Resource storeFile = loader
            .getResource("classpath:/saml/samlKeystore.jks");
    String storePass = "nalle123";
    Map<String, String> passwords = new HashMap<String, String>();
    passwords.put("apollo", "nalle123");
    String defaultKey = "apollo";
    return new JKSKeyManager(storeFile, storePass, passwords, defaultKey);
}

@Bean
public TLSProtocolConfigurer tlsProtocolConfigurer() {
    return new TLSProtocolConfigurer();
}

@Bean
public ProtocolSocketFactory socketFactory() {
    return new TLSProtocolSocketFactory(keyManager(), null, "default");
}

@Bean
public Protocol socketFactoryProtocol() {
    return new Protocol("https", socketFactory(), 443);
}

@Bean
public MethodInvokingFactoryBean socketFactoryInitialization() {
    MethodInvokingFactoryBean methodInvokingFactoryBean = new MethodInvokingFactoryBean();
    methodInvokingFactoryBean.setTargetClass(Protocol.class);
    methodInvokingFactoryBean.setTargetMethod("registerProtocol");
    Object[] args = {"https", socketFactoryProtocol()};
    methodInvokingFactoryBean.setArguments(args);
    return methodInvokingFactoryBean;
}
org.springframework.security.saml.trust.httpclient.TLSProtocolConfigurer
private String protocolName = "https";

private int protocolPort = 443;

private KeyManager keyManager;

private String sslHostnameVerification = "default";

private Set<String> trustedKeys;

@Override
public void afterPropertiesSet() throws Exception {
    ProtocolSocketFactory socketFactory = new TLSProtocolSocketFactory(keyManager, trustedKeys, sslHostnameVerification);
    Protocol p = new Protocol(protocolName, socketFactory, protocolPort);
    Protocol.registerProtocol(protocolName, p);
}

@Autowired
public void setKeyManager(KeyManager keyManager) {
    this.keyManager = keyManager;
}
Would you please tell me what I missed?
