Spring Integration Flow route based on file name - spring

I have a requirement to unzip a file and process its contents. Inside the zip file there can be two types of files, individual or firm, distinguishable by file name. After all files are processed, the flow should call another program module and also archive the processed file in a different location. I would like to use Spring Integration for this. I am trying to achieve it with the following code, but it causes a problem when routing based on file name. I am using JDK 8 and Spring 5.
.<File, Boolean>route(new Function<File, Boolean>() {
@Override
public Boolean apply(File f) {
return f.getName().contains("individual");
}
}, m -> m
.subFlowMapping(true, sf -> sf.gateway(individualProcessor()))
.subFlowMapping(false, sf -> sf.gateway(firmProcessor()))
)
Exception
Caused by: java.lang.IllegalArgumentException: Found ambiguous parameter type [interface java.util.function.Function] for method match: [public default <V> java.util.function.Function<V, R> java.util.function.Function.compose(java.util.function.Function<? super V, ? extends T>), public static <T> java.util.function.Function<T, T> java.util.function.Function.identity(), public java.lang.Boolean com.xxx.thirdpatysystem.config.IntegrationConfig$1.apply(java.io.File)]
at org.springframework.util.Assert.isNull(Assert.java:155) ~[spring-core-5.0.4.RELEASE.jar:5.0.4.RELEASE]
at org.springframework.integration.util.MessagingMethodInvokerHelper.findHandlerMethodsForTarget(MessagingMethodInvokerHelper.java:843) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.util.MessagingMethodInvokerHelper.<init>(MessagingMethodInvokerHelper.java:362) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.util.MessagingMethodInvokerHelper.<init>(MessagingMethodInvokerHelper.java:231) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.util.MessagingMethodInvokerHelper.<init>(MessagingMethodInvokerHelper.java:225) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.handler.MethodInvokingMessageProcessor.<init>(MethodInvokingMessageProcessor.java:60) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.router.MethodInvokingRouter.<init>(MethodInvokingRouter.java:46) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.dsl.IntegrationFlowDefinition.route(IntegrationFlowDefinition.java:1922) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.dsl.IntegrationFlowDefinition.route(IntegrationFlowDefinition.java:1895) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
I have also tried the following:
.<File, Boolean>route(f -> f.getName().contains("individual"), m -> m
.subFlowMapping(true, sf -> sf.gateway(individualProcessor()))
.subFlowMapping(false, sf -> sf.gateway(firmProcessor()))
)
Exception
Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.integration.dsl.IntegrationFlow]: Factory method 'thirdpatysystemFlow' threw exception; nested exception is java.lang.UnsupportedOperationException
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185) ~[spring-beans-5.0.4.RELEASE.jar:5.0.4.RELEASE]
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:579) ~[spring-beans-5.0.4.RELEASE.jar:5.0.4.RELEASE]
... 17 common frames omitted
Caused by: java.lang.UnsupportedOperationException: null
at org.springframework.integration.dsl.StandardIntegrationFlow.configure(StandardIntegrationFlow.java:89) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.dsl.IntegrationFlowDefinition.gateway(IntegrationFlowDefinition.java:2172) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
at org.springframework.integration.dsl.IntegrationFlowDefinition.gateway(IntegrationFlowDefinition.java:2151) ~[spring-integration-core-5.0.3.RELEASE.jar:5.0.3.RELEASE]
The entire code snippet is below:
import java.io.File;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.integration.core.MessageSource;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.Pollers;
import org.springframework.integration.dsl.channel.MessageChannels;
import org.springframework.integration.file.FileReadingMessageSource;
import org.springframework.integration.file.FileWritingMessageHandler;
import org.springframework.integration.file.filters.AcceptOnceFileListFilter;
import org.springframework.integration.file.filters.ChainFileListFilter;
import org.springframework.integration.file.filters.RegexPatternFileListFilter;
import org.springframework.integration.zip.splitter.UnZipResultSplitter;
import org.springframework.integration.zip.transformer.UnZipTransformer;
import org.springframework.integration.zip.transformer.ZipResultType;
import org.springframework.messaging.MessageHandler;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
/**
* @author dpoddar
*
*/
@Configuration
@EnableIntegration
public class IntegrationConfig {
@Value("${input.directory}")
private String inputDir;
@Value("${outputDir.directory}")
private String outputDir;
@Value("${input.scan.frequency: 100}")
private long scanFrequency;
@Bean
public MessageSource<File> inputFileSource() {
FileReadingMessageSource src = new FileReadingMessageSource();
src.setDirectory(new File(inputDir));
src.setAutoCreateDirectory(true);
ChainFileListFilter<File> chainFileListFilter = new ChainFileListFilter<>();
chainFileListFilter.addFilter(new AcceptOnceFileListFilter<>() );
chainFileListFilter.addFilter(new RegexPatternFileListFilter("(?i)^.+\\.zip$"));
src.setFilter(chainFileListFilter);
return src;
}
@Bean
public UnZipTransformer unZipTransformer() {
UnZipTransformer unZipTransformer = new UnZipTransformer();
unZipTransformer.setExpectSingleResult(false);
unZipTransformer.setZipResultType(ZipResultType.FILE);
//unZipTransformer.setWorkDirectory(new File("/usr/tmp/uncompress"));
unZipTransformer.setDeleteFiles(true);
return unZipTransformer;
}
@Bean
public UnZipResultSplitter splitter() {
UnZipResultSplitter splitter = new UnZipResultSplitter();
return splitter;
}
@Bean
public DirectChannel outputChannel() {
return new DirectChannel();
}
@Bean
public MessageHandler fileOutboundChannelAdapter() {
FileWritingMessageHandler adapter = new FileWritingMessageHandler(new File(outputDir));
adapter.setDeleteSourceFiles(true);
adapter.setAutoCreateDirectory(true);
adapter.setExpectReply(false);
return adapter;
}
@Bean
public TaskExecutor taskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(5);
executor.setMaxPoolSize(10);
executor.setQueueCapacity(25);
return executor;
}
@Autowired
DirectChannel outputChannel;
@Autowired
MessageHandler fileOutboundChannelAdapter;
@Bean
public IntegrationFlow individualProcessor() {
return flow -> flow.handle("thirdpatysystemprocessor","processfile").channel(outputChannel).handle(fileOutboundChannelAdapter);
}
@Bean
public IntegrationFlow firmProcessor() {
return flow -> flow.handle("thirdpatysystemprocessor","processfile").channel(outputChannel).handle(fileOutboundChannelAdapter);
}
@Bean
public IntegrationFlow thirdpatysystemAgentDemographicFlow() {
return IntegrationFlows
.from(inputFileSource(), spec -> spec.poller(Pollers.fixedDelay(scanFrequency,TimeUnit.SECONDS)))
.transform(unZipTransformer())
.split(splitter())
.channel(MessageChannels.executor(taskExecutor()))
.<File, Boolean>route(new Function<File, Boolean>() {
@Override
public Boolean apply(File f) {
return f.getName().contains("individual");
}
}, m -> m
.subFlowMapping(true, sf -> sf.gateway(individualProcessor()))
.subFlowMapping(false, sf -> sf.gateway(firmProcessor()))
)
.aggregate()
/*.handle("thirdpatysystemprocessor","processfile")
.channel(outputChannel())
.handle(fileOutboundChannelAdapter())*/
.get()
;
}
}

The java.lang.IllegalArgumentException: Found ambiguous parameter type [interface java.util.function.Function] has been fixed in Spring Integration 5.0.5: https://jira.spring.io/browse/INT-4456. So now, with an explicit Function implementation, we do this:
MethodInvokingRouter methodInvokingRouter = isLambda(router)
? new MethodInvokingRouter(new LambdaMessageProcessor(router, payloadType))
: new MethodInvokingRouter(router, ClassUtils.FUNCTION_APPLY_METHOD);
We explicitly point to the apply() method.
The re-use of existing IntegrationFlow beans in sub-flows (gateway()) has been fixed in version 5.0.4: https://jira.spring.io/browse/INT-4434
So, all you need is to upgrade your project to the latest dependencies, in particular Spring Integration 5.0.7: https://spring.io/projects/spring-integration#learn
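If an immediate upgrade is not an option, one possible workaround (a sketch, not tested against 5.0.3; the channel names are hypothetical) is to keep the lambda router, which avoids the ambiguous Function method resolution, and map to named channels instead of gateway() sub-flows, which sidesteps the IntegrationFlow re-use issue:
.<File, Boolean>route(f -> f.getName().contains("individual"), m -> m
        .channelMapping(true, "individualChannel")
        .channelMapping(false, "firmChannel"))
Each processor flow would then start from its own channel, e.g. IntegrationFlows.from("individualChannel")..., instead of being attached via gateway().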

Related

Handling Spring Batch flow job

Situation:
There are batch steps named A, B, C, D, E.
A is always executed first.
Depending on A's exit status: if A gives "SOMECONDITION", B is executed; on "OTHERCONDITION", C is executed. Job instances are not allowed to execute both B and C.
Regardless of whether B or C was executed, the flow should then go to D.
After executing D, E is executed by using next().
Problem: ugly code structure.
By redundant use of on() and to(), I managed to figure out the flow that I need.
As is directly observable, there is really ugly code: a next() on each on()-to() pair to direct them into the common flow.
Adding more ugliness, I have to use from()-on("*") to direct each on-to-next into some common flow.
Question:
At least this works, but I wonder: is this the only code that satisfies the above conditions?
Problem: ugly code structure.
Adding more ugliness, I have to [...]
We, the Spring team, are always open to constructive feedback. So if you have ideas on how to improve the DSL of defining job flows, you are welcome to open a ticket on our issue tracker and we will do our best to improve things if needed.
Now, to answer your question: the next(...) statements after each on("..").to("..") are not necessary. This is not what the documentation says; see the example here: Conditional Flow.
According to your description, the flow to implement is a conditional flow (A -> decider -> B or C) followed by a sequential flow (D -> E). The way I would define such a flow is something like this:
@Bean
public Job job() {
Flow conditionalFlow = new FlowBuilder<SimpleFlow>("conditionalFlow")
.start(stepA())
.next(decider()).on("SOMECONDITION").to(stepB())
.from(decider()).on("OTHERCONDITION").to(stepC())
.end();
return new JobBuilder("myJob", jobRepository)
.start(conditionalFlow)
.next(stepD())
.next(stepE())
.end()
.build();
}
This way, the flows are clearly defined. Here is a complete sample that you can run and check the results (make sure to use Spring Batch 5 and to have HSQLDB in the classpath):
package org.springframework.batch.sample;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.job.builder.FlowBuilder;
import org.springframework.batch.core.job.builder.JobBuilder;
import org.springframework.batch.core.job.flow.Flow;
import org.springframework.batch.core.job.flow.FlowExecutionStatus;
import org.springframework.batch.core.job.flow.JobExecutionDecider;
import org.springframework.batch.core.job.flow.support.SimpleFlow;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.step.builder.StepBuilder;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder;
import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType;
import org.springframework.jdbc.support.JdbcTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
@Configuration
@EnableBatchProcessing
public class MyJobConfiguration {
private final JobRepository jobRepository;
private final PlatformTransactionManager transactionManager;
public MyJobConfiguration(JobRepository jobRepository, PlatformTransactionManager transactionManager) {
this.jobRepository = jobRepository;
this.transactionManager = transactionManager;
}
private Step createStep(String stepName) {
return new StepBuilder("step" + stepName, jobRepository)
.tasklet((contribution, chunkContext) -> {
System.out.println("running step " + stepName);
return RepeatStatus.FINISHED;
}, transactionManager)
.build();
}
@Bean
public Step stepA() {
return createStep("A");
}
@Bean
public Step stepB() {
return createStep("B");
}
@Bean
public Step stepC() {
return createStep("C");
}
@Bean
public Step stepD() {
return createStep("D");
}
@Bean
public Step stepE() {
return createStep("E");
}
@Bean
public JobExecutionDecider decider() {
return (jobExecution, stepExecution) -> {
return new FlowExecutionStatus("SOMECONDITION"); // SOMECONDITION, OTHERCONDITION
};
}
@Bean
public Job job() {
Flow conditionalFlow = new FlowBuilder<SimpleFlow>("conditionalFlow")
.start(stepA())
.next(decider()).on("SOMECONDITION").to(stepB())
.from(decider()).on("OTHERCONDITION").to(stepC())
.end();
return new JobBuilder("myJob", jobRepository)
.start(conditionalFlow)
.next(stepD())
.next(stepE())
.end()
.build();
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
// infrastructure beans
static class DataSourceConfiguration {
@Bean
public DataSource dataSource() {
return new EmbeddedDatabaseBuilder()
.setType(EmbeddedDatabaseType.HSQL)
.addScript("/org/springframework/batch/core/schema-hsqldb.sql")
.build();
}
@Bean
public JdbcTransactionManager transactionManager(DataSource dataSource) {
return new JdbcTransactionManager(dataSource);
}
}
}
This sample prints:
running step A
running step B
running step D
running step E
If you make the decider return "OTHERCONDITION", the sample prints:
running step A
running step C
running step D
running step E
which is what is required from your description.

Spring integration email pop3 inbound adapter not working/starting

Using Java Spring Integration, I have written the below code to read email from Gmail.
As per the logs, it seems the connection with Gmail is established, but on a new email it does not read it and does not go into the handle() method. Please help.
o.s.i.m.AbstractMailReceiver - attempting to receive mail from folder [INBOX]
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.Pollers;
import org.springframework.integration.mail.MailReceiver;
import org.springframework.integration.mail.dsl.Mail;
import org.springframework.integration.mail.support.DefaultMailHeaderMapper;
import org.springframework.integration.mapping.HeaderMapper;
import org.springframework.messaging.Message;
import org.springframework.messaging.PollableChannel;
import javax.mail.internet.MimeMessage;
@Log4j2
@Configuration
@EnableIntegration
public class EmailReceiver {
@Autowired
private PollableChannel pop3Channel;
@Bean
public PollableChannel receivedChannel() {
return new QueueChannel();
}
@Bean
public IntegrationFlow pop3MailFlow() {
return IntegrationFlows
.from(Mail.pop3InboundAdapter("pop.gmail.com", 995, "userName", "password")
.javaMailProperties(p -> {
p.put("mail.debug", "true");
p.put("mail.pop3.socketFactory.fallback", "false");
p.put("mail.pop3.port", 995);
p.put("mail.pop3.socketFactory.class", "javax.net.ssl.SSLSocketFactory");
p.put("mail.pop3.socketFactory.port", 995);
})
.headerMapper(mailHeaderMapper()),
e -> e.poller(Pollers.fixedRate(5000).maxMessagesPerPoll(1)))
.handle((payload, header) -> logMail(payload))
.get();
}
public Message logMail(Object payload) {
Message message = (Message)payload;
log.info("*******Email[TEST]********* ", payload);
return message;
}
@Bean
public HeaderMapper<MimeMessage> mailHeaderMapper() {
return new DefaultMailHeaderMapper();
}
}
The problem is a missing fetch size limit here:
.maxFetchSize(1)
By default the AbstractMailReceiver tries to fetch all the messages from the mailbox, and it looks like that takes a lot of time.
Another problem is that with .headerMapper(mailHeaderMapper()) a Mail.pop3InboundAdapter does not produce a Message but rather a byte[]. So your .handle((payload, header) -> logMail(payload)) is not only bad as a request-reply definition at the end of the flow, but it also fails with a ClassCastException like this, because you expect a type which is not produced for you:
Caused by: java.lang.ClassCastException: class [B cannot be cast to class org.springframework.messaging.Message ([B is in module java.base of loader 'bootstrap'; org.springframework.messaging.Message is in unnamed module of loader 'app')
at com.firm.demo.EmailReceiver.logMail(EmailReceiver.java:59)
at com.firm.demo.EmailReceiver.lambda$pop3MailFlow$2(EmailReceiver.java:53)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
Well, no: better to say you are missing the fact that the .handle((payload, header) -> logMail(payload)) signature deals with the payload, not the whole message. So your expectations in logMail() are wrong; the payload has to be a byte[]. And consider using a one-way handler at the end of the flow anyway.
UPDATE
The working code is like this:
@Log4j2
@Configuration
@EnableIntegration
public class EmailReceiver {
@Autowired
private PollableChannel pop3Channel;
private MailReceiver receiver;
@Bean
public PollableChannel receivedChannel() {
return new QueueChannel();
}
@Bean
public IntegrationFlow pop3MailFlow() {
return IntegrationFlows
.from(Mail.pop3InboundAdapter("pop.gmail.com", 995, "userName", "password")
.javaMailProperties(p -> {
p.put("mail.debug", "false");
p.put("mail.pop3.socketFactory.fallback", "false");
p.put("mail.pop3.port", 995);
p.put("mail.pop3.socketFactory.class", "javax.net.ssl.SSLSocketFactory");
p.put("mail.pop3.socketFactory.port", 995);
})
.maxFetchSize(1)
.headerMapper(mailHeaderMapper()),
e -> e.poller(Pollers.fixedRate(5000).maxMessagesPerPoll(1)))
.handle(this, "logMail")
.get();
}
public void logMail(String payload) {
log.info("*******Email[TEST]********* \n" + payload);
}
@Bean
public HeaderMapper<MimeMessage> mailHeaderMapper() {
return new DefaultMailHeaderMapper();
}
}

Kafka Consumer Unable To Resolve Listener Method Intermittently

I have been facing the exception below on the Kafka consumer side. Surprisingly, this issue is not consistent, and an older version of the code (with the exact same configuration, just without some new unrelated features) works as expected. Could anyone help in determining what could be causing this?
[ERROR][938f3c68-f481-4224-b2c6-43af5fb27ada-0-C-1][org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer] - Error handler threw an exception
org.springframework.kafka.KafkaException: Seek to current after exception; nested exception is org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method could not be invoked with the incoming message
Endpoint handler details:
Method [public void com.mycompany.listener.KafkaBatchListener.onMessage(java.lang.Object,org.springframework.kafka.support.Acknowledgment)]
Bean [com.mycompany.listener.KafkaBatchListener#7a59780b]; nested exception is org.springframework.messaging.handler.invocation.MethodArgumentResolutionException: Could not resolve method parameter at index 0 in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): Could not resolve parameter [0] in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): No suitable resolver, failedMessage=GenericMessage [payload=[[B#21bc784f, MyPOJO(), [B#33bb5851], headers={kafka_offset=[4046, 4047, 4048], kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#4871203f, kafka_timestampType=[CREATE_TIME, CREATE_TIME, CREATE_TIME], kafka_receivedPartitionId=[0, 0, 0], kafka_receivedMessageKey=[[B#295620f1, MyPOJOKey(id=0), [B#5d3d6361], kafka_batchConvertedHeaders=[{myFirstHeader=[B#1f011689, myUUIDHeader=[B#7691bce8, myMetadataHeader=[B#6e585b63, myRequestIdHeader=[B#58c81ba2, myMetricsHeader=[B#4f6aeb6c, myTargetHeader=[B#34677895}, {myUUIDHeader=[B#1848ae39, myMetadataHeader=[B#c5b399, myRequestIdHeader=[B#186c1966, myMetricsHeader=[B#1740692e, myTargetHeader=[B#4a242499}, {myUUIDHeader=[B#67d01f3f, myMetadataHeader=[B#1f0f9d8a, myRequestIdHeader=[B#b928e5c, isLastMessage=[B#6079735b, myMetricsHeader=[B#7b7b18c, myTargetHeader=[B#64378f3d}], kafka_receivedTopic=[my_topic, my_topic, my_topic], kafka_receivedTimestamp=[1623420136620, 1623420137255, 1623420137576], kafka_acknowledgment=Acknowledgment for org.apache.kafka.clients.consumer.ConsumerRecords#7bc81d89, kafka_groupId=dev-consumer-grp}]
at org.springframework.kafka.listener.SeekToCurrentBatchErrorHandler.handle(SeekToCurrentBatchErrorHandler.java:77) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.RecoveringBatchErrorHandler.handle(RecoveringBatchErrorHandler.java:124) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.ContainerAwareBatchErrorHandler.handle(ContainerAwareBatchErrorHandler.java:56) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchErrorHandler(KafkaMessageListenerContainer.java:2010) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:1854) [spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListener(KafkaMessageListenerContainer.java:1720) [spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:1699) [spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeIfHaveRecords(KafkaMessageListenerContainer.java:1272) [spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1264) [spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1161) [spring-kafka-2.7.1.jar:2.7.1]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [?:?]
at java.util.concurrent.FutureTask.run(FutureTask.java:264) [?:?]
at java.lang.Thread.run(Thread.java:834) [?:?]
Caused by: org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method could not be invoked with the incoming message
Endpoint handler details:
Method [public void com.mycompany.listener.KafkaBatchListener.onMessage(java.lang.Object,org.springframework.kafka.support.Acknowledgment)]
Bean [com.mycompany.listener.KafkaBatchListener#7a59780b]; nested exception is org.springframework.messaging.handler.invocation.MethodArgumentResolutionException: Could not resolve method parameter at index 0 in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): Could not resolve parameter [0] in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): No suitable resolver, failedMessage=GenericMessage [payload=[[B#21bc784f, MyPOJO(), [B#33bb5851], headers={kafka_offset=[4046, 4047, 4048], kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#4871203f, kafka_timestampType=[CREATE_TIME, CREATE_TIME, CREATE_TIME], kafka_receivedPartitionId=[0, 0, 0], kafka_receivedMessageKey=[[B#295620f1, MyPOJOKey(id=0), [B#5d3d6361], kafka_batchConvertedHeaders=[{myFirstHeader=[B#1f011689, myUUIDHeader=[B#7691bce8, myMetadataHeader=[B#6e585b63, myRequestIdHeader=[B#58c81ba2, myMetricsHeader=[B#4f6aeb6c, myTargetHeader=[B#34677895}, {myUUIDHeader=[B#1848ae39, myMetadataHeader=[B#c5b399, myRequestIdHeader=[B#186c1966, myMetricsHeader=[B#1740692e, myTargetHeader=[B#4a242499}, {myUUIDHeader=[B#67d01f3f, myMetadataHeader=[B#1f0f9d8a, myRequestIdHeader=[B#b928e5c, isLastMessage=[B#6079735b, myMetricsHeader=[B#7b7b18c, myTargetHeader=[B#64378f3d}], kafka_receivedTopic=[my_topic, my_topic, my_topic], kafka_receivedTimestamp=[1623420136620, 1623420137255, 1623420137576], kafka_acknowledgment=Acknowledgment for org.apache.kafka.clients.consumer.ConsumerRecords#7bc81d89, kafka_groupId=dev-consumer-grp}]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.decorateException(KafkaMessageListenerContainer.java:2367) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:2003) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessageWithRecordsOrList(KafkaMessageListenerContainer.java:1973) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1925) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:1837) ~[spring-kafka-2.7.1.jar:2.7.1]
... 8 more
Caused by: org.springframework.messaging.handler.invocation.MethodArgumentResolutionException: Could not resolve method parameter at index 0 in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): Could not resolve parameter [0] in public void com.mycompany.listener.KafkaBatchListener.onMessage(java.util.List<org.apache.kafka.clients.consumer.ConsumerRecord<K, V>>,org.springframework.kafka.support.Acknowledgment): No suitable resolver
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.getMethodArgumentValues(InvocableHandlerMethod.java:145) ~[spring-messaging-5.2.12.RELEASE.jar:5.2.12.RELEASE]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:116) ~[spring-messaging-5.2.12.RELEASE.jar:5.2.12.RELEASE]
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:56) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:339) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:180) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:172) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:61) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1983) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessageWithRecordsOrList(KafkaMessageListenerContainer.java:1973) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1925) ~[spring-kafka-2.7.1.jar:2.7.1]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:1837) ~[spring-kafka-2.7.1.jar:2.7.1]
... 8 more
My app uses the following:
A custom listener class com.mycompany.listener.KafkaBatchListener<K, V>, which implements org.springframework.kafka.listener.BatchAcknowledgingMessageListener<K, V> and overrides onMessage(List<ConsumerRecord<K, V>> consumerRecords, Acknowledgment acknowledgment), marked with a custom marker annotation @MyKafkaListener
A custom container factory which extends org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory<K, V> and configures setConsumerFactory(consumerFactory), setBatchErrorHandler(errorHandler), setBatchListener(true) and ContainerProperties.setOnlyLogRecordMetadata(true).
A Spring Boot @Configuration class which implements org.springframework.kafka.annotation.KafkaListenerConfigurer and is responsible for configuring org.springframework.kafka.core.DefaultKafkaConsumerFactory<K, V>, org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory<K, V> and org.springframework.kafka.config.MethodKafkaListenerEndpoint<String, String> (used by @MyKafkaListener)
Spring Kafka 2.7.1
Additional query:
Even though ContainerProperties.setOnlyLogRecordMetadata(true) is set, the exception stacktrace still contains the full payload which I have omitted. Any idea why?
Thanks in advance!
UPDATE:
KafkaBatchListener
package com.mycompany.listener;
import java.util.List;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.BatchAcknowledgingMessageListener;
import org.springframework.kafka.support.Acknowledgment;
public class KafkaBatchListener<K, V> implements BatchAcknowledgingMessageListener<K, V> {
@Override
@com.mycompany.listener.KafkaListener
public void onMessage(final List<ConsumerRecord<K, V>> consumerRecords, final Acknowledgment acknowledgment) {
// process batch using MyService<K, V>.process(consumerRecords)
acknowledgment.acknowledge();
}
}
Custom Annotation
package com.mycompany.listener;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
@Retention(RUNTIME)
@Target(METHOD)
public @interface KafkaListener {
}
Listener Container Factory
package com.mycompany.factory;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import com.mycompany.errorhandler.ListenerContainerRecoveringBatchErrorHandler;
public class KafkaBatchListenerContainerFactory<K, V>
extends ConcurrentKafkaListenerContainerFactory<K, V> {
public KafkaBatchListenerContainerFactory(final DefaultKafkaConsumerFactory<K, V> consumerFactory,
final ListenerContainerRecoveringBatchErrorHandler errorHandler, final int concurrency) {
super.setConsumerFactory(consumerFactory);
super.setBatchErrorHandler(errorHandler);
super.setConcurrency(concurrency);
super.setBatchListener(true);
super.setAutoStartup(true);
final ContainerProperties containerProperties = super.getContainerProperties();
containerProperties.setAckMode(ContainerProperties.AckMode.MANUAL);
containerProperties.setOnlyLogRecordMetadata(true);
}
}
Batch Error Handler
package com.mycompany.errorhandler;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.listener.RecoveringBatchErrorHandler;
import org.springframework.stereotype.Component;
import org.springframework.util.backoff.FixedBackOff;
@Component
public class ListenerContainerRecoveringBatchErrorHandler extends RecoveringBatchErrorHandler {
public ListenerContainerRecoveringBatchErrorHandler(
@Value("${spring.kafka.consumer.properties.backOffMS:0}") final int backOffTimeMS,
@Value("${spring.kafka.consumer.properties.retries:3}") final int retries) {
super(new FixedBackOff(backOffTimeMS, retries));
}
}
Kafka Listener Configurer
package com.mycompany.config;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListenerConfigurer;
import org.springframework.kafka.config.KafkaListenerEndpointRegistrar;
import org.springframework.kafka.config.MethodKafkaListenerEndpoint;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.messaging.handler.annotation.support.DefaultMessageHandlerMethodFactory;
import com.mycompany.errorhandler.ListenerContainerRecoveringBatchErrorHandler;
import com.mycompany.factory.KafkaBatchListenerContainerFactory;
import com.mycompany.listener.KafkaBatchListener;
@Configuration
public class KafkaBatchListenerConfigurer<K, V> implements KafkaListenerConfigurer {
private final List<KafkaBatchListener<K, V>> listeners;
private final BeanFactory beanFactory;
private final ListenerContainerRecoveringBatchErrorHandler errorHandler;
private final int concurrency;
@Autowired
public KafkaBatchListenerConfigurer(final List<KafkaBatchListener<K, V>> listeners, final BeanFactory beanFactory,
final ListenerContainerRecoveringBatchErrorHandler errorHandler,
@Value("${spring.kafka.listener.concurrency:1}") final int concurrency) {
this.listeners = listeners;
this.beanFactory = beanFactory;
this.errorHandler = errorHandler;
this.concurrency = concurrency;
}
@Override
public void configureKafkaListeners(final KafkaListenerEndpointRegistrar registrar) {
final Method listenerMethod = lookUpBatchListenerMethod();
listeners.forEach(listener -> {
registerListenerEndpoint(listener, listenerMethod, registrar);
});
}
private void registerListenerEndpoint(final KafkaBatchListener<K, V> listener, final Method listenerMethod,
final KafkaListenerEndpointRegistrar registrar) {
// final Map<String, Object> consumerConfig = get ConsumerConfig from a custom provider;
registrar.setContainerFactory(createContainerFactory(consumerConfig));
registrar.registerEndpoint(createListenerEndpoint(listener, listenerMethod, consumerConfig));
}
private KafkaBatchListenerContainerFactory<K, V> createContainerFactory(final Map<String, Object> consumerConfig) {
final DefaultKafkaConsumerFactory<K, V> consumerFactory = new DefaultKafkaConsumerFactory<>(consumerConfig);
final KafkaBatchListenerContainerFactory<K, V> containerFactory = new KafkaBatchListenerContainerFactory<>(
consumerFactory, errorHandler, concurrency);
return containerFactory;
}
private MethodKafkaListenerEndpoint<String, String> createListenerEndpoint(final KafkaBatchListener<K, V> listener,
final Method listenerMethod, final Map<String, Object> consumerConfig) {
final MethodKafkaListenerEndpoint<String, String> endpoint = new MethodKafkaListenerEndpoint<>();
endpoint.setId(UUID.randomUUID().toString());
endpoint.setBean(listener);
endpoint.setMethod(listenerMethod);
endpoint.setBeanFactory(beanFactory);
endpoint.setGroupId("my-group-id");
endpoint.setMessageHandlerMethodFactory(new DefaultMessageHandlerMethodFactory());
// final String topicName = get TopicName for this key-value from a custom utility;
endpoint.setTopics(topicName);
final Properties properties = new Properties();
properties.putAll(consumerConfig);
endpoint.setConsumerProperties(properties);
return endpoint;
}
private Method lookUpBatchListenerMethod() {
return Arrays.stream(com.mycompany.listener.KafkaBatchListener.class.getMethods())
.filter(m -> m.isAnnotationPresent(com.mycompany.listener.KafkaListener.class))
.findAny()
.orElseThrow(() -> new IllegalStateException(
String.format("[%s] class should have at least 1 method with [%s] annotation.",
com.mycompany.listener.KafkaBatchListener.class.getCanonicalName(),
com.mycompany.listener.KafkaListener.class.getCanonicalName())));
}
}
You don't need all the standard @KafkaListener method-invoking infrastructure when your listener already implements one of the message listener interfaces; instead of registering endpoints for each listener, just create a container for each from the factory and add the listener to the container properties:
ConcurrentMessageListenerContainer<K, V> container = containerFactory.createContainer("topic1");
container.getContainerProperties().set...
...
container.getContainerProperties().setMessageListener(myListenerInstance);
...
container.start();
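As a fuller sketch of that approach (the bean wiring is hypothetical; the topic and group names are borrowed from the question's log output), the container can itself be declared as a bean, in which case the application context starts it automatically and no explicit start() call is needed:
@Bean
public ConcurrentMessageListenerContainer<String, String> batchListenerContainer(
        ConcurrentKafkaListenerContainerFactory<String, String> containerFactory,
        KafkaBatchListener<String, String> listener) {
    ConcurrentMessageListenerContainer<String, String> container =
            containerFactory.createContainer("my_topic");
    container.getContainerProperties().setGroupId("dev-consumer-grp");
    // Attach the listener instance directly; no MethodKafkaListenerEndpoint registration is required.
    container.getContainerProperties().setMessageListener(listener);
    return container;
}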

Configuring Spring WebFlux WebClient to use a custom thread pool

Is it possible to configure WebClient to use a custom thread pool other than the reactor-http-nio thread pool (when using Netty)? If it is possible, can we somehow restrict that custom thread pool to run only on a particular processor core?
Yes, you can.
Create your own thread pool and EventLoopGroup somewhere (or create a NioEventLoopGroup bean). For example:
{
Integer THREADS = 10;
BasicThreadFactory THREADFACTORY = new BasicThreadFactory.Builder()
.namingPattern("HttpThread-%d")
.daemon(true)
.priority(Thread.MAX_PRIORITY)
.build();
ExecutorService EXECUTOR = new ThreadPoolExecutor(
THREADS,
THREADS,
0L,
TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<>(),
THREADFACTORY,
new ThreadPoolExecutor.AbortPolicy());
NioEventLoopGroup RESOURCE = new NioEventLoopGroup(THREADS, EXECUTOR);
}
Register your own ReactorResourceFactory and provide your own EventLoopGroup based on the custom thread executor:
@Bean
public ReactorResourceFactory reactorResourceFactory(NioEventLoopGroup RESOURCE) {
ReactorResourceFactory f = new ReactorResourceFactory();
f.setLoopResources(new LoopResources() {
@Override
public EventLoopGroup onServer(boolean b) {
return RESOURCE;
}
});
f.setUseGlobalResources(false);
return f;
}
Then register a ReactorClientHttpConnector. The example below uses a custom SSL context:
@Bean
public ReactorClientHttpConnector reactorClientHttpConnector(ReactorResourceFactory r) throws SSLException {
SslContext sslContext = SslContextBuilder
.forClient()
.trustManager(InsecureTrustManagerFactory.INSTANCE)
.build();
return new ReactorClientHttpConnector(r, m -> m.secure(t -> t.sslContext(sslContext)));
}
Finally, build the WebClient:
@Bean
public WebClient webClient(ReactorClientHttpConnector r) {
return WebClient.builder().clientConnector(r).build();
}
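A quick usage sketch (the URL is hypothetical) to verify that requests run on the custom pool; the thread name printed in doOnNext should follow the HttpThread-%d naming pattern configured above:
webClient.get()
        .uri("https://example.org/")
        .retrieve()
        .bodyToMono(String.class)
        .doOnNext(body -> System.out.println("Handled on " + Thread.currentThread().getName()))
        .block();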
If you want the same for the web server, do the same configuration for a ReactiveWebServerFactory:
@Bean
public ReactiveWebServerFactory reactiveWebServerFactory(NioEventLoopGroup RESOURCE) {
NettyReactiveWebServerFactory factory = new NettyReactiveWebServerFactory();
factory.addServerCustomizers(hs->hs.tcpConfiguration(s->s.runOn(RESOURCE)));
return factory;
}
Imports:
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.DependsOn;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.http.client.reactive.ReactorResourceFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.netty.resources.LoopResources;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.springframework.boot.web.embedded.netty.NettyReactiveWebServerFactory;
import org.springframework.boot.web.reactive.server.ReactiveWebServerFactory;
import java.util.concurrent.*;

Spring Integration HTTP Outbound Gateway Post Request with Java DSL

I am trying to consume a REST service, receive JSON back, and convert it to a list of objects, but I am receiving the error below. I am new to EIP and there aren't many tutorials for doing this in the Java DSL. I have configured two channels: one for sending the request and one for receiving the payload back.
Exception in thread "main" org.springframework.beans.factory.BeanNotOfRequiredTypeException: Bean named 'httpPostAtms' is expected to be of type 'org.springframework.messaging.MessageChannel' but was actually of type 'org.springframework.integration.dsl.StandardIntegrationFlow'
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:378)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:202)
at org.springframework.integration.support.channel.BeanFactoryChannelResolver.resolveDestination(BeanFactoryChannelResolver.java:89)
at org.springframework.integration.support.channel.BeanFactoryChannelResolver.resolveDestination(BeanFactoryChannelResolver.java:46)
at org.springframework.integration.gateway.MessagingGatewaySupport.getRequestChannel(MessagingGatewaySupport.java:344)
at org.springframework.integration.gateway.MessagingGatewaySupport.doSendAndReceive(MessagingGatewaySupport.java:433)
at org.springframework.integration.gateway.MessagingGatewaySupport.sendAndReceive(MessagingGatewaySupport.java:422)
at org.springframework.integration.gateway.GatewayProxyFactoryBean.invokeGatewayMethod(GatewayProxyFactoryBean.java:474)
at org.springframework.integration.gateway.GatewayProxyFactoryBean.doInvoke(GatewayProxyFactoryBean.java:429)
at org.springframework.integration.gateway.GatewayProxyFactoryBean.invoke(GatewayProxyFactoryBean.java:420)
at org.springframework.integration.gateway.GatewayCompletableFutureProxyFactoryBean.invoke(GatewayCompletableFutureProxyFactoryBean.java:65)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:213)
at com.sun.proxy.$Proxy70.getAllAtms(Unknown Source)
at com.backbase.atm.IngAtmApplication.main(IngAtmApplication.java:25)
I am using Spring Integration with Spring Boot.
@IntegrationComponentScan
@Configuration
@EnableIntegration
@ComponentScan
public class InfrastructorConfig {
@Bean
public PollableChannel requestChannel() {
return new PriorityChannel() ;
}
@Bean
public MessageChannel replyChannel() {
return new DirectChannel() ;
}
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata poller() {
return Pollers.fixedRate(500).get();
}
@Bean
public IntegrationFlow httpPostAtms() {
return IntegrationFlows.from("requestChannel")
.handle(Http.outboundGateway("https://www.ing.nl/api/locator/atms/")
.httpMethod(HttpMethod.POST)
.extractPayload(true))
.<String, String>transform(p -> p.substring(5))
.transform(Transformers.fromJson(Atm[].class))
.channel("responseChannel")
.get();
}
}
The Gateway
package com.backbase.atm.service;
import java.util.List;
import org.springframework.integration.annotation.Gateway;
import org.springframework.integration.annotation.MessagingGateway;
import org.springframework.messaging.handler.annotation.Payload;
import com.backbase.atm.model.Atm;
@MessagingGateway
public interface IntegrationService {
@Gateway(requestChannel = "httpPostAtms")
@Payload("new java.util.Date()")
List<Atm> getAllAtms();
}
Application Start
package com.backbase.atm;
import java.util.ArrayList;
import java.util.List;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import com.backbase.atm.service.IntegrationService;
@SpringBootApplication
public class IngAtmApplication {
public static void main(String[] args) {
ConfigurableApplicationContext ctx = SpringApplication.run(IngAtmApplication.class, args);
ctx.getBean(IntegrationService.class).getAllAtms();
ctx.close();
}
}
You have to use the requestChannel bean name in the gateway definition. Right now you have an IntegrationFlow bean name there, and that is wrong.
Always remember that everything in Spring Integration is connected via channels.
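A corrected sketch of the gateway (the requestChannel name comes from the question's flow definition; mapping the reply to "responseChannel" is an assumption based on the flow ending with .channel("responseChannel")):
@MessagingGateway
public interface IntegrationService {

    @Gateway(requestChannel = "requestChannel", replyChannel = "responseChannel")
    @Payload("new java.util.Date()")
    List<Atm> getAllAtms();
}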
