Spring splitter/aggregator handling exceptions

Version : spring-integration-core - 2.2.3
Here is the simplified version of my splitter/aggregator setup.
<task:executor id="taskExecutor" pool-size="${pool.size}"
queue-capacity="${queue.capacity}"
rejection-policy="CALLER_RUNS" keep-alive="120"/>
<int:channel id="service-requests"/>
<int:channel id="service-request"/>
<int:channel id="channel-1">
<int:dispatcher task-executor="taskExecutor" failover="false"/>
</int:channel>
<int:channel id="channel-2">
<int:dispatcher task-executor="taskExecutor" failover="false"/>
</int:channel>
<int:gateway id="myServiceRequestor" default-reply-timeout="${reply.timeout}"
default-reply-channel="service-aggregated-reply"
default-request-channel="service-request"
service-interface="com.blah.blah.MyServiceRequestor"/>
<int:splitter input-channel="service-request"
ref="serviceSplitter" output-channel="service-requests"/>
<!-- To split the request and return a java.util.Collection of Type1 and Type2 -->
<bean id="serviceSplitter" class="com.blah.blah.ServiceSplitter"/>
<int:payload-type-router input-channel="service-requests" resolution-required="true">
<int:mapping
type="com.blah.blah.Type1"
channel="channel-1"/>
<int:mapping
type="com.blah.blah.Type2"
channel="channel-2"/>
</int:payload-type-router>
<!-- myService is a bean whose processType1 & processType2 methods process the payload -->
<int:service-activator input-channel="channel-1"
method="processType1" output-channel="service-reply" requires-reply="true"
ref="myService"/>
<int:service-activator input-channel="channel-2"
method="processType2" output-channel="service-reply" requires-reply="true"
ref="myService"/>
<int:publish-subscribe-channel id="service-reply" task-executor="taskExecutor"/>
<!-- myServiceAggregator has an aggregate method which takes a Collection as an argument (the aggregated responses from myService) -->
<int:aggregator input-channel="service-reply"
method="aggregate" ref="myServiceAggregator"
output-channel="service-aggregated-reply"
send-partial-result-on-expiry="false"
message-store="myResultMessageStore"
expire-groups-upon-completion="true"/>
<bean id="myResultMessageStore" class="org.springframework.integration.store.SimpleMessageStore" />
<bean id="myResultMessageStoreReaper" class="org.springframework.integration.store.MessageGroupStoreReaper">
<property name="messageGroupStore" ref="myResultMessageStore" />
<property name="timeout" value="2000" />
</bean>
<task:scheduled-tasks>
<task:scheduled ref="myResultMessageStoreReaper" method="run" fixed-rate="10000" />
</task:scheduled-tasks>
If the processType1/processType2 method in myService throws a RuntimeException, it tries to send the message to an error channel (I believe Spring does this by default), and the message payload on the error channel stays in the heap and never gets garbage collected.
Update (more info):
Regarding my comment on the error channel: I debugged the code and found that ErrorHandlingTaskExecutor uses a MessagePublishingErrorHandler, which in turn sends the message to the channel returned by the MessagePublishingErrorHandler.resolveErrorChannel method.
Code snippet from ErrorHandlingTaskExecutor.java
public void execute(final Runnable task) {
this.executor.execute(new Runnable() {
public void run() {
try {
task.run();
}
catch (Throwable t) {
errorHandler.handleError(t); // This is the part which sends the message into the error channel.
}
}
});
}
Code snippet from MessagePublishingErrorHandler.java
public final void handleError(Throwable t) {
MessageChannel errorChannel = this.resolveErrorChannel(t);
boolean sent = false;
if (errorChannel != null) {
try {
if (this.sendTimeout >= 0) {
sent = errorChannel.send(new ErrorMessage(t), this.sendTimeout);
.....
When I take a heap dump, I always see a reference to the payload message (which I believe is held in the channel above), and it never gets GC'ed.
I would like to know the correct way to handle this case, or whether I'm missing anything in my config.
Also, is it possible to tell Spring to discard the payload (instead of sending it to the error channel) when the service activator method throws an exception?
Looking forward to your inputs.
Thanks.

You don't have an error-channel defined on your gateway so we won't send it there, we'll just throw an exception to the caller.
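For reference, if you did want failures routed to a channel instead of being thrown to the caller, the gateway supports an error-channel attribute; a minimal sketch (the gatewayErrors channel name is made up for illustration):
<int:gateway id="myServiceRequestor" default-reply-timeout="${reply.timeout}"
    default-reply-channel="service-aggregated-reply"
    default-request-channel="service-request"
    error-channel="gatewayErrors"
    service-interface="com.blah.blah.MyServiceRequestor"/>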
However, the partial group is sitting in the aggregator and will never complete. You need to configure a MessageGroupStoreReaper as shown in the reference manual (or set a group-timeout in Spring Integration 4.0.x) to discard the partial group.
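A minimal sketch of that (group-timeout requires Spring Integration 4.0.x; on 2.2.3 the MessageGroupStoreReaper you already have does the expiring, and the discardedPartials channel name is made up for illustration):
<int:aggregator input-channel="service-reply"
    method="aggregate" ref="myServiceAggregator"
    output-channel="service-aggregated-reply"
    send-partial-result-on-expiry="false"
    discard-channel="discardedPartials"
    expire-groups-upon-completion="true"
    group-timeout="2000"/>
<!-- expired partial groups are released here instead of lingering in the message store -->
<int:channel id="discardedPartials"/>
<int:logging-channel-adapter channel="discardedPartials" level="WARN"/>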

Related

Service Activator is not getting invoked

I am trying this use case:
Poll for a message in a queue->transform the message->Call a method with the transformed message.
Here is my code
<jms:message-driven-channel-adapter id="jmsIn"
destination-name="test"
channel="jmsInChannel"/>
<channel id="jmsInChannel"/>
<channel id="consoleOut"/>
<int:transformer input-channel="jmsInChannel" ref="xmlMsgToVORPojoTransformer" output-channel="consoleOut">
</int:transformer>
<beans:bean id="xmlMsgToVORPojoTransformer" class="com.order.jmspublisher.ValidateOrderMessageTransformer">
<beans:property name="unmarshaller" ref="marshaller" />
</beans:bean>
<beans:bean id="marshaller" class="org.springframework.oxm.jaxb.Jaxb2Marshaller">
<beans:property name="classesToBeBound">
<beans:list>
<beans:value>com.order.jmspublisher.ValidateOrderResponseType</beans:value>
</beans:list>
</beans:property>
</beans:bean>
<logging-channel-adapter id="consoleOutloggerChannel" channel="consoleOut" log-full-message="true" level="DEBUG"/>
<int:service-activator id="sa" input-channel="consoleOut" output-channel="someChannel" method="handleVOR">
<beans:bean id="vorActivator" class="com.order.jmspublisher.VORServiceActivator"/>
</int:service-activator>
My Transformer code is as follows:
@SuppressWarnings("rawtypes")
public Message transform(String message)
{
try {
XMLInputFactory xif = XMLInputFactory.newFactory();
XMLStreamReader xsr = xif.createXMLStreamReader(new StreamSource(new StringReader(message)));
xsr.nextTag(); // Advance to Envelope tag
while(xsr.hasNext() ) {
int next = xsr.next();
if(next == XMLStreamReader.START_ELEMENT)
if(xsr.getLocalName().equals("ValidateOrderResponse"))
{
break;
}
}
ValidateOrderResponseType vor = (ValidateOrderResponseType)marshaller.unmarshal(new StAXSource(xsr));
return MessageBuilder.withPayload(vor).build();
} catch (XmlMappingException e) {
return MessageBuilder.withPayload(e).build();
} catch(Exception e){
return MessageBuilder.withPayload(e).build();
}
}
========================================================================
My service activator method code is as follows:
public class VORServiceActivator {
private static final Logger logger = LoggerFactory.getLogger(VORServiceActivator.class);
@ServiceActivator
public String handleVOR(ValidateOrderResponseType vor)
{
logger.info("vor information received and invoke some services here....\r\n"+vor);
return vor.toString();
}
}
=========================================================================
My service activator method is not getting called, but my transformer is getting called and I can see that in the logs.
Please help me figure out where I am going wrong.
Thanks in advance.
<channel id="consoleOut"/> is a DirectChannel; you have 2 consumers subscribed to the channel (logging adapter and service activator).
The default dispatcher will distribute messages to these competing consumers in round-robin fashion; so each will get alternate messages.
You need to change consoleOut to a <publish-subscribe-channel /> if you want both consumers to get all messages.
You can read about channel types here.
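For example, a sketch of that one change (using the same prefix-less namespace as the rest of your config); everything else stays as it is:
<publish-subscribe-channel id="consoleOut"/>
Both the logging adapter and the service activator will then receive every message sent to consoleOut.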

How to manually ack messages from rabbitmq using spring integration

[EDIT] Uploading complete configs:
rabbit.xml which dequeues from rabbit
<rabbit:connection-factory id="amqpConnectionFactoryInbound"
host="${rabbit.host}" port="${rabbit.port}"
username="${rabbit.username}" password="${rabbit.password}"
channel-cache-size="5"
connection-factory="rabbitConnectionFactoryInbound"/>
<beans:bean id="rabbitConnectionFactoryInbound"
class="com.rabbitmq.client.ConnectionFactory">
<beans:property name="requestedHeartbeat"
value="60" />
</beans:bean>
<!-- Inbound Adapter to AMQP RabbitMq and write to file -->
<int-amqp:inbound-channel-adapter id="rabbitMQInboundChannelAdapter"
channel="rabbitInboundMessageChannel"
concurrent-consumers="8"
task-executor="rabbit-executor"
connection-factory="amqpConnectionFactoryInbound"
message-converter="byteArrayToStringConverter"
queue-names="${rabbit.queue}"
acknowledge-mode="MANUAL"
error-channel="errorChannelId"
prefetch-count="25" />
<header-enricher input-channel="rabbitInboundMessageChannel"
output-channel="rabbitOutboundboundMessageChannel">
<int:header name="Operation" value="${operation.rabbit}" />
<int:header name="GUID" expression="#{ 'T(java.util.UUID).randomUUID().toString()' }" />
<int:header name="operationStartTime" expression="#{ 'T(java.lang.System).currentTimeMillis()' }" />
</header-enricher>
<int:channel id="rabbitOutboundboundMessageChannel">
<int:interceptors>
<int:wire-tap channel="loggerChannel" />
</int:interceptors>
</int:channel>
<task:executor id="rabbit-executor" rejection-policy="CALLER_RUNS"
pool-size="10-30"
queue-capacity="25" />
</beans:beans>
The message is then sent to router channel: router.xml
<int:header-enricher input-channel="rabbitOutboundboundMessageChannel"
output-channel="routerChannel">
<int:header name="Operation" value="${operation.router}"
overwrite="true" />
<int:header name="file_name" expression="headers['GUID'] + '.xml'" />
<int:header name="operationStartTime" expression="#{
'T(java.lang.System).currentTimeMillis()' }"
overwrite="true" />
<int:error-channel ref="errorChannelId" />
</int:header-enricher>
<int:recipient-list-router id="rabbitMsgrouter" input-channel="routerChannel">
<int:recipient channel="fileBackupChannel" selector-expression="new String(payload).length()>0" />
<int:recipient channel="transformerChannel" />
</int:recipient-list-router>
<int:channel id="transformerChannel">
<int:interceptors>
<int:wire-tap channel="loggerChannel" />
</int:interceptors>
</int:channel>
<int:channel id="fileBackupChannel"/>
<int:channel id="loggerChannel"/>
</beans>
The message is now sent to persister.xml and transformer.xml. The following is persister.xml and I want to ack if persistence is successful. There are other downstream processes after transformer.xml
<int:header-enricher input-channel="fileBackupChannel" output-
channel="fileSaveChannel">
<int:header name="Operation" value="${operation.filePersister}"
overwrite="true" />
<int:header name="replyChannel" value="nullChannel" />
<int:header name="operationStartTime" expression="#{
'T(java.lang.System).currentTimeMillis()' }" />
<int:error-channel ref="errorChannelId" />
</int:header-enricher>
<int-file:outbound-gateway id="fileBackUpChannelAdapter"
directory="${file.location}"
request-channel="fileSaveChannel" reply-channel="rabbitAckChannel"/>
<int:service-activator input-channel="rabbitAckChannel" output-
channel="nullChannel" ref="ackRabbit" method="handleRabbitAcks" />
<bean id="ackRabbit"
class="com.expedia.dataloader.rabbit.RabbitAcknowledgement"/>
<int:channel id="rabbitAckChannel">
<int:interceptors>
<int:wire-tap channel="loggerChannel" />
</int:interceptors>
</int:channel>
<int:channel id="loggerChannel"/>
<int:channel id="fileSaveChannel"/>
</beans>
I'm having trouble manually acking payloads from RabbitMQ.
This is my workflow:
1. Get message from rabbit using inbound-channel-adapter:
<int-amqp:inbound-channel-adapter id="rabbitMQInboundChannelAdapter"
channel="rabbitInboundMessageChannel"
concurrent-consumers="${rabbit.concurrentConsumers}"
task-executor="rabbit-executor"
connection-factory="amqpConnectionFactoryInbound"
message-converter="byteArrayToStringConverter"
queue-names="${rabbit.queue}"
acknowledge-mode="MANUAL"
error-channel="errorChannelId"
prefetch-count="${rabbit.prefetchCount}" />
2. Persist message to disk using outbound-gateway:
<int-file:outbound-gateway id="fileBackUpChannelAdapter"
directory="${file.location}"
request-channel="fileSaveChannel" reply-channel="loggerChannel" />
3. Ack from rabbit when the persister (step 2) succeeds.
For step (3), I wrote the following code:
public class RabbitAcknowledgement {
public void handleRabbitAcks(Message<?> message) throws IOException {
com.rabbitmq.client.Channel channel = (com.rabbitmq.client.Channel) message.getHeaders().get("amqp_channel");
long deliveryTag = (long) message.getHeaders().get("amqp_deliveryTag");
channel.basicAck(deliveryTag, false);
}
}
which I'm calling from spring via:
<int:service-activator input-channel="rabbitOutboundboundMessageChannel" output-channel="routerChannel" ref="ackRabbit" method="handleRabbitAcks" />
This doesn't work, and the rabbit payloads in my queue are not acked.
My questions are:
Do I need MANUAL ack in this scenario?
What am I doing wrong?
It should work fine; I just ran a quick test and it works for me...
@SpringBootApplication
public class So44666444Application implements CommandLineRunner {
public static void main(String[] args) {
SpringApplication.run(So44666444Application.class, args).close();
}
@Autowired
private RabbitTemplate template;
private final CountDownLatch latch = new CountDownLatch(1);
@Override
public void run(String... args) throws Exception {
this.template.convertAndSend("foo", "bar");
latch.await();
}
@Bean
public AmqpInboundChannelAdapter adapter(ConnectionFactory cf) {
AmqpInboundChannelAdapter adapter = new AmqpInboundChannelAdapter(listenerContainer(cf));
adapter.setOutputChannelName("ack");
return adapter;
}
@Bean
public AbstractMessageListenerContainer listenerContainer(ConnectionFactory cf) {
SimpleMessageListenerContainer container = new SimpleMessageListenerContainer(cf);
container.setAcknowledgeMode(AcknowledgeMode.MANUAL);
container.setQueueNames("foo");
return container;
}
@ServiceActivator(inputChannel = "ack")
public void ack(@Header(AmqpHeaders.CHANNEL) Channel channel, @Header(AmqpHeaders.DELIVERY_TAG) Long tag)
throws IOException {
System.out.println("Acking: " + tag);
channel.basicAck(tag, false);
latch.countDown();
}
}
If I set a breakpoint on the basicAck, I see the message as unacked on the console; stepping over to the next line and the message is removed.

Spring poller roll back on exception after a particular retry count

My poller fetches data from the DB and passes it to a service activator.
If any exception happens in the service activator method, I should roll back the fetched data to its previous state and send the same data to the service activator again, but only for a specific retry count (say 3). Is it possible to do this in XML configuration? For details I will share the poller configuration and the service activator.
poller.xml
<int-jdbc:inbound-channel-adapter id="datachannel"
query="select loyalty_id,process_id,mobile_uid from TBL_RECEIPT where r_cre_time
=(select min(r_cre_time) from TBL_RECEIPT where receipt_status=0)"
data-source="dataSource" max-rows-per-poll="1"
update="update TBL_RECEIPT set receipt_status=11 where process_id in (:process_id)">
<int:poller fixed-rate="5000">
<int:transactional/>
</int:poller>
</int-jdbc:inbound-channel-adapter>
<int:channel id="errors">
<int:queue/>
</int:channel>
<bean id="poller" class="main.java.com.as.poller.PollerService" />
<int:channel id="executerchannel">
<int:dispatcher task-executor="taskExecutor" />
</int:channel>
<task:executor id="taskExecutor" pool-size="2" />
<int:service-activator input-channel="datachannel"
output-channel="executerchannel" ref="poller" method="getRecordFromPoller">
<int:request-handler-advice-chain>
<int:retry-advice recovery-channel="errors" />
</int:request-handler-advice-chain>
</int:service-activator>
<int:service-activator input-channel="executerchannel"
ref="poller" method="getDataFromExecuterChannel">
</int:service-activator>
service activator method
@SuppressWarnings({ "unchecked", "rawtypes" })
@ServiceActivator
public void processMessage(Message message) throws IOException {
int capLimit = Integer.parseInt(env.getProperty("capping_limit"));
List<Map<String, Object>> rows = (List<Map<String, Object>>) message
.getPayload();
for (Map<String, Object> row : rows) {
String loyaltyId = (String) row.get("loyalty_id");
String processId = (String) row.get("process_id");
String xid=(String)row.get("mobile_uid");
I have heard about int:transactional being used in the poller configuration, but when I added it, it keeps picking up the same record even after a successful transaction (meaning it's getting rolled back every time).
Can anyone please help me with this?
You can add a retry request-handler-advice to the service activator.
To make retry stateful (throw the exception so the transaction will rollback) you need to provide a RetryStateGenerator. Otherwise the thread will simply be suspended during the retries.
Regardless of using stateful or stateless retry, you really should use a transactional poller so that the update is only applied after success.
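A sketch of what the stateful variant could look like (the key expression payload[0]['process_id'] is just an illustration, assuming process_id identifies the same record across redeliveries; the recoverer reproduces your existing recovery-channel="errors" behavior):
<int:service-activator input-channel="datachannel"
    output-channel="executerchannel" ref="poller" method="getRecordFromPoller">
    <int:request-handler-advice-chain>
        <bean class="org.springframework.integration.handler.advice.RequestHandlerRetryAdvice">
            <property name="recoveryCallback">
                <bean class="org.springframework.integration.handler.advice.ErrorMessageSendingRecoverer">
                    <constructor-arg ref="errors"/>
                </bean>
            </property>
            <!-- a state generator makes the retry stateful: the exception propagates each time, so the transaction rolls back -->
            <property name="retryStateGenerator">
                <bean class="org.springframework.integration.handler.advice.SpelExpressionRetryStateGenerator">
                    <constructor-arg value="payload[0]['process_id']"/>
                </bean>
            </property>
        </bean>
    </int:request-handler-advice-chain>
</int:service-activator>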
meaning it's getting rolled back every time
The transactional poller will only rollback if an exception is thrown. So if you're seeing the same row, something must be failing.
Turn on DEBUG logging for all of org.springframework to follow the message flow and transaction activity.

Spring integration chain error handling

I need help with error handling in a chain during a splitter/aggregator flow on a synchronous channel.
Below is the use case, and it will be a synchronous channel. In the chain there is a set of service activators that perform the business logic. Now if there is any exception in one of the service activators in the chain, I want it to be handled in the chain itself and to continue with the other split messages.
In order to do that, I tried adding a header enricher for the error channel in the chain, but it did not work. Any suggestions?
Object1 contains List<Object2>
Flow:
List<Object1> --> Splitter1 (for Object1) --> Splitter2 (for Object2) --> Chain --> Aggregator --> Aggregator
Code
<int:chain input-channel="ch3" output-channel="ch10" >
<int:header-enricher>
<int:error-channel ref="exception1" ></int:error-channel>
</int:header-enricher>
<int:service-activator ref="myService" method="method1"></int:service-activator>
<int:service-activator ref="myService" method="method2"></int:service-activator>
<int:service-activator ref="myService" method="method3"></int:service-activator>
<int:service-activator ref="myService" method="method4"></int:service-activator>
</int:chain>
<!-- Exception channel for chain and the output should go to the chain output channel -->
<int:chain input-channel="exception1" output-channel="ch10" >
<int:service-activator ref="exp" method="myException"></int:service-activator>
</int:chain>
Unfortunately it doesn't work that way. The error-channel header only applies in the asynchronous cases: it lets you override the default behavior of the MessagePublishingErrorHandler for a PollableChannel or a channel with an Executor; in other words, for the cases where we really can't do try...catch, to put it in raw Java terms.
So, to meet your requirement you should rely on the try...catch equivalent for that particular <service-activator>. It is called ExpressionEvaluatingRequestHandlerAdvice and must be configured on the <request-handler-advice-chain>.
In your case you should configure that Advice like:
<bean class="ExpressionEvaluatingRequestHandlerAdvice">
<property name="trapException" value="true"/>
<property name="onFailureExpression" value="#exception"/>
<property name="failureChannel" value="myErrorChannel"/>
</bean>
The trapException="true" means the exception is not re-thrown to the top level, which in your case would be the <splitter>.
The onFailureExpression says what to send to the failureChannel from the catch block.
The failureChannel is your desired error channel for handling <service-activator> failures.
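Wired into your chain it could look something like this (a sketch; it assumes a Spring Integration version that supports advising handlers inside a <chain>, i.e. 3.0 or later, and it reuses your exception1 channel as the failure channel):
<int:channel id="exception1"/>
<int:chain input-channel="ch3" output-channel="ch10">
    <int:service-activator ref="myService" method="method1">
        <int:request-handler-advice-chain>
            <bean class="org.springframework.integration.handler.advice.ExpressionEvaluatingRequestHandlerAdvice">
                <property name="trapException" value="true"/>
                <property name="onFailureExpression" value="#exception"/>
                <!-- failures go to the exception1 chain instead of being re-thrown to the splitter -->
                <property name="failureChannel" ref="exception1"/>
            </bean>
        </int:request-handler-advice-chain>
    </int:service-activator>
    <!-- method2, method3 and method4 get the same advice chain if they also need trapping -->
    <int:service-activator ref="myService" method="method2"/>
    <int:service-activator ref="myService" method="method3"/>
    <int:service-activator ref="myService" method="method4"/>
</int:chain>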
The source code, by the way, looks like this:
try {
Object result = callback.execute();
if (this.onSuccessExpression != null) {
this.evaluateSuccessExpression(message);
}
return result;
}
catch (Exception e) {
Exception actualException = this.unwrapExceptionIfNecessary(e);
if (this.onFailureExpression != null) {
Object evalResult = this.evaluateFailureExpression(message, actualException);
if (this.returnFailureExpressionResult) {
return evalResult;
}
}
if (!this.trapException) {
throw actualException;
}
return null;
}
Since we prevent the re-throw with trapException="true", we end up at the return null. And because a <service-activator> tolerates a null result (it simply produces no reply), our <splitter> can go ahead with the other messages.
HTH

Delete File after successful persist to MongoDB in Spring Integration

I have a Spring Integration flow that reads a CSV file from a directory, splits the lines, then processes each line and extracts 2 objects from each line. These two objects are then sent to two separate int-mongodb:outbound-channel-adapters. I want to delete the incoming file after all of the lines have been processed and persisted. I have seen examples of using a transaction manager to do this with the inbound adapter, but nothing with the outbound adapter. Is there a way to do this?
My config looks something like this:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
http://www.springframework.org/schema/integration/file http://www.springframework.org/schema/integration/file/spring-integration-file.xsd
http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd
http://www.springframework.org/schema/integration/mongodb http://www.springframework.org/schema/integration/mongodb/spring-integration-mongodb.xsd
http://www.springframework.org/schema/data/mongo http://www.springframework.org/schema/data/mongo/spring-mongo.xsd"
xmlns:int="http://www.springframework.org/schema/integration"
xmlns:int-file="http://www.springframework.org/schema/integration/file"
xmlns:task="http://www.springframework.org/schema/task"
xmlns:int-mongodb="http://www.springframework.org/schema/integration/mongodb"
xmlns:mongo="http://www.springframework.org/schema/data/mongo">
<int:poller default="true" fixed-delay="50"/>
<int-file:inbound-channel-adapter id="filesInChannel"
directory="file:${file.ingest.directory}"
auto-create-directory="true">
<int:poller id="poller" fixed-rate="100">
</int:poller>
</int-file:inbound-channel-adapter>
<task:executor id="executor" pool-size="10" queue-capacity="50" />
<int:channel id="executorChannel">
<int:queue capacity="50"/>
</int:channel>
<int:splitter input-channel="filesInChannel" output-channel="executorChannel"
expression="T(org.apache.commons.io.FileUtils).lineIterator(payload)"/>
<int:service-activator id="lineParserActivator" ref="lineParser" method="parseLine"
input-channel="executorChannel" output-channel="lineChannel">
<int:poller task-executor="executor" fixed-delay="500">
</int:poller>
</int:service-activator>
<bean name="lineParser" class="com.xxx.LineParser"/>
<int:channel id="lineChannel">
<int:queue/>
</int:channel>
<int:channel id="lineMongoOutput">
<int:queue/>
</int:channel>
<int:channel id="actionMongoOutput">
<int:queue/>
</int:channel>
<int:transformer input-channel="lineChannel" output-channel="lineMongoOutput">
<bean id="lineTransformer" class="com.xxx.transformer.LineTransformer"></bean>
</int:transformer>
<int:transformer input-channel="lineChannel" output-channel="actionMongoOutput">
<bean id="actionTransformer" class="com.xxx.transformer.ActionTransformer"></bean>
</int:transformer>
<mongo:db-factory id="mongoDbFactory" dbname="${mongo.db.name}" password="${mongo.db.pass}" username="${mongo.db.user}" port="${mongo.db.port}" host="${mongo.db.host}"/>
<int-mongodb:outbound-channel-adapter id="lineMongoOutput"
collection-name="full"
mongodb-factory="mongoDbFactory" />
<int-mongodb:outbound-channel-adapter id="actionMongoOutput"
collection-name="action"
mongodb-factory="mongoDbFactory" />
</beans>
You can't really do it on the outbound adapter because you don't know when you're "done". Given you are asynchronously handing off to the downstream flow (via executors and queue channels), you can't do it on the inbound adapter either, because the poller thread will return to the adapter as soon as all the splits are sent.
Aside from that, I see some issues in your flow:
You seem to have an excessive number of thread handoffs; you really don't need queue channels in the downstream flow because your executions are controlled by the executor channel.
It is quite unusual to make every channel a QueueChannel.
Finally, you have 2 transformers subscribed to the same channel.
Do you realize that messages sent to lineChannel will alternate, round-robin style?
Perhaps that is your intent, given your description, but it seems a little brittle to me; I would prefer to see the different data types going to different channels.
If you avoid using queue channels, and use gateways within your service activator to send out the data to the mongo adapters, your service activator would know when it is complete and be able to remove the file at that time.
EDIT:
Here is one solution (it writes to logs rather than mongo, but you should get the idea)...
<int-file:inbound-channel-adapter directory="/tmp/foo" channel="toSplitter">
<int:poller fixed-delay="1000">
<int:transactional synchronization-factory="sf" transaction-manager="ptxMgr" />
</int:poller>
</int-file:inbound-channel-adapter>
<int:transaction-synchronization-factory id="sf">
<int:after-commit expression="payload.delete()" />
<int:after-rollback expression="payload.renameTo(new java.io.File('/tmp/bad/' + payload.name))" />
</int:transaction-synchronization-factory>
<bean id="ptxMgr" class="org.springframework.integration.transaction.PseudoTransactionManager" />
<int:splitter input-channel="toSplitter" output-channel="processChannel">
<bean class="org.springframework.integration.file.splitter.FileSplitter" />
</int:splitter>
<int:service-activator input-channel="processChannel">
<bean class="foo.Foo">
<constructor-arg ref="gate" />
</bean>
</int:service-activator>
<int:gateway id="gate" service-interface="foo.Foo$Gate">
<int:method name="toLine" request-channel="toLine" />
<int:method name="toAction" request-channel="toAction" />
</int:gateway>
<int:channel id="toLine" />
<int:logging-channel-adapter channel="toLine" expression="'LINE:' + payload" level="WARN"/>
<int:channel id="toAction" />
<int:logging-channel-adapter channel="toAction" expression="'ACTION:' + payload" level="WARN"/>
.
public class Foo {
private final Gate gateway;
public Foo(Gate gateway) {
this.gateway = gateway;
}
public void parse(String payload) {
String[] split = payload.split(",");
if (split.length != 2) {
throw new RuntimeException("Bad row size: " + split.length);
}
this.gateway.toLine(split[0]);
this.gateway.toAction(split[1]);
}
public interface Gate {
void toLine(String line);
void toAction(String action);
}
}
.
@ContextConfiguration
@RunWith(SpringJUnit4ClassRunner.class)
public class FooTests {
@Test
public void testGood() throws Exception {
File file = new File("/tmp/foo/x.txt");
FileOutputStream fos = new FileOutputStream(file);
fos.write("foo,bar".getBytes());
fos.close();
int n = 0;
while(n++ < 100 && file.exists()) {
Thread.sleep(100);
}
assertFalse(file.exists());
}
@Test
public void testBad() throws Exception {
File file = new File("/tmp/foo/y.txt");
FileOutputStream fos = new FileOutputStream(file);
fos.write("foo".getBytes());
fos.close();
int n = 0;
while(n++ < 100 && file.exists()) {
Thread.sleep(100);
}
assertFalse(file.exists());
file = new File("/tmp/bad/y.txt");
assertTrue(file.exists());
file.delete();
}
}
Add a task executor to the <poller/> to process multiple files concurrently. Add a router as needed.
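For example, a sketch of the concurrent variant (the fileExec executor id and pool size are arbitrary; the rest is the same flow as above):
<int-file:inbound-channel-adapter directory="/tmp/foo" channel="toSplitter">
    <int:poller fixed-delay="1000" task-executor="fileExec">
        <int:transactional synchronization-factory="sf" transaction-manager="ptxMgr" />
    </int:poller>
</int-file:inbound-channel-adapter>
<task:executor id="fileExec" pool-size="4" />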

Resources