I am facing an issue while testing Kafka with Camel. I used embedded Kafka with Camel, and here is what I tried.
I followed this example, which explains how to test Kafka using embedded Kafka:
https://codenotfound.com/spring-kafka-embedded-unit-test-example.html
package com.codenotfound.kafka.producer;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertThat;
import static org.springframework.kafka.test.assertj.KafkaConditions.key;
import static org.springframework.kafka.test.hamcrest.KafkaMatchers.hasValue;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.main.Main;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.test.rule.EmbeddedKafkaRule;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
@DirtiesContext
public class SpringKafkaSenderTest {
private static final Logger LOGGER = LoggerFactory.getLogger(SpringKafkaSenderTest.class);
private static String SENDER_TOPIC = "sender.t";
@Autowired
private Sender sender;
private KafkaMessageListenerContainer<String, String> container;
private BlockingQueue<ConsumerRecord<String, String>> records;
Object groupId;
Object bootstrapServers;
@ClassRule
public static EmbeddedKafkaRule embeddedKafka = new EmbeddedKafkaRule(1, true, SENDER_TOPIC);
@Before
public void setUp() throws Exception {
// set up the Kafka consumer properties
Map<String, Object> consumerProperties = KafkaTestUtils.consumerProps("sender", "false",
embeddedKafka.getEmbeddedKafka());
for (Entry<String, Object> entry : consumerProperties.entrySet()) {
System.out.println("Key = " + entry.getKey() + ", Value = " + entry.getValue());
if (entry.getKey().equals("group.id")) {
groupId = entry.getValue();
} else if (entry.getKey().equals("bootstrap.servers")) {
bootstrapServers = entry.getValue();
}
}
// create a Kafka consumer factory
DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<String, String>(
consumerProperties);
// set the topic that needs to be consumed
ContainerProperties containerProperties = new ContainerProperties(SENDER_TOPIC);
// create a Kafka MessageListenerContainer
container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
// create a thread safe queue to store the received message
records = new LinkedBlockingQueue<>();
// setup a Kafka message listener
container.setupMessageListener(new MessageListener<String, String>() {
@Override
public void onMessage(ConsumerRecord<String, String> record) {
LOGGER.debug("test-listener received message='{}'", record.toString());
records.add(record);
}
});
// start the container and underlying message listener
container.start();
// wait until the container has the required number of assigned partitions
ContainerTestUtils.waitForAssignment(container, embeddedKafka.getEmbeddedKafka().getPartitionsPerTopic());
}
@After
public void tearDown() {
// stop the container
container.stop();
}
@Test
public void testCamelWithKafka() throws Exception {
String topicName = "topic=javainuse-topic";
String kafkaServer = "kafka:localhost:9092";
String zooKeeperHost = "zookeeperHost=localhost&zookeeperPort=2181";
String serializerClass = "serializerClass=kafka.serializer.StringEncoder";
String toKafka = new StringBuilder().append(kafkaServer).append("?").append(topicName).append("&")
.append(zooKeeperHost).append("&").append(serializerClass).toString();
String embedded = new StringBuilder().append(bootstrapServers).append("?").append(topicName).append("&")
// .append(embeddedKafka.getEmbeddedKafka().getZookeeperConnectionString())
.append(zooKeeperHost).append("&").append(serializerClass).toString();
Main main = new Main();
main.addRouteBuilder(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("file:D://inbox//?noop=true").split().tokenize("\n").to("direct:embedded");
}
});
main.run();
ConsumerRecord<String, String> received =
records.poll(10, TimeUnit.SECONDS);
// assertThat(received, hasValue(greeting));
// AssertJ Condition to check the key
// assertThat(received).has(key(null));
// System.out.println(received);
}
}
Camel should be able to read from a file and move the data to Kafka, and the consumer should be able to read it.
You can use @RunWith(CamelSpringBootRunner.class) to run the test case.
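For example, a minimal sketch of such a test (assuming camel-test-spring and spring-boot-starter-test are on the classpath; the class name and the assertion are illustrative):

import org.apache.camel.CamelContext;
import org.apache.camel.test.spring.CamelSpringBootRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;

import static org.assertj.core.api.Assertions.assertThat;

@RunWith(CamelSpringBootRunner.class)
@SpringBootTest
public class CamelKafkaRouteTest {

    @Autowired
    private CamelContext camelContext;

    @Test
    public void camelContextStarts() {
        // the runner boots the Spring context and starts the Camel routes;
        // Kafka-level assertions (e.g. polling the embedded consumer) would go here
        assertThat(camelContext.getStatus().isStarted()).isTrue();
    }
}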
I want to implement a Hibernate-based approach for routing to a read-only DB.
I have implemented the following code, but when it comes to executing
protected Object determineCurrentLookupKey() {
even though the request comes from a method annotated with @Transactional(readOnly = true), it always returns "read-only=false". Why doesn't the transaction process this successfully? Thanks.
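To make the scenario concrete, the caller is a service method like the following (ReportService, Report and ReportRepository are hypothetical names, used only to illustrate the read-only entry point):

import java.util.List;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service
public class ReportService {

    private final ReportRepository reportRepository;

    public ReportService(ReportRepository reportRepository) {
        this.reportRepository = reportRepository;
    }

    @Transactional(readOnly = true)
    public List<Report> findAll() {
        // expectation: determineCurrentLookupKey() returns DatabaseEnvironment.READONLY here
        return reportRepository.findAll();
    }
}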
DatabaseContextHolder.java
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class DatabaseContextHolder {
private static final Log log = LogFactory.getLog(DatabaseContextHolder.class);
private static final ThreadLocal<DatabaseEnvironment> CONTEXT = new ThreadLocal<>();
public static void set(DatabaseEnvironment databaseEnvironment) {
CONTEXT.set(databaseEnvironment);
}
public static DatabaseEnvironment getEnvironment() {
log.info("DB Environment : " + CONTEXT.get());
return CONTEXT.get();
}
public static void reset() {
CONTEXT.set(DatabaseEnvironment.WRITABLE);
}
}
DatabaseEnvironment.java
public enum DatabaseEnvironment {
WRITABLE,
READONLY
}
TransactionRoutingDataSource.java
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.lang.Nullable;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import io.api.readWriteRoutingHibernate.DataSourceConfiguration;
import io.api.readWriteRoutingHibernate.context.DatabaseEnvironment;
public class TransactionRoutingDataSource
extends AbstractRoutingDataSource {
private static final Log log = LogFactory.getLog(DataSourceConfiguration.class);
@Nullable
@Override
protected Object determineCurrentLookupKey() {
log.info("DatabaseEnvironment.READONLY : " + TransactionSynchronizationManager
.isCurrentTransactionReadOnly() );
return TransactionSynchronizationManager
.isCurrentTransactionReadOnly() ?
DatabaseEnvironment.READONLY :
DatabaseEnvironment.WRITABLE;
}
}
DataSourceConfiguration.java
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.zaxxer.hikari.HikariDataSource;
import io.api.readWriteRoutingHibernate.context.DatabaseEnvironment;
import io.api.readWriteRoutingHibernate.datasource.TransactionRoutingDataSource;
@Configuration
public class DataSourceConfiguration {
private static final Log log = LogFactory.getLog(DataSourceConfiguration.class);
#Value("${spring.datasource.master.url}")
private String mstUrl;
#Value("${spring.datasource.master.username}")
private String mstUsername;
#Value("${spring.datasource.master.password}")
private String mstPassword;
#Value("${spring.datasource.slave.url}")
private String slaveUrl;
#Value("${spring.datasource.slave.username}")
private String slaveUsername;
#Value("${spring.datasource.slave.password}")
private String slavePassword;
#Bean
public DataSource dataSource() {
TransactionRoutingDataSource masterSlaveRoutingDataSource = new TransactionRoutingDataSource();
Map<Object, Object> targetDataSources = new HashMap<>();
targetDataSources.put(DatabaseEnvironment.WRITABLE, masterDataSource());
targetDataSources.put(DatabaseEnvironment.READONLY, slaveDataSource());
masterSlaveRoutingDataSource.setTargetDataSources(targetDataSources);
// Set as all transaction point to master
masterSlaveRoutingDataSource.setDefaultTargetDataSource(masterDataSource());
return masterSlaveRoutingDataSource;
}
public DataSource slaveDataSource() {
HikariDataSource hikariDataSource = new HikariDataSource();
hikariDataSource.setJdbcUrl(slaveUrl);
hikariDataSource.setUsername(slaveUsername);
hikariDataSource.setPassword(slavePassword);
return hikariDataSource;
}
public DataSource masterDataSource() {
HikariDataSource hikariDataSource = new HikariDataSource();
hikariDataSource.setJdbcUrl(mstUrl);
hikariDataSource.setUsername(mstUsername);
hikariDataSource.setPassword(mstPassword);
return hikariDataSource;
}
protected Properties additionalProperties() {
Properties properties = new Properties();
properties.setProperty(
"hibernate.connection.provider_disables_autocommit",
Boolean.TRUE.toString()
);
return properties;
}
}
For the following test I am always getting the error:
org.opentest4j.AssertionFailedError: expected: 10 but was: 0
What exactly I am trying to verify in the scope of this test:
I send 10 messages to Kafka and immediately afterwards try to read those messages back, but for some unknown reason the KafkaConsumer returns 0 records, and I am struggling to understand why the consumer can't read the messages that were sent earlier.
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.stream.IntStream;
@SpringBootTest
@ActiveProfiles("test")
@ContextConfiguration(initializers = TestKafkaContextInitializer.class)
@Slf4j
public class KafkaFlowVerificationITest {
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
@Autowired
private KafkaProperties kafkaProperties;
private final String kafkaTopic = "test.topic";
@Test
void testKafkaFlow() {
IntStream.range(0, 10)
.forEach(e -> {
try {
kafkaTemplate.send(kafkaTopic, UUID.randomUUID().toString()).get();
} catch (InterruptedException | ExecutionException ex) {
ex.printStackTrace();
}
});
checkKafkaForMessage();
}
private void checkKafkaForMessage() {
Map<String, Object> properties = new HashMap<>();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringDeserializer.class);
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringDeserializer.class);
properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
properties.put(ConsumerConfig.GROUP_ID_CONFIG, "acme");
KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
consumer.subscribe(List.of(kafkaTopic));
ConsumerRecords<String, String> records = consumer.poll(Duration.ZERO);
Assertions.assertThat(records.count()).isEqualTo(10);
}
}
and TestKafkaContextInitializer:
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.testcontainers.containers.KafkaContainer;
import org.testcontainers.utility.DockerImageName;
@Slf4j
public class TestKafkaContextInitializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
private final KafkaContainer kafkaContainer =
new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"));
@Override
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
kafkaContainer.start();
var values = TestPropertyValues.of(
"spring.kafka.producer.bootstrap-servers=" + kafkaContainer.getBootstrapServers(),
"spring.kafka.consumer.bootstrap-servers=" + kafkaContainer.getBootstrapServers()
);
values.applyTo(configurableApplicationContext);
}
}
The root cause of the issue:
I set the Testcontainers Kafka bootstrap server URL for the following properties:
spring.kafka.producer.bootstrap-servers
spring.kafka.consumer.bootstrap-servers
but in the test I use the
spring.kafka.bootstrap-servers property, which is why the consumer attempted to connect to the default localhost:9092 URL instead of the URL provided by Testcontainers.
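A minimal sketch of the fix, assuming the same initializer as above: also register the generic spring.kafka.bootstrap-servers property so that KafkaProperties.getBootstrapServers() used in the test resolves to the Testcontainers broker:

// inside TestKafkaContextInitializer.initialize(...)
var values = TestPropertyValues.of(
        // generic property backing KafkaProperties#getBootstrapServers() in the test
        "spring.kafka.bootstrap-servers=" + kafkaContainer.getBootstrapServers(),
        "spring.kafka.producer.bootstrap-servers=" + kafkaContainer.getBootstrapServers(),
        "spring.kafka.consumer.bootstrap-servers=" + kafkaContainer.getBootstrapServers()
);
values.applyTo(configurableApplicationContext);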
I want to listen to multiple (2) queues on the same queue manager from my Spring Boot (Kotlin) app. I have the following MQ connection factory but am not sure how to add the listeners for my two queues.
@Configuration
open class MqConfig {
@Value("\${jms.mq.port}")
private var port: Int = 0
@Value("\${jms.mq.channel}")
private var channel: String? = null
@Value("\${jms.mq.host}")
private var host: String? = null
@Value("\${jms.mq.manager}")
private var queueManager: String? = null
@Value("\${jms.mqA.queue}")
private var jmsMqA: String? = null
@Value("\${jms.mqB.queue}")
private var jmsMqB: String? = null
@Bean
open fun jmsMQConnectionFactory(): MQQueueConnectionFactory {
val mqQueueConnectionFactory = MQQueueConnectionFactory()
mqQueueConnectionFactory.port = port
mqQueueConnectionFactory.channel = channel
mqQueueConnectionFactory.hostName = host
mqQueueConnectionFactory.queueManager = queueManager
mqQueueConnectionFactory.transportType = WMQConstants.WMQ_CM_CLIENT;
mqQueueConnectionFactory.sslSocketFactory = sslSocketfactory;
return mqQueueConnectionFactory
}
As you can see in the code above, the two queues I want to listen to are jmsMqA and jmsMqB, but I am not sure where or how to use them.
I am new to queues and MQ, so if anyone can point me in the right direction on how to implement a listener and (possibly) a publisher for those two queues, that would be really helpful.
You can use @JmsListener to listen to the queues. Here is the link.
For example:
@Component
class QueueAListener {
@JmsListener(
destination = "\${jms.mqA.queue}",
containerFactory = "jmsMQContainerFactory"
)
fun onMessage(message: Message) {
// your logic
}
}
For queue B you can create a new component, as in the sketch below, or add another method to the same class (don't forget to change the method name).
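For illustration, the second listener could be its own component like this (shown in Java, like the container factory snippet below; the payload type and method body are placeholders):

import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;

@Component
public class QueueBListener {

    @JmsListener(destination = "${jms.mqB.queue}", containerFactory = "jmsMQContainerFactory")
    public void onMessage(String message) {
        // your logic for queue B
    }
}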
UPD:
Container factory:
@Bean(name = "jmsMQContainerFactory")
public JmsListenerContainerFactory<?> topicJmsListenerContainerFactory() {
DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
factory.setPubSubDomain(true);
factory.setConnectionFactory(jmsMQConnectionFactory());
return factory;
}
You can use JmsTemplate to send messages to queues. Please check this manual.
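A small sketch of that, assuming Spring Boot's auto-configured JmsTemplate (or one you build from jmsMQConnectionFactory()); the class name is illustrative:

import org.springframework.beans.factory.annotation.Value;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Service;

@Service
public class QueueASender {

    private final JmsTemplate jmsTemplate;

    @Value("${jms.mqA.queue}")
    private String queueA;

    public QueueASender(JmsTemplate jmsTemplate) {
        this.jmsTemplate = jmsTemplate;
    }

    public void send(String payload) {
        // converts the String payload to a JMS TextMessage and sends it to queue A
        jmsTemplate.convertAndSend(queueA, payload);
    }
}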
Note: This answer is based on the sample in https://github.com/ibm-messaging/mq-dev-patterns/tree/master/Spring-JMS/src/main/java/com/ibm/mq/samples/jms/spring/level114
Your jmsMQConnectionFactory bean should look something like this (this is Java):
import com.ibm.mq.jms.MQConnectionFactory;
import com.ibm.mq.samples.jms.spring.globals.handlers.OurDestinationResolver;
import com.ibm.mq.samples.jms.spring.globals.handlers.OurMessageConverter;
import com.ibm.mq.spring.boot.MQConfigurationProperties;
import com.ibm.mq.spring.boot.MQConnectionFactoryFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.config.DefaultJmsListenerContainerFactory;
import org.springframework.jms.config.JmsListenerContainerFactory;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.support.QosSettings;
import javax.jms.DeliveryMode;
import javax.jms.JMSException;
@Configuration
public class MQConfiguration114 {
protected final Log logger = LogFactory.getLog(getClass());
@Bean
public MQConnectionFactory mqConnectionFactory() throws JMSException {
MQConfigurationProperties properties = new MQConfigurationProperties();
// Properties will be a mix of defaults, and those found in application.properties
// under ibm.mq
// Here we can override any of the properties should we need to
MQConnectionFactoryFactory mqcff = new MQConnectionFactoryFactory(properties,null);
MQConnectionFactory mqcf = mqcff.createConnectionFactory(MQConnectionFactory.class);
return mqcf;
}
@Bean
public JmsListenerContainerFactory<?> myContainerFactory114() throws JMSException {
DefaultJmsListenerContainerFactory factory = new DefaultJmsListenerContainerFactory();
factory.setConnectionFactory(mqConnectionFactory());
// ... any customisations go here
return factory;
}
// If you are going to be sending messages then
#Bean("myJmsTemplate114")
public JmsTemplate myJmsTemplate114() throws JMSException {
JmsTemplate jmsTemplate = new JmsTemplate(mqConnectionFactory());
// .. Any customisations go here
return jmsTemplate;
}
}
Your listener code would look something like:
package com.ibm.mq.samples.jms.spring.level114;
import com.ibm.mq.samples.jms.spring.globals.data.OurData;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.jms.annotation.JmsListener;
import org.springframework.stereotype.Component;
//@Component
public class MessageConsumer114 {
protected final Log logger = LogFactory.getLog(getClass());
@JmsListener(destination = "${queue.name1}", containerFactory = "myContainerFactory114")
public void receiveRequest(OurData message) {
logger.info("");
logger.info( this.getClass().getSimpleName());
logger.info("Received message of type: " + message.getClass().getSimpleName());
logger.info("Received message :" + message);
}
@JmsListener(destination = "${queue.name2}", containerFactory = "myContainerFactory114")
public void receiveRequest2(OurData message) {
logger.info("");
logger.info( this.getClass().getSimpleName());
logger.info("Received message of type: " + message.getClass().getSimpleName());
logger.info("Received message :" + message);
}
}
Your sender logic would look like:
import com.ibm.mq.samples.jms.spring.globals.data.OurData;
import com.ibm.mq.samples.jms.spring.globals.handlers.OurMessageConverter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Service;
//@Service
public class SendMessageService114 {
protected final Log logger = LogFactory.getLog(getClass());
#Value("${queue.name1}")
public String sendQueue1;
#Value("${queue.name2}")
public String sendQueue2;
final private JmsTemplate myJmsTemplate114;
final private OurMessageConverter ourConverter = new OurMessageConverter();
SendMessageService114(JmsTemplate myJmsTemplate114) {
this.myJmsTemplate114 = myJmsTemplate114;
}
public void send1(OurData msg) {
logger.info("Sending Message");
myJmsTemplate114.convertAndSend(sendQueue1, msg);
}
public void send2(OurData msg) {
logger.info("Sending Message");
myJmsTemplate114.convertAndSend(sendQueue2, msg);
}
}
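For reference, the matching application.properties could look like this (all values are placeholders; the ibm.mq.* keys are the standard mq-jms-spring-boot-starter properties, and queue.name1/queue.name2 back the ${...} placeholders used above):

ibm.mq.queueManager=QM1
ibm.mq.channel=DEV.APP.SVRCONN
ibm.mq.connName=localhost(1414)
ibm.mq.user=app
ibm.mq.password=passw0rd
queue.name1=DEV.QUEUE.1
queue.name2=DEV.QUEUE.2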
I don't know if it's possible, but this is my question:
I have a batch developed using Spring Boot and Spring Batch, and I have to call another microservice using Feign...
...help!
This is my Reader class:
package it.batch.step;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.NonTransientResourceException;
import org.springframework.batch.item.ParseException;
import org.springframework.batch.item.UnexpectedInputException;
import org.springframework.beans.factory.annotation.Autowired;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import it.client.feign.EmailAccountClient;
import it.dto.feign.mail.account.AccountOutDto;
import it.dto.feign.mail.account.SearchAccountFilterDto;
import it.dto.feign.mail.account.SearchAccountResponseDto;
public class Reader implements ItemReader <String> {
private static final Logger LOGGER = LoggerFactory.getLogger(Reader.class);
private int count = 0;
@Autowired
private EmailAccountClient emailAccountClient;
@Override
public String read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
LOGGER.info("read - begin ");
SearchAccountResponseDto clientResponse = emailAccountClient.searchAccount(getFilter());
if (count < clientResponse.getAccounts().size()) {
return convertToJsonString(clientResponse.getAccounts().get(count++));
} else {
count = 0;
}
return null;
}
private static SearchAccountFilterDto getFilter() {
SearchAccountFilterDto filter = new SearchAccountFilterDto();
return filter;
}
private String convertToJsonString(AccountOutDto account) {
ObjectMapper mapper = new ObjectMapper();
String jsonString = "";
try {
jsonString = mapper.writeValueAsString(account);
} catch (JsonProcessingException e) {
e.printStackTrace();
}
LOGGER.info("Contenuto JSON: " + jsonString);
return jsonString;
}
}
...
When I launch the batch I get this error:
java.lang.NullPointerException: null
at it.batch.step.Reader.read(Reader.java:32) ~[classes/:?]
where line 32 is:
SearchAccountResponseDto clientResponse = emailAccountClient.searchAccount(getFilter());
EmailAccountClient is null
Your client is null: your reader is not a @Component, so Spring can't autowire the client. You must use a workaround, such as passing the autowired client through the constructor when you instantiate the reader, like this:
private EmailAccountClient client;
public Reader(EmailAccountClient client) {
this.client = client;
}
In the other class:
@Autowired
private EmailAccountClient client;
@Bean
public ItemReader<String> reader() {
return new Reader(client);
}
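Putting the two fragments together, a minimal sketch (assuming the @Bean method lives in your existing batch @Configuration class):

// Reader.java: constructor injection instead of the @Autowired field
public class Reader implements ItemReader<String> {

    private final EmailAccountClient emailAccountClient;

    public Reader(EmailAccountClient emailAccountClient) {
        this.emailAccountClient = emailAccountClient;
    }

    // read(), getFilter() and convertToJsonString() stay exactly as in the question
}

// in the batch @Configuration class:
@Autowired
private EmailAccountClient emailAccountClient;

@Bean
public ItemReader<String> reader() {
    return new Reader(emailAccountClient);
}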
I am trying to configure inbound and outbound adapters for the manager and worker beans as provided in the Spring Batch remote partitioning samples. I am having difficulty since those samples configure them in the context of an AMQP ConnectionFactory.
However, when I follow the Spring Integration samples, there is no class which can provide a ConnectionFactory. Help appreciated.
Below is the sample code:
import com.microsoft.azure.spring.integration.core.DefaultMessageHandler;
import com.microsoft.azure.spring.integration.core.api.CheckpointConfig;
import com.microsoft.azure.spring.integration.core.api.CheckpointMode;
import com.microsoft.azure.spring.integration.servicebus.inbound.ServiceBusQueueInboundChannelAdapter;
import com.microsoft.azure.spring.integration.servicebus.queue.ServiceBusQueueOperation;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.integration.partition.RemotePartitioningManagerStepBuilderFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.util.concurrent.ListenableFutureCallback;
@Configuration
@IntegrationComponentScan
public class ManagerConfiguration {
private static final int GRID_SIZE = 3;
private static final String REQUEST_QUEUE_NAME = "digital.intg.batch.cm.request";
private static final String REPLY_QUEUE_NAME = "digital.intg.batch.cm.reply";
private static final String MANAGER_INPUT_CHANNEL = "manager.input";
private static final String MANGER_OUTPUT_CHANNEL = "manager.output";
private static final Log LOGGER = LogFactory.getLog(ManagerConfiguration.class);
private final JobBuilderFactory jobBuilderFactory;
private final RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory;
public ManagerConfiguration(JobBuilderFactory jobBuilderFactory,
RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory
) {
this.jobBuilderFactory = jobBuilderFactory;
this.managerStepBuilderFactory = managerStepBuilderFactory;
}
/*
* Configure outbound flow (requests going to workers)
*/
@Bean( name = MANGER_OUTPUT_CHANNEL )
public DirectChannel managerRequests() {
return new DirectChannel();
}
/*
* Configure inbound flow (replies coming from workers)
*/
@Bean( name = MANAGER_INPUT_CHANNEL )
public DirectChannel managerReplies() {
return new DirectChannel();
}
@Bean
public ServiceBusQueueInboundChannelAdapter managerQueueMessageChannelAdapter(
#Qualifier( MANAGER_INPUT_CHANNEL ) MessageChannel inputChannel, ServiceBusQueueOperation queueOperation) {
queueOperation.setCheckpointConfig(CheckpointConfig.builder().checkpointMode(CheckpointMode.MANUAL).build());
ServiceBusQueueInboundChannelAdapter adapter = new ServiceBusQueueInboundChannelAdapter(REPLY_QUEUE_NAME,
queueOperation);
adapter.setOutputChannel(inputChannel);
return adapter;
}
@Bean
@ServiceActivator( inputChannel = MANGER_OUTPUT_CHANNEL )
public MessageHandler managerQueueMessageSender(ServiceBusQueueOperation queueOperation) {
DefaultMessageHandler handler = new DefaultMessageHandler(REQUEST_QUEUE_NAME, queueOperation);
handler.setSendCallback(new ListenableFutureCallback<Void>() {
@Override
public void onSuccess(Void result) {
LOGGER.info("Manager Request Message was sent successfully.");
}
@Override
public void onFailure(Throwable ex) {
LOGGER.info("There was an error sending request message to worker.");
}
});
return handler;
}
@Bean
public IntegrationFlow managerOutboundFlow(MessageHandler managerQueueMessageSender) {
return IntegrationFlows
.from(managerRequests())
.handle(managerQueueMessageSender)
.get();
}
@Bean
public IntegrationFlow managerInboundFlow(ServiceBusQueueInboundChannelAdapter managerQueueMessageChannelAdapter) {
return IntegrationFlows
.from(managerQueueMessageChannelAdapter)
.channel(managerReplies())
.get();
}
/*
* Configure the manager step
*/
@Bean
public Step managerStep() {
return this.managerStepBuilderFactory.get("managerStep")
.partitioner("workerStep", new BasicPartitioner())
.gridSize(GRID_SIZE)
.outputChannel(managerRequests())
.inputChannel(managerReplies())
//.aggregator()
.build();
}
@Bean
public Job remotePartitioningJob() {
return this.jobBuilderFactory.get("remotePartitioningJob")
.start(managerStep())
.build();
}
}
The sample uses ActiveMQ because it is easily embeddable in a JVM for our tests and samples. But you can use any other broker that you want.
What should I inject here?
You should inject any dependency required by the queueMessageChannelAdapter handler:
.handle(queueMessageChannelAdapter)
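In other words, declare the handler (or adapter) as a parameter of the @Bean method and let Spring inject it, as managerOutboundFlow above already does. A worker-side equivalent might look like this (the channel name and the workerQueueMessageSender bean are assumptions mirroring the manager beans):

@Bean
public IntegrationFlow workerOutboundFlow(MessageHandler workerQueueMessageSender) {
    return IntegrationFlows
            .from("worker.output")            // hypothetical worker-side output channel
            .handle(workerQueueMessageSender) // injected by Spring, same pattern as managerQueueMessageSender
            .get();
}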