How to use ClassifierCompositeItemProcessor in Spring Batch and write data into the same table for Insert and Upsert?

I went through ClassifierCompositeItemProcessorTests (https://github.com/spring-projects/spring-batch/blob/master/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/support/ClassifierCompositeItemProcessorTests.java), but could not get much out of it.
I am trying to replace ETL Informatica mapping logic with Spring Batch. I want to route records with Status=I and Status=U to separate (individual) processors. For Status=I, the processor performs lookups, massages the data, and writes those records directly into the table. For Status=U, another processor performs more complex logic (lookups, massaging, and match-and-merge) and then upserts those records into the same table.
I have tried a POC where I segregate the records in the processor.
CustomerClassifier.java
public class CustomerClassifier implements Classifier<Customer, ItemProcessor<Customer, Customer>> {

    private ItemProcessor<Customer, Customer> insertCustomerProcessor;
    private ItemProcessor<Customer, Customer> updateCustomerProcessor;

    public CustomerClassifier(ItemProcessor<Customer, Customer> insertCustomerProcessor, ItemProcessor<Customer, Customer> updateCustomerProcessor) {
        this.insertCustomerProcessor = insertCustomerProcessor;
        this.updateCustomerProcessor = updateCustomerProcessor;
    }

    @Override
    public ItemProcessor<Customer, Customer> classify(Customer customer) {
        return customer.getStatus().equals("I") ? insertCustomerProcessor : updateCustomerProcessor;
    }
}
OddCustomerProcessor.java
public class OddCustomerProcessor implements ItemProcessor<Customer, Customer> {

    @Override
    public Customer process(Customer item) throws Exception {
        Customer customer = new Customer();
        // Perform some massaging and lookups here
        customer.setId(item.getId());
        customer.setFirstName(item.getFirstName());
        customer.setLastName(item.getLastName());
        customer.setBirthdate(item.getBirthdate());
        customer.setStatus(item.getStatus());
        return customer;
    }
}
EvenCustomerProcessor.java
public class EvenCustomerProcessor implements ItemProcessor<Customer, Customer> {

    @Override
    public Customer process(Customer item) throws Exception {
        Customer customer = new Customer();
        // Perform some massaging and lookups here
        customer.setId(item.getId());
        customer.setFirstName(item.getFirstName());
        customer.setLastName(item.getLastName());
        customer.setBirthdate(item.getBirthdate());
        customer.setStatus(item.getStatus());
        return customer;
    }
}
CustomLineAggregator.java
public class CustomLineAggregator implements LineAggregator<Customer> {

    private ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public String aggregate(Customer item) {
        try {
            return objectMapper.writeValueAsString(item);
        } catch (Exception e) {
            throw new RuntimeException("Unable to serialize Customer", e);
        }
    }
}
Customer.java
@Data
@AllArgsConstructor
@Builder
@NoArgsConstructor
public class Customer {
    private Long id;
    private String firstName;
    private String lastName;
    private String birthdate;
    private String status;
}
Error-
The method setClassifier(Classifier<? super Customer,ItemProcessor<?,? extends Customer>>) in the type ClassifierCompositeItemProcessor<Customer,Customer> is not applicable for the
arguments (CustomerClassifier)
Configuration
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    public JdbcPagingItemReader<Customer> customerPagingItemReader() {
        // reading database records using JDBC in a paging fashion
        JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(1000);
        reader.setRowMapper(new CustomerRowMapper());
        // Sort Keys
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("id", Order.ASCENDING);
        // MySQL implementation of a PagingQueryProvider using database specific features.
        MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
        // include status so the classifier can route on it
        queryProvider.setSelectClause("id, firstName, lastName, birthdate, status");
        queryProvider.setFromClause("from customer");
        queryProvider.setSortKeys(sortKeys);
        reader.setQueryProvider(queryProvider);
        return reader;
    }

    @Bean
    public EvenCustomerProcessor evenCustomerProcessor() {
        return new EvenCustomerProcessor();
    }

    @Bean
    public OddCustomerProcessor oddCustomerProcessor() {
        return new OddCustomerProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<Customer> customerItemWriter() {
        JdbcBatchItemWriter<Customer> batchItemWriter = new JdbcBatchItemWriter<>();
        batchItemWriter.setDataSource(dataSource);
        batchItemWriter.setSql(""); // Query goes here
        return batchItemWriter;
    }

    @Bean
    public ClassifierCompositeItemProcessor<Customer, Customer> classifierCustomerCompositeItemProcessor() throws Exception {
        ClassifierCompositeItemProcessor<Customer, Customer> itemProcessor = new ClassifierCompositeItemProcessor<>();
        itemProcessor.setClassifier(new CustomerClassifier(evenCustomerProcessor(), oddCustomerProcessor()));
        return itemProcessor;
    }

    @Bean
    public Step step1() throws Exception {
        return stepBuilderFactory.get("step1")
                .<Customer, Customer>chunk(10)
                .reader(customerPagingItemReader())
                .processor(classifierCustomerCompositeItemProcessor())
                .writer(customerItemWriter())
                .build();
    }

    @Bean
    public Job job() throws Exception {
        return jobBuilderFactory.get("job")
                .start(step1())
                .build();
    }
}

You can remove the CustomerClassifier and define the composite item processor as follows:
@Bean
public ClassifierCompositeItemProcessor<Customer, Customer> classifierCustomerCompositeItemProcessor(
        EvenCustomerProcessor evenCustomerProcessor,
        OddCustomerProcessor oddCustomerProcessor) {
    ClassifierCompositeItemProcessor<Customer, Customer> itemProcessor = new ClassifierCompositeItemProcessor<>();
    itemProcessor.setClassifier(new Classifier<Customer, ItemProcessor<?, ? extends Customer>>() {
        @Override
        public ItemProcessor<?, ? extends Customer> classify(Customer customer) {
            return customer.getStatus().equals("I") ? evenCustomerProcessor : oddCustomerProcessor;
        }
    });
    return itemProcessor;
}
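Alternatively, you can keep a dedicated classifier class: the compile error goes away once the class implements the generic signature that setClassifier expects (compare the signature quoted in the error message). A sketch along those lines:
public class CustomerClassifier implements Classifier<Customer, ItemProcessor<?, ? extends Customer>> {

    private final ItemProcessor<Customer, Customer> insertCustomerProcessor;
    private final ItemProcessor<Customer, Customer> updateCustomerProcessor;

    public CustomerClassifier(ItemProcessor<Customer, Customer> insertCustomerProcessor,
            ItemProcessor<Customer, Customer> updateCustomerProcessor) {
        this.insertCustomerProcessor = insertCustomerProcessor;
        this.updateCustomerProcessor = updateCustomerProcessor;
    }

    @Override
    public ItemProcessor<?, ? extends Customer> classify(Customer customer) {
        // the return type is widened to match what ClassifierCompositeItemProcessor.setClassifier accepts
        return customer.getStatus().equals("I") ? insertCustomerProcessor : updateCustomerProcessor;
    }
}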
Then update your step definition as follows:
@Bean
public Step step1() throws Exception {
    return stepBuilderFactory.get("step1")
            .<Customer, Customer>chunk(10)
            .reader(customerPagingItemReader())
            .processor(classifierCustomerCompositeItemProcessor(evenCustomerProcessor(), oddCustomerProcessor()))
            .writer(customerItemWriter())
            .build();
}
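For the writing side of the original requirement (plain insert for Status=I, upsert for Status=U into the same table), the same classification idea applies to writers via ClassifierCompositeItemWriter. A minimal sketch, assuming the autowired DataSource from the configuration above; the table name, column names, and MySQL-style ON DUPLICATE KEY syntax are placeholders to adapt:
@Bean
public JdbcBatchItemWriter<Customer> insertCustomerWriter() {
    JdbcBatchItemWriter<Customer> writer = new JdbcBatchItemWriter<>();
    writer.setDataSource(dataSource);
    // plain insert for Status=I
    writer.setSql("INSERT INTO customer (id, firstName, lastName, birthdate, status) "
            + "VALUES (:id, :firstName, :lastName, :birthdate, :status)");
    writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
    return writer;
}

@Bean
public JdbcBatchItemWriter<Customer> upsertCustomerWriter() {
    JdbcBatchItemWriter<Customer> writer = new JdbcBatchItemWriter<>();
    writer.setDataSource(dataSource);
    // MySQL-style upsert for Status=U; replace with MERGE or whatever your match-and-merge logic needs
    writer.setSql("INSERT INTO customer (id, firstName, lastName, birthdate, status) "
            + "VALUES (:id, :firstName, :lastName, :birthdate, :status) "
            + "ON DUPLICATE KEY UPDATE firstName = :firstName, lastName = :lastName, birthdate = :birthdate, status = :status");
    writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
    return writer;
}

@Bean
public ClassifierCompositeItemWriter<Customer> classifierCustomerCompositeItemWriter(
        JdbcBatchItemWriter<Customer> insertCustomerWriter,
        JdbcBatchItemWriter<Customer> upsertCustomerWriter) {
    ClassifierCompositeItemWriter<Customer> writer = new ClassifierCompositeItemWriter<>();
    writer.setClassifier(customer -> customer.getStatus().equals("I")
            ? insertCustomerWriter : upsertCustomerWriter);
    return writer;
}
The step's writer then becomes the composite writer instead of customerItemWriter().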

Related

How to run a job multiple times in parallel with different excels as input in Spring Batch

I have a use case where users upload different Excel files. Each file is processed in parallel, and every row of the Excel needs to be saved to an H2 database together with the job execution id.
The issue I am facing: when a user uploads the first file and its rows are still being saved in the background with job execution id 1, and then uploads another Excel file with different data before the first one completes, the rows from the first Excel also start getting saved with the latest job execution id, which is 2. How can I ensure that each job's data is saved with that job's own execution id while the jobs run in parallel? (Screenshots of the two input sheets and the resulting H2 output were attached to the original post.)
This is the service class:
@Service
public class BatchTestService {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JobLauncher batchJobLauncher;

    @Autowired
    private WriterImpl writer;

    public Job job(byte data[]) {
        return jobBuilderFactory.get("job")
                .incrementer(new RunIdIncrementer())
                .flow(step(data))
                .end()
                .build();
    }

    @SneakyThrows
    public PoiItemReader<TestEntity> reader(byte[] data) {
        ReaderImpl reader = new ReaderImpl();
        reader.setLinesToSkip(1);
        reader.setResource(toResource(data, "TEST"));
        reader.setRowMapper(new MapperClass());
        return reader;
    }

    public Step step(byte data[]) {
        return stepBuilderFactory.get("step").<TestEntity, TestEntity>chunk(2)
                .reader(reader(data))
                .writer(writer)
                .build();
    }

    public ThreadPoolTaskExecutor getThreadPoolTaskExecutor() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(2);
        taskExecutor.setMaxPoolSize(4);
        taskExecutor.setThreadNamePrefix("test");
        taskExecutor.afterPropertiesSet();
        return taskExecutor;
    }

    public void uploadExcel(MultipartFile file) throws Exception {
        String jobId = String.valueOf(System.currentTimeMillis());
        JobParameters parameters = new JobParametersBuilder().addString("jobId", jobId)
                .toJobParameters();
        ((SimpleJobLauncher) batchJobLauncher).setTaskExecutor(getThreadPoolTaskExecutor());
        batchJobLauncher.run(job(file.getBytes()), parameters);
    }

    public static Resource toResource(byte bytesFile[], String sheetName) throws IOException {
        ByteArrayInputStream bin = new ByteArrayInputStream(bytesFile);
        XSSFWorkbook workbook = new XSSFWorkbook(bin);
        var outputStream = new ByteArrayOutputStream();
        workbook.write(outputStream);
        return new ByteArrayResource(outputStream.toByteArray());
    }
}
This is the config class.
@Configuration
public class BatchDataSourceConfig {

    @Value("${spring.datasource.driver-class-name}")
    private String driverName;

    @Value("${spring.datasource.url}")
    private String url;

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driverName);
        dataSource.setUrl(url);
        dataSource.setUsername("sa");
        dataSource.setPassword("");
        return dataSource;
    }

    @Bean
    public JobLauncher batchJobLauncher(JobRepository jobRepository) {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository);
        return jobLauncher;
    }
}
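As an aside, uploadExcel in the service above casts the shared JobLauncher to SimpleJobLauncher and mutates its task executor on every request. It is cleaner (and avoids concurrently mutating a shared bean) to configure the executor once on the launcher bean; a sketch, assuming the ThreadPoolTaskExecutor built in the service is exposed as a bean:
@Bean
public JobLauncher batchJobLauncher(JobRepository jobRepository, ThreadPoolTaskExecutor taskExecutor) {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    // configured once, so jobs launched from uploadExcel run asynchronously without the cast
    jobLauncher.setTaskExecutor(taskExecutor);
    return jobLauncher;
}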
This is the reader class:
public class ReaderImpl extends PoiItemReader<TestEntity> {}
This is the writer class:
@Component
public class WriterImpl implements ItemWriter<TestEntity> {

    private static Logger logger = LoggerFactory.getLogger(WriterImpl.class);

    @Autowired
    private TestEntityRepository testEntityRepository;

    private StepExecution stepExecution;

    @BeforeStep
    public void beforeStep(final StepExecution stepExecution) {
        this.stepExecution = stepExecution;
    }

    @Override
    @SneakyThrows
    public void write(List<? extends TestEntity> modelObjectList) {
        logger.info("Writer is reached...");
        Thread.sleep(3000);
        for (TestEntity testEntity : modelObjectList) {
            testEntity.setJobExecutionId(stepExecution.getJobExecutionId());
            testEntityRepository.save(testEntity);
        }
    }
}
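A likely cause of the mixed-up ids: WriterImpl is a singleton shared by both concurrently running jobs, so the stepExecution captured in beforeStep is overwritten by whichever job starts last. A sketch of a step-scoped variant that late-binds the job execution id, so each running step gets its own writer instance (a suggested fix, not code from the original post):
@Component
@StepScope
public class WriterImpl implements ItemWriter<TestEntity> {

    @Autowired
    private TestEntityRepository testEntityRepository;

    // late-bound from this step's own execution, so parallel jobs no longer share one field
    @Value("#{stepExecution.jobExecution.id}")
    private Long jobExecutionId;

    @Override
    public void write(List<? extends TestEntity> modelObjectList) {
        for (TestEntity testEntity : modelObjectList) {
            testEntity.setJobExecutionId(jobExecutionId);
            testEntityRepository.save(testEntity);
        }
    }
}
Since @StepScope creates a proxy for the bean, the service can keep autowiring WriterImpl exactly as before.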
The respective RowMapper class is also defined:
public class MapperClass implements RowMapper<TestEntity> {

    @Override
    public TestEntity mapRow(RowSet rowSet) {
        TestEntity testEntity = new TestEntity();
        testEntity.setStudentName(rowSet.getColumnValue(0));
        testEntity.setRollNo(rowSet.getColumnValue(1));
        testEntity.setSection(rowSet.getColumnValue(2));
        return testEntity;
    }
}
This is the model class
@AllArgsConstructor
@Data
@Entity
@NoArgsConstructor
@Table(name = "TEST_ENTITY")
public class TestEntity {

    @GeneratedValue(strategy = GenerationType.AUTO)
    @Id
    private Integer id;

    private String studentName;
    private String rollNo;
    private String section;
    private Long jobExecutionId;
}

How can we take the result of `MethodInvokingTaskletAdapter` as a reader in the Spring Batch Step?

How can we take the result of MethodInvokingTaskletAdapter as a reader in the Spring Batch Step? Reference - https://docs.spring.io/spring-batch/docs/current/reference/html/index-single.html#taskletStep and https://github.com/spring-projects/spring-batch/pull/567
Here is the code that I developed
JobConfiguration.java
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public CustomService service() {
        return new CustomService();
    }

    @StepScope
    @Bean
    public MethodInvokingTaskletAdapter methodInvokingTasklet() {
        MethodInvokingTaskletAdapter methodInvokingTaskletAdapter = new MethodInvokingTaskletAdapter();
        methodInvokingTaskletAdapter.setTargetObject(service());
        methodInvokingTaskletAdapter.setTargetMethod("getEmployees");
        return methodInvokingTaskletAdapter;
    }

    @Bean
    public Job methodInvokingJob() {
        return this.jobBuilderFactory.get("methodInvokingJob")
                .start(methodInvokingStep())
                .build();
    }

    @Bean
    public Step methodInvokingStep() {
        // Looking to configure a chunk-based step here; don't know how to do that using MethodInvokingTaskletAdapter
        return this.stepBuilderFactory.get("methodInvokingStep")
                .tasklet(methodInvokingTasklet())
                .build();
    }
}
CustomService.java
public class CustomService {

    public void serviceMethod(String message) {
        System.out.println(message);
    }

    public void invokeMethod() {
        System.out.println("=============== Your method has executed !");
    }

    public List<Employee> getEmployees() {
        // In the real world, this will be a GET API call to the XYZ system
        List<Employee> employees = new ArrayList<>();
        employees.add(Employee.builder().firstName("Ravi").lastName("Shankar").email("ravi.shankar@gmail.com").age(30).build());
        employees.add(Employee.builder().firstName("Parag").lastName("Rane").email("parag.rane@gmail.com").age(11).build());
        employees.add(Employee.builder().firstName("Priya").lastName("Pande").email("priya.pande@gmail.com").age(40).build());
        employees.add(Employee.builder().firstName("Kiran").lastName("khot").email("kiran.khot@gmail.com").age(50).build());
        return employees;
    }
}
Employee.java
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Employee {
    private String firstName;
    private String lastName;
    private String email;
    private int age;
}
MethodInvokingTaskletApplication.java
@EnableBatchProcessing
@SpringBootApplication
public class MethodInvokingTaskletApplication {

    public static void main(String[] args) {
        SpringApplication.run(MethodInvokingTaskletApplication.class, args);
    }
}
To answer your question, you can't. The MethodInvokingTaskletAdapter is meant to adapt a POJO to a Tasklet. We have an ItemReaderAdapter that you can use to adapt a POJO to an ItemReader. You can read about it in the documentation here: https://docs.spring.io/spring-batch/docs/current/api/org/springframework/batch/item/adapter/ItemReaderAdapter.html
Now you'll have an issue with your service as configured in that each call to the delegating POJO is considered an item. That means that your item as configured will be a List<Employee> instead of just an Employee. Given your configuration states it's not the real service, I'll assume that your real service should return an Employee per call and null once the results are exhausted.
Updating the configuration in your sample (with your service as it is configured in your question):
...
@StepScope
@Bean
public ItemReaderAdapter<List<Employee>> itemReader() {
    ItemReaderAdapter<List<Employee>> reader = new ItemReaderAdapter<>();
    reader.setTargetObject(service());
    reader.setTargetMethod("getEmployees");
    return reader;
}

@Bean
public Job methodInvokingJob() {
    return this.jobBuilderFactory.get("methodInvokingJob")
            .start(methodInvokingStep())
            .build();
}

@Bean
public Step methodInvokingStep() {
    return this.stepBuilderFactory.get("methodInvokingStep")
            .<List<Employee>, List<Employee>>chunk(10) // chunk takes an item count, not a tasklet
            .reader(itemReader())
            // You'll need to define a writer...
            .writer(itemWriter())
            .build();
}
...
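For completeness, here is a sketch of the "one Employee per call, null when exhausted" contract the answer assumes (this iterator-based variant is illustrative, not the original service); with it, the adapter's target method becomes nextEmployee and the chunk generics become <Employee, Employee>:
public class CustomService {

    private Iterator<Employee> iterator;

    public Employee nextEmployee() {
        if (iterator == null) {
            // in the real world, this is where the GET API call to the XYZ system would happen
            iterator = getEmployees().iterator();
        }
        // returning null signals Spring Batch that the input is exhausted
        return iterator.hasNext() ? iterator.next() : null;
    }

    public List<Employee> getEmployees() {
        List<Employee> employees = new ArrayList<>();
        employees.add(Employee.builder().firstName("Ravi").lastName("Shankar").email("ravi.shankar@gmail.com").age(30).build());
        employees.add(Employee.builder().firstName("Parag").lastName("Rane").email("parag.rane@gmail.com").age(11).build());
        return employees;
    }
}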

Records are not written in files when invoked from BillerOrderWriter which implements ItemWriter in Spring Batch

I am trying to write successful records using one writer and failed records using another writer.
I have written a BillerOrderWriter class which implements ItemWriter. I added some log statements and I can see it log the success billerOrderId or failed billerOrderId, but it seems it never invokes DatabaseToCsvFileJobConfig or SuccessfulOrdersToCsvFileJobConfig.
public class BillerOrderWriter implements ItemWriter<BillerOrder> {

    private static Logger log = LoggerFactory.getLogger(BillerOrderWriter.class);

    @Autowired
    SuccessfulOrdersToCsvFileJobConfig successfulOrdersToCsvFileJobConfig;

    @Autowired
    DatabaseToCsvFileJobConfig databaseToCsvFileJobConfig;

    @Override
    public void write(List<? extends BillerOrder> items) throws Exception {
        for (BillerOrder item : items) {
            log.info("item = " + item.toString());
            if (item.getResult().equals("SUCCESS")) {
                log.info("Success billerOrderId = " + item.getBillerOrderId());
                successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter();
            } else {
                log.info("Failed billerOrderId = " + item.getBillerOrderId());
                databaseToCsvFileJobConfig.databaseCsvItemWriter();
            }
        }
    }
}
Here is the BatchConfig class:
@Bean
public BillerOrderWriter billerOrderWriter() {
    return new BillerOrderWriter();
}

@Bean
public Job importJobOrder(JobCompletionNotificationListner listener, Step step1) {
    return jobBuilderFactory.get("importJobOrder")
            .incrementer(new RunIdIncrementer())
            .listener(listener)
            .flow(step1)
            .end()
            .build();
}

@Bean(name = "step1")
public Step step1(BillerOrderWriter billerOrderWriter) {
    return stepBuilderFactory.get("step1")
            .<BillerOrder, BillerOrder>chunk(10)
            .reader((ItemReader<? extends BillerOrder>) reader())
            .processor(processor())
            .writer(billerOrderWriter)
            .build();
}
Here are my success writer and failed writer classes:
@Configuration
public class SuccessfulOrdersToCsvFileJobConfig {

    private static Logger log = LoggerFactory.getLogger(SuccessfulOrdersToCsvFileJobConfig.class);

    @Bean
    public FlatFileItemWriter<BillerOrder> successfulDatabaseCsvItemWriter() {
        log.info("Entering SuccessfulOrdersToCsvFileJobConfig...");
        FlatFileItemWriter<BillerOrder> csvFileWriter = new FlatFileItemWriter<>();
        String exportFileHeader = "BillerOrderId;SuccessMessage";
        OrderWriter headerWriter = new OrderWriter(exportFileHeader);
        csvFileWriter.setHeaderCallback(headerWriter);
        String exportFilePath = "/tmp/SuccessBillerOrderIdForRetry.csv";
        csvFileWriter.setResource(new FileSystemResource(exportFilePath));
        LineAggregator<BillerOrder> lineAggregator = createOrderLineAggregator();
        csvFileWriter.setLineAggregator(lineAggregator);
        return csvFileWriter;
    }

    private LineAggregator<BillerOrder> createOrderLineAggregator() {
        log.info("Entering createOrderLineAggregator...");
        DelimitedLineAggregator<BillerOrder> lineAggregator = new DelimitedLineAggregator<>();
        lineAggregator.setDelimiter(";");
        FieldExtractor<BillerOrder> fieldExtractor = createOrderFieldExtractor();
        lineAggregator.setFieldExtractor(fieldExtractor);
        return lineAggregator;
    }

    private FieldExtractor<BillerOrder> createOrderFieldExtractor() {
        log.info("Entering createOrderFieldExtractor...");
        BeanWrapperFieldExtractor<BillerOrder> extractor = new BeanWrapperFieldExtractor<>();
        extractor.setNames(new String[] {"billerOrderId", "successMessage"});
        return extractor;
    }
}
@Configuration
public class DatabaseToCsvFileJobConfig {

    private static Logger log = LoggerFactory.getLogger(DatabaseToCsvFileJobConfig.class);

    @Bean
    public FlatFileItemWriter<BillerOrder> databaseCsvItemWriter() {
        log.info("Entering databaseCsvItemWriter...");
        FlatFileItemWriter<BillerOrder> csvFileWriter = new FlatFileItemWriter<>();
        String exportFileHeader = "BillerOrderId;ErrorMessage";
        OrderWriter headerWriter = new OrderWriter(exportFileHeader);
        csvFileWriter.setHeaderCallback(headerWriter);
        String exportFilePath = "/tmp/FailedBillerOrderIdForRetry.csv";
        csvFileWriter.setResource(new FileSystemResource(exportFilePath));
        LineAggregator<BillerOrder> lineAggregator = createOrderLineAggregator();
        csvFileWriter.setLineAggregator(lineAggregator);
        return csvFileWriter;
    }

    private LineAggregator<BillerOrder> createOrderLineAggregator() {
        log.info("Entering createOrderLineAggregator...");
        DelimitedLineAggregator<BillerOrder> lineAggregator = new DelimitedLineAggregator<>();
        lineAggregator.setDelimiter(";");
        FieldExtractor<BillerOrder> fieldExtractor = createOrderFieldExtractor();
        lineAggregator.setFieldExtractor(fieldExtractor);
        return lineAggregator;
    }

    private FieldExtractor<BillerOrder> createOrderFieldExtractor() {
        log.info("Entering createOrderFieldExtractor...");
        BeanWrapperFieldExtractor<BillerOrder> extractor = new BeanWrapperFieldExtractor<>();
        extractor.setNames(new String[] {"billerOrderId", "errorMessage"});
        return extractor;
    }
}
Here is my job completion listener class.
@Component
public class JobCompletionNotificationListner extends JobExecutionListenerSupport {

    private static final org.slf4j.Logger log = LoggerFactory.getLogger(JobCompletionNotificationListner.class);

    @Override
    public void afterJob(JobExecution jobExecution) {
        log.info("In afterJob ...");
        if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
            DatabaseToCsvFileJobConfig databaseToCsvFileJobConfig = new DatabaseToCsvFileJobConfig();
            SuccessfulOrdersToCsvFileJobConfig successfulOrdersToCsvFileJobConfig = new SuccessfulOrdersToCsvFileJobConfig();
        }
    }
}
Your BillerOrderWriter#write method is supposed to contain the code that does the actual write operation of items to a data sink. In your case, however, you are calling successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter() and databaseToCsvFileJobConfig.databaseCsvItemWriter(), which merely create item writer beans. You should inject those delegate writers and call their write method when needed, something like:
public class BillerOrderWriter implements ItemWriter<BillerOrder> {

    private ItemWriter<BillerOrder> successfulDatabaseCsvItemWriter;
    private ItemWriter<BillerOrder> databaseCsvItemWriter;

    public BillerOrderWriter(ItemWriter<BillerOrder> successfulDatabaseCsvItemWriter,
            ItemWriter<BillerOrder> databaseCsvItemWriter) {
        this.successfulDatabaseCsvItemWriter = successfulDatabaseCsvItemWriter;
        this.databaseCsvItemWriter = databaseCsvItemWriter;
    }

    @Override
    public void write(List<? extends BillerOrder> items) throws Exception {
        for (BillerOrder item : items) {
            if (item.getResult().equals("SUCCESS")) {
                successfulDatabaseCsvItemWriter.write(Collections.singletonList(item));
            } else {
                databaseCsvItemWriter.write(Collections.singletonList(item));
            }
        }
    }
}
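One caveat with this delegation approach: FlatFileItemWriter delegates are also ItemStreams, so they must be opened before they can be written to. Either register them as streams on the step (as in the follow-up below) or open and close them manually; otherwise the write calls fail with a WriterNotOpenException.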
Instead of BillerOrderWriter, I wrote a BillerOrderClassifier class.
public class BillerOrderClassifier implements Classifier<BillerOrder, ItemWriter<? super BillerOrder>> {

    private static final long serialVersionUID = 1L;

    private ItemWriter<BillerOrder> successItemWriter;
    private ItemWriter<BillerOrder> failedItemWriter;

    public BillerOrderClassifier(ItemWriter<BillerOrder> successItemWriter, ItemWriter<BillerOrder> failedItemWriter) {
        this.successItemWriter = successItemWriter;
        this.failedItemWriter = failedItemWriter;
    }

    @Override
    public ItemWriter<? super BillerOrder> classify(BillerOrder billerOrder) {
        return billerOrder.getResult().equals("SUCCESS") ? successItemWriter : failedItemWriter;
    }
}
In BatchConfiguration, I wrote a classifierBillerOrderCompositeItemWriter method.
@Bean
public ClassifierCompositeItemWriter<BillerOrder> classifierBillerOrderCompositeItemWriter() throws Exception {
    ClassifierCompositeItemWriter<BillerOrder> compositeItemWriter = new ClassifierCompositeItemWriter<>();
    compositeItemWriter.setClassifier(new BillerOrderClassifier(
            successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter(),
            databaseToCsvFileJobConfig.databaseCsvItemWriter()));
    return compositeItemWriter;
}

@Bean(name = "step1")
public Step step1() throws Exception {
    return stepBuilderFactory.get("step1")
            .<BillerOrder, BillerOrder>chunk(10)
            .reader((ItemReader<? extends BillerOrder>) reader())
            .processor(processor())
            .writer(classifierBillerOrderCompositeItemWriter())
            .stream(successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter())
            .stream(databaseToCsvFileJobConfig.databaseCsvItemWriter())
            .build();
}
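The .stream(...) registrations are the important part here: ClassifierCompositeItemWriter does not implement ItemStream, so the step cannot open and close the delegate FlatFileItemWriters on its own. Registering them as streams (which compiles because the writer beans are declared as FlatFileItemWriter, an ItemStream) ensures the files are opened at step start and flushed and closed at step end.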

The FlatFileItemReader reads only one line from the CSV file - Spring Batch

I'm creating a Spring Batch job to populate data into a database table from a given CSV file.
I created a customized FlatFileItemReader.
My problem is that the read() method is invoked only once, so only the first line of my CSV file is inserted into the database.
@Configuration
@EnableBatchProcessing
public class SpringBatchConfig {

    private MultipartFile[] files;

    @Bean
    public Job job(JobBuilderFactory jobBuilderFactory, StepBuilderFactory stepBuilderFactory,
            ItemReader<MyModelEntity> itemReader,
            ItemWriter<MyModelEntity> itemWriter) {
        Step step = stepBuilderFactory.get("Load-CSV-file_STP")
                .<MyModelEntity, MyModelEntity>chunk(12)
                .reader(itemReader)
                .writer(itemWriter)
                .build();
        return jobBuilderFactory.get("Load-CSV-Files")
                .incrementer(new RunIdIncrementer())
                .start(step)
                .build();
    }

    @Bean
    ItemReader<MyModelEntity> myModelCsvReader() throws Exception {
        return new MyModelCsvReader();
    }
}
The MyModelCsvReader:
@Component
@StepScope
public class MyModelCsvReader implements ItemReader<MyModelEntity> {

    @Value("#{jobParameters['SDH']}")
    private String sdhPath;

    private boolean batchJobState = false;

    @Autowired
    MyModelFieldSetMapper myModelFieldSetMapper;

    public LineMapper<MyModelEntity> lineMapper() throws Exception {
        DefaultLineMapper<MyModelEntity> defaultLineMapper = new DefaultLineMapper<MyModelEntity>();
        DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
        lineTokenizer.setDelimiter(",");
        lineTokenizer.setStrict(false);
        lineTokenizer.setNames(new String[] {
            "clientId", "ddId", "institName", "progName",
            "qual", "startDate", "endDate", "eType", "country", "comments"
        });
        defaultLineMapper.setLineTokenizer(lineTokenizer);
        defaultLineMapper.setFieldSetMapper(myModelFieldSetMapper);
        return defaultLineMapper;
    }

    @Override
    public MyModelEntity read()
            throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
        //if(!batchJobState)
        {
            FlatFileItemReader<MyModelEntity> flatFileItemReader = new FlatFileItemReader<MyModelEntity>();
            flatFileItemReader.setMaxItemCount(2000);
            flatFileItemReader.setResource(new UrlResource("file:\\" + sdhPath));
            flatFileItemReader.setName("CSV-Reader");
            flatFileItemReader.setLinesToSkip(1);
            flatFileItemReader.setLineMapper(lineMapper());
            flatFileItemReader.open(new ExecutionContext());
            batchJobState = true;
            return flatFileItemReader.read();
        }
        // return null;
    }
}
The FieldSetMapper implementation:
@Component
public class MyModelFieldSetMapper implements FieldSetMapper<MyModelEntity> {

    //private SiteService siteService = BeanUtil.getBean(SiteServiceImpl.class);
    @Autowired
    private SiteService siteService;

    @Override
    public MyModelEntity mapFieldSet(FieldSet fieldSet) throws BindException {
        if (fieldSet == null) {
            return null;
        }
        MyModelEntity myModel = new MyModelEntity();
        // setting MyModel attribute values
        return myModel;
    }
}
Any contribution is welcome. Thanks.
// the reader, after extending FlatFileItemReader
@Component
@StepScope
public class CustomUserItemReader extends FlatFileItemReader<User> {

    @Value("#{jobParameters['UserCSVPath']}")
    private String UserCSVPath;

    private boolean batchJobState;

    public CustomUserItemReader() throws Exception {
        super();
        // note: @Value fields are not injected yet when the constructor runs
        setResource(new UrlResource("file:\\" + UserCSVPath));
        setLineMapper(lineMapper());
        afterPropertiesSet();
        setStrict(false);
    }

    public LineMapper<User> lineMapper() throws Exception {
        DefaultLineMapper<User> defaultLineMapper = new DefaultLineMapper<User>();
        DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
        lineTokenizer.setDelimiter(",");
        lineTokenizer.setStrict(false);
        lineTokenizer.setNames(new String[] {"name", "dept", "salary", "endDate"});
        defaultLineMapper.setLineTokenizer(lineTokenizer);
        defaultLineMapper.setFieldSetMapper(new CustomUserFieldSetMapper());
        return defaultLineMapper;
    }

    @Override
    public User read()
            throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException {
        //if(!batchJobState)
        {
            this.setResource(new UrlResource("file:\\" + UserCSVPath));
            this.setName("CSV-Reader");
            this.setLinesToSkip(1);
            this.open(new ExecutionContext());
            User e = super.read(); // super.read(), otherwise this method would recurse on itself
            batchJobState = true;
            return e;
        }
        // return null;
    }

    public String getUserCSVPath() {
        return UserCSVPath;
    }

    public void setUserCSVPath(String userCSVPath) {
        UserCSVPath = userCSVPath;
    }
}
Thanks for all your suggestions. Even though I had implemented ItemReader<>, I fixed the problem by moving the instantiation of the FlatFileItemReader out of the read() method.
Creating a new FlatFileItemReader on every call meant each one read only the first line of the file.
Thanks.
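For reference, a common way to avoid this class of bug entirely is to not implement read() at all: declare the FlatFileItemReader itself as a step-scoped bean and let Spring Batch open, iterate, and close it. A sketch reusing the names from the question:
@Bean
@StepScope
public FlatFileItemReader<MyModelEntity> myModelCsvReader(
        @Value("#{jobParameters['SDH']}") String sdhPath,
        MyModelFieldSetMapper myModelFieldSetMapper) {
    DelimitedLineTokenizer lineTokenizer = new DelimitedLineTokenizer();
    lineTokenizer.setDelimiter(",");
    lineTokenizer.setStrict(false);
    lineTokenizer.setNames(new String[] {
        "clientId", "ddId", "institName", "progName",
        "qual", "startDate", "endDate", "eType", "country", "comments"
    });
    DefaultLineMapper<MyModelEntity> lineMapper = new DefaultLineMapper<>();
    lineMapper.setLineTokenizer(lineTokenizer);
    lineMapper.setFieldSetMapper(myModelFieldSetMapper);

    FlatFileItemReader<MyModelEntity> reader = new FlatFileItemReader<>();
    reader.setResource(new FileSystemResource(sdhPath));
    reader.setLinesToSkip(1);
    reader.setLineMapper(lineMapper);
    return reader;
}
Because the reader is registered through .reader(...) on the step, Spring Batch opens it once, calls read() until it returns null, and closes it, so every line of the file is processed.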

Axon: org.axonframework.commandhandling.NoHandlerForCommandException: No node known to accept

When trying to implement a DistributedCommandBus using Spring Cloud, I am getting the following error intermittently. I have reason to believe that there is some sort of race condition happening with the auto-configuration of my aggregate root class, its command handlers, and my configuration bean class.
org.axonframework.commandhandling.NoHandlerForCommandException: No
node known to accept.
I am using Axon Version 3.3.5.
Here is my configuration class:
@Configuration
@AutoConfigureBefore(CustomerAggregate.class)
public class AxonConfig {

    @Value("${mongo.servers}")
    private String mongoUrl;

    @Value("${mongo.db}")
    private String mongoDbName;

    @Value("${axon.events.collection.name}")
    private String eventsCollectionName;

    @Value("${axon.snapshot.collection.name}")
    private String snapshotCollectionName;

    @Value("${axon.saga.collection.name}")
    private String sagaCollectionName;

    @Bean
    @Primary
    public CommandGateway commandGateway(@Qualifier("distributedBus") DistributedCommandBus commandBus) throws Exception {
        return new DefaultCommandGateway(commandBus, new IntervalRetryScheduler(Executors.newSingleThreadScheduledExecutor(), 1000, 10));
    }

    @Bean
    @Primary
    @Qualifier("springCloudRouter")
    public CommandRouter springCloudCommandRouter(DiscoveryClient client, Registration localServiceInstance) {
        return new SpringCloudCommandRouter(client, localServiceInstance, new AnnotationRoutingStrategy());
    }

    @Bean
    @Primary
    @Qualifier("springCloudConnector")
    public SpringHttpCommandBusConnector connector() {
        return new SpringHttpCommandBusConnector(new SimpleCommandBus(), new RestTemplate(), new JacksonSerializer());
    }

    @Bean
    @Primary
    @Qualifier("distributedBus")
    public DistributedCommandBus springCloudDistributedCommandBus(@Qualifier("springCloudRouter") CommandRouter router) {
        return new DistributedCommandBus(router, connector());
    }

    @Bean
    @Primary
    public AggregateFactory<CustomerAggregate> aggregateFactory() {
        return new GenericAggregateFactory<CustomerAggregate>(CustomerAggregate.class);
    }

    @Bean
    @Primary
    public EventCountSnapshotTriggerDefinition countSnapshotTriggerDefinition() {
        return new EventCountSnapshotTriggerDefinition(snapShotter(), 3);
    }

    @Bean
    @Primary
    public Snapshotter snapShotter() {
        return new AggregateSnapshotter(eventStore(), aggregateFactory());
    }

    @Bean
    @Primary
    public EventSourcingRepository<CustomerAggregate> customerAggregateRepository() {
        return new EventSourcingRepository<>(aggregateFactory(), eventStore(), countSnapshotTriggerDefinition());
    }

    @Bean(name = "axonMongoTemplate")
    public MongoTemplate axonMongoTemplate() {
        return new DefaultMongoTemplate(mongoClient(), mongoDbName)
                .withDomainEventsCollection(eventsCollectionName)
                .withSnapshotCollection(snapshotCollectionName)
                .withSagasCollection(sagaCollectionName);
    }

    @Bean
    public MongoClient mongoClient() {
        MongoFactory mongoFactory = new MongoFactory();
        mongoFactory.setMongoAddresses(Arrays.asList(new ServerAddress(mongoUrl)));
        return mongoFactory.createMongo();
    }

    @Bean
    @Primary
    public MongoEventStorageEngine engine() {
        return new MongoEventStorageEngine(new JacksonSerializer(), null, axonMongoTemplate(), new DocumentPerEventStorageStrategy());
    }

    @Bean
    @Primary
    public EventStore eventStore() {
        return new EmbeddedEventStore(engine());
    }
}
And here is my aggregate class with command handlers:
@Aggregate(repository = "customerAggregateRepository")
public class CustomerAggregate {

    Logger logger = LoggerFactory.getLogger(this.getClass());

    @AggregateIdentifier
    private String id;
    private String firstName;
    private String lastName;
    private String email;

    private CustomerAggregate() {}

    public String getId() {
        return id;
    }

    public String getFirstName() {
        return firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public String getEmail() {
        return email;
    }

    @CommandHandler
    public CustomerAggregate(CreateCustomer cmd) {
        logger.debug("Received creation command: " + cmd.toString());
        apply(new CustomerCreated(cmd.getId(), cmd.getFirstName(), cmd.getLastName(), cmd.getEmail()));
    }

    @CommandHandler
    public void on(UpdateCustomer cmd) {
        logger.debug("Received update command: " + cmd.toString());
        apply(new CustomerUpdated(this.id, cmd.getFirstName(), cmd.getLastName(), cmd.getEmail()));
    }

    @CommandHandler
    public void on(UpdateCustomerEmail cmd) {
        logger.debug("Received update command for existing customer: " + cmd.toString());
        apply(new CustomerUpdated(cmd.getId(), this.firstName, this.lastName, cmd.getEmail()));
    }

    // Various event handlers...
}
Any help is much appreciated.
