Spring Batch not invoking ItemReader after Partitioner execution - spring

I would like to configure Spring Batch using a partitioner. The Partitioner will partition a list of data into multiple chunks, and the ItemReader will process that data. But in my case, after successfully invoking the Partitioner, it does not invoke the ItemReader's read() method. Below are the code snippets. Could you please let me know what's wrong?
<!-- Partitioned Spring Batch job: cyclePartitioner builds one ExecutionContext
     per data chunk; each partition runs the anonymous chunk step below. -->
<batch:job id="cycleJob">
<batch:step id="step1">
<batch:partition partitioner="cyclePartitioner">
<batch:step>
<!-- NOTE(review): a task-executor is configured here AND on the partition
     handler below, and throttle-limit="1" serialises the chunk workers.
     Verify this nesting is intended - double-threading a partitioned step
     is a plausible reason the reader never gets invoked. -->
<batch:tasklet task-executor="taskExecutor" throttle-limit="1">
<batch:chunk processor="itemProcessor" reader="itemReader" writer="itemWriter" commit-interval="10">
</batch:chunk>
</batch:tasklet>
</batch:step>
<batch:handler task-executor="taskExecutor" grid-size="${maxThreads}" />
</batch:partition>
</batch:step>
</batch:job>
<!-- Chunk components. Processor and writer are singletons; the reader is
     step-scoped so each partition gets its own instance. -->
<bean id="itemProcessor" class="com.navisys.besystem.batch.CycleItemProcessor">
<property name="transactionTemplate" ref="txTemplate"/>
<property name="processorType" value="Batch.executeCycle"/>
</bean>
<bean id="itemWriter" class="com.navisys.besystem.batch.CycleItemWriter" />
<!-- Thread factory shared by the partition handler and the tasklet. -->
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor">
<constructor-arg type="java.lang.String" value="cycle-" />
<property name="concurrencyLimit" value="${maxThreads}" />
</bean>
<bean id="itemReader" scope="step" class="com.navisys.besystem.batch.CycleItemReader">
<property name="dao" ref="cycledao" />
<property name="cycleDate" value="${cycleDate}" />
<property name="batchIds" value="${batchIds}" />
<property name="switches" value="${switches}" />
<property name="workItemsPerMessage" value="${workItemsPerMessage}" />
<property name="policyMask" value="${policyMask}"></property>
<property name="mainFile" value="${mainFile}" />
<property name="datafileLocation" value="${datafileLocation}"></property>
<!-- Late-bound from the ExecutionContext created by CyclePartitioner
     ("data" key, see the partition() method below). -->
<property name="data" value="#{stepExecutionContext['data']}" />
</bean>
<!-- Partitioner: queries the DAO and splits the result list into per-thread
     ExecutionContexts. -->
<bean id="cyclePartitioner" class="com.navisys.besystem.batch.CyclePartitioner">
<property name="dao" ref="cycledao" />
<property name="cycleDate" value="${cycleDate}" />
<property name="batchIds" value="${batchIds}" />
<property name="currentSwitch" value="R"></property>
</bean>
public class CyclePartitioner implements Partitioner {
#Override
public Map<String, ExecutionContext> partition(int gridSize) {
final Map<String, ExecutionContext> contextMap = new HashMap<>();
List<BatchContractIdData> list = initialize();
int partionCount = 0;
int itemsPerList = (null == list || list.isEmpty())?1:(int)(Math.ceil(list.size()/gridSize));
for(List<BatchContractIdData> data:Lists.partition(list, itemsPerList)){
ExecutionContext context = new ExecutionContext();
context.put("data", new ArrayList<BatchContractIdData>(data));
contextMap.put(getPartitionName(++partionCount), context);
}
return contextMap;
}
}

Related

Spring batch jobs are not getting triggered after spring boot upgrade to 2.2

I have the below XML-based batch configuration, but the jobs are not getting executed. I do not see any error while bringing up the application; I suspect there is a misconfiguration that I'm not able to figure out. The configuration is as follows:
Application.java look like the below:
#SpringBootApplication(exclude= {ValidationAutoConfiguration.class, WebMvcAutoConfiguration.class })
#ImportResource("classpath:/application.xml")
#ComponentScan(value = "com.abc.xyz.app.configuration") //retrives dataSource
#EnableScheduling
#EnableTransactionManagement
public class Application extends SpringBootServletInitializer{
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
application.xml
<import resource="classpath:/app-batch-context.xml"/>
<import resource="classpath:/job_1.xml"/>
<!-- NOTE(review): 'schdeuler' looks misspelled - confirm it matches the
     actual resource name on the classpath; a missing import would silently
     drop the scheduler and no job would ever be triggered. -->
<import resource="classpath:/schdeuler_1.xml"/>
app-batch-context.xml has the below contents.
<!-- Core batch infrastructure shared by all jobs (imported by application.xml). -->
<bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry"/>
<bean id="jobLauncher" class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="appJobRepository"/>
</bean>
<bean id="jobExplorer" class="org.springframework.batch.core.explore.support.JobExplorerFactoryBean">
<property name="dataSource" ref="dataSource"/>
</bean>
<bean id="jobOperator" class="org.springframework.batch.core.launch.support.SimpleJobOperator">
<property name="jobExplorer" ref="jobExplorer"/>
<property name="jobLauncher" ref="jobLauncher"/>
<property name="jobRepository" ref="appJobRepository"/>
<property name="jobRegistry" ref="jobRegistry"/>
</bean>
<!-- Non-default repository bean name: jobs must reference it explicitly
     (see job-repository="appJobRepository" on the abstract job below). -->
<bean id="appJobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean">
<property name="dataSource" ref="dataSource"/>
<property name="lobHandler" ref="lobHandler"/>
<property name="serializer" ref="xStreamExecutionContextStringSerializer"/>
<property name="transactionManager" ref="transactionManager"/>
</bean>
<!-- NOTE(review): XStreamExecutionContextStringSerializer was deprecated in
     later Spring Batch versions - verify it still exists on the post-upgrade
     (Boot 2.2.x) classpath. -->
<bean id="xStreamExecutionContextStringSerializer" class="org.springframework.batch.core.repository.dao.XStreamExecutionContextStringSerializer"/>
<bean id="lobHandler" class="org.springframework.jdbc.support.lob.DefaultLobHandler"/>
<!-- Wrapper bean to hold the applicationcontext, and make it accessible to all the objects-->
<bean id="appContext" class="com.app.core.AppContextInitializer"></bean>
<bean id="stepListner" class="com.app.core.step.APPStepExceutionListner"></bean>
<bean id="jobListner" class="com.app.core.job.APPJobExecutionListner"></bean>
<!-- abstract base Job, all jobs will extend this Job -->
<batch:job id="APPBaseJob" abstract="true" job-repository="appJobRepository">
<batch:listeners>
<batch:listener ref="jobListner"/>
</batch:listeners>
</batch:job>
<!-- abstract base Job, all the steps will extend this step-->
<batch:step id="abstractStep" abstract = "true">
<batch:listeners>
<batch:listener ref ="stepListner"/>
</batch:listeners>
</batch:step>
job_1.xml
<!-- Concrete job extending APPBaseJob: pre-process -> decide -> validate ->
     transform (chunk) -> transfer -> post-process (partially elided). -->
<batch:job id="myTestJob" parent="APPBaseJob">
<batch:step id="myTestPreProcessorStep" next="myTestStopProcessingDecider">
<batch:tasklet ref="myTestPreProcessorTasklet"/>
</batch:step>
<batch:decision id="myTestStopProcessingDecider" decider="stopProcessingDecider">
<batch:next on="CONTINUE" to="myTestFileNameValidatorStep" />
<batch:end on="COMPLETED"/>
</batch:decision>
.
.
.
<batch:step id="myTestCustomInputValidatorStep" next="myTestTransformStep">
<batch:tasklet ref="myTestCustomInputValidatorTasklet"/>
</batch:step>
<!-- NOTE(review): this chunk references myTestFileItemReader /
     myTestXmlProcessor / myTestItemWriter, but the beans declared below are
     myFileItemReader / myXmlProcessor / myItemWriter. If that is not just
     redaction in the post, these are unresolved bean references and the job
     cannot be built - verify the ids match. -->
<batch:step id="myTestTransformStep" parent="abstractStep" next="myTestFileTransferStep">
<batch:tasklet>
<batch:chunk reader="myTestFileItemReader" processor="myTestXmlProcessor" writer="myTestItemWriter"
commit-interval="#{stepExecutionContext.get('APPBATCHCONTEXT').appBatch.toBeProcessedSize}"/>
</batch:tasklet>
</batch:step>
<batch:step id="myTestFileTransferStep" next="myTestPostProcessorStep">
<batch:tasklet ref="myTestFileTransferTasklet"/>
</batch:step>
</batch:job>
<!-- File Reader -->
<bean class="com.app.core.reader.v1.APPJaxbFileEntityReader" id="myFileItemReader" scope="step">
<property name="batchContext" value="#{stepExecutionContext.get('APPBATCHCONTEXT')}"></property>
<property name="packageName" value="com.abc.jaxb.xyz.extract"></property>
</bean>
<!-- File Content Writer-->
<bean class="com.app.core.writer.v1.APPXmlJaxbItemWriter" id="myItemWriter" scope="step">
<property name="batchContext" value="#{stepExecutionContext.get('APPBATCHCONTEXT')}"></property>
<property name="packageName" value="com.abc.jaxb.xyz.extract"></property>
</bean>
<bean id="myXmlProcessor" class="com.abc.app.xyz.customprocessor.XMLDocumentProcessor" scope="step">
<property name="batchContext" value="#{stepExecutionContext.get('APPBATCHCONTEXT')}"></property>
<property name="somePropertyDataService" ref="somePropertyDataService"/>
</bean>
<bean id="myFileTransferTasklet" class="com.abc.xyz.customsender.mySenderTasklet">
<property name="stepSkipDeciders" ref="skipStepDeciders"></property>
<property name="router" ref="myRouter"></property>
</bean>
<bean class="com.abc.xyz.app.customsender.ABCRouter"
id="myRouter"></bean>
<!-- JTA transaction manager used by the batch infrastructure (WebSphere). -->
<bean id="transactionManager" class="org.springframework.transaction.jta.WebSphereUowTransactionManager" />
Scheduler has the below info : Using a custom jobLauncher but in turn uses a org.springframework.batch.core.launch.JobLauncher to run the jobs.
<!-- Scheduler that fires the custom launcher on the configured cron. -->
<task:scheduler id="myScheduler" pool-size="1"/>
<bean id="myLauncher" class="com.abc.xyz.job.APPJobLauncher">
<property name="jobCode" value="abc"></property>
</bean>
<!-- BUG FIX: this element referenced scheduler="abcScheduler", but the only
     scheduler declared above is id="myScheduler". A dangling reference here
     means the trigger is never registered and the job never starts. -->
<task:scheduled-tasks scheduler="myScheduler">
<task:scheduled ref="myLauncher" method="startJob" cron="${abcFreq}"/>
</task:scheduled-tasks>
Job frequency will be in properties file with a cron expression * */5 * * * *
Please do let me know if any pointer is there that I should check.
Spring boot upgrade from 1.5.x to 2.2.x
Thanks in advance.
Try simply adding @EnableBatchProcessing in your Application.java:
#SpringBootApplication(exclude= {ValidationAutoConfiguration.class, WebMvcAutoConfiguration.class })
#ImportResource("classpath:/application.xml")
#ComponentScan(value = "com.abc.xyz.app.configuration") //retrives dataSource
#EnableScheduling
#EnableBatchProcessing
#EnableTransactionManagement
public class Application extends SpringBootServletInitializer{
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}

Spring Batch partition doesnt work composite itemprocessor

I have a Spring Batch partition job. I'm using a CompositeProcessor, reading data from a DB and saving the items into a CopyOnWriteArrayList. Because the environment is concurrent, my CopyOnWriteArrayList is being used by other threads and the information gets mixed — I don't know why or what I am doing wrong — and the output is written into files, one per thread.
// Question's processor: returns the whole DAO result list as a single item.
// NOTE(review): 'listbean' is instance state; if one processor instance is
// visible to more than one worker thread, concurrent process() calls can
// overwrite each other's list - a plausible cause of the mixed output
// described in the question. Verify the effective scoping at runtime.
// (Class body is truncated in the post: no closing brace / setter shown.)
public class CustomerItemProcessor implements ItemProcessor<beangenerico,CopyOnWriteArrayList<beanCustomer>> {
private CustomerDAO customerDAO;
// Shared mutable field - see thread-safety note above.
private CopyOnWriteArrayList<beanCustomer> listbean;
public CopyOnWriteArrayList<beanCustomer> process(beangenerico rangos) throws Exception {
listbean = customerDAO.getAccAgentes(rangos);
if(listbean != null) {
//customer.setId(currentCustomer.getId());
return listbean;
}else{
// Returning null tells Spring Batch to filter this item out.
return null;
}
}
The configuration of my batch im XML:
<!-- Master step: rangePartitioner fans work out over 10 partitions, each
     running the 'slave' chunk step on its own thread. -->
<batch:job id="partitionJob" xmlns="http://www.springframework.org/schema/batch">
<batch:step id="masterStep">
<batch:partition step="slave" partitioner="rangePartitioner">
<batch:handler grid-size="10" task-executor="taskExecutor"/>
</batch:partition>
</batch:step>
</batch:job>
<!-- each thread will run this job, with different stepExecutionContext values. -->
<batch:step id="slave" xmlns="http://www.springframework.org/schema/batch">
<!-- NOTE(review): a task-executor on the tasklet of an already-partitioned
     step adds a second level of threading - verify it is intended. -->
<batch:tasklet task-executor="taskExecutor" throttle-limit="1">
<batch:chunk reader="beaniniendreader" writer="tempRecordsWriter" processor="completeItemProcessor" commit-interval="1" />
</batch:tasklet>
</batch:step>
<bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<bean id="rangePartitioner" class="my.package.springbatch.RangePartitioner" />
<bean id="beaniniendreader" class="my.package.springbatch.FormiikReader" scope="step"></bean>
<bean id="beanprocessor" class="my.package.springbatch.FormiikProcessor" scope="step">
<property name="accountExecutiveDao" ref="accountExecutiveDao"/>
</bean>
<!-- CustomerItemProcessor (see Java snippet above). -->
<bean id="beanprocessor2" class="my.package.springbatch.CustomerItemProcessor" scope="step">
<property name="customerDAO" ref="customerAccDao"/>
</bean>
<!-- NOTE(review): the composite is a singleton while its delegates are
     step-scoped; consider scope="step" here as well for consistency. -->
<bean id="completeItemProcessor" class="org.springframework.batch.item.support.CompositeItemProcessor">
<property name="delegates">
<list>
<ref bean="beanprocessor2"/>
<ref bean="accItemprocessor"/>
<ref bean="beanaccDataItem"/>
</list>
</property>
</bean>
<bean id="tempRecordsWriter" class="my.package.springbatch.ListDelegateWriter" scope="step">
<property name="delegate" ref="flatFileItemWriterPartition"/>
</bean>
<!-- csv file writer -->
<bean id="flatFileItemWriterPartition" class="org.springframework.batch.item.file.FlatFileItemWriter"
scope="step" >
<!-- One output file per partition, keyed by the partitioner's id range. -->
<property name="resource"
value="file:csv/outputs/users.processed#{stepExecutionContext[fromId]}-#{stepExecutionContext[toId]}.csv" />
<property name="appendAllowed" value="false" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value="," />
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="cuenta, name, purchasedPackage" />
</bean>
</property>
</bean>
</property>
</bean>
Going back over my code: I was advised to use a ThreadLocal for storing thread-specific data, and that works. Here is my code again. Thanks for your replies.
/**
 * Thread-confined variant of the processor: each worker thread stores its DAO
 * result in a ThreadLocal, so concurrent partitions no longer mix data.
 *
 * NOTE(review): returning the ThreadLocal itself as the item type is unusual;
 * the downstream writer must read it on the same thread - verify.
 */
public class CustomerItemProcessor implements ItemProcessor<beangenerico, ThreadLocal<CopyOnWriteArrayList<beanCustomer>>> {
    private CustomerDAO customerDAO;
    private final ThreadLocal<CopyOnWriteArrayList<beanCustomer>> listbean =
            new ThreadLocal<CopyOnWriteArrayList<beanCustomer>>();

    public ThreadLocal<CopyOnWriteArrayList<beanCustomer>> process(beangenerico rangos) throws Exception {
        // BUG FIX: the original assigned the DAO's CopyOnWriteArrayList directly
        // to the ThreadLocal field ("listbean = customerDAO.getAccAgentes(...)"),
        // which does not type-check; the value must be stored via set() and the
        // null check must inspect the contained list via get().
        listbean.set(customerDAO.getAccAgentes(rangos));
        if (listbean.get() != null) {
            return listbean;
        } else {
            // Null filters the item out of the chunk.
            return null;
        }
    }

    public void setCustomerDAO(CustomerDAO customerDAO) {
        this.customerDAO = customerDAO;
    }
}

Spring-Batch MongoDB : Skip functionality is not working properly in MongoItemReader scenario

Skip-exception-class functionality is not working properly when MongoItemReader is being used.
Issues:
1. Spring batch is stuck at a particular data row (in my case 15). It calls MessageContextReadConverter and onReadError for same row number 15 -- 3 times.
2. skiplistener is never invoked.
Scenario:
Suppose 20 rows are fetched from MongoDB and at row number 15 configurable exception ".ConversionFailedException" is thrown from a custom class derived from Converter, (MessageContextReadConverter implements Converter)
. MessageContextReadConverter is set during mongoTemplate creation,.
Now, exception thrown at row number 15 is received in custom readlistener (public void onReadError(Exception ex)).
Configuration XML:
<context:property-placeholder location="classpath:application.properties" />
<context:component-scan base-package="com.XXX.YYY.batch.kernel" />
<context:component-scan base-package="com.XXX.YYY.batch.dao" />
<context:annotation-config />
<!-- Enable Annotation based Declarative Transaction Management -->
<tx:annotation-driven proxy-target-class="true" transaction-manager="transactionManager" />
<!-- Creating TransactionManager Bean, since JDBC we are creating of type DataSourceTransactionManager -->
<bean id="transactionManager" class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="dataSource" />
</bean>
<!-- Job under test: reads from MongoDB, writes a flat file; up to 3
     ConversionFailedException occurrences may be skipped. -->
<batch:job id="txnLogJob" restartable="false">
<batch:step id="txnload">
<tasklet allow-start-if-complete="true" transaction-manager="transactionManager">
<chunk reader="txnLogReader" processor="txnLogProcessor"
writer="txnLogItemWriter" commit-interval="10" skip-limit="3">
<!-- NOTE(review): skip logic applies to exceptions thrown from the
     reader/processor/writer callbacks. If the converter failure is raised
     elsewhere (e.g. while MongoTemplate maps a result page), the skip
     machinery may never classify it - verify where the exception escapes,
     and that its concrete type matches the class listed below. -->
<skippable-exception-classes>
<include class="org.springframework.core.convert.ConversionFailedException" />
</skippable-exception-classes>
</chunk>
<listeners>
<listener ref="stepListener" />
<listener ref="chunklistener" />
<listener ref="readlistener" />
<listener ref="skiplistener" />
</listeners>
</tasklet>
</batch:step>
<batch:listeners>
<batch:listener ref="completionListener" />
</batch:listeners>
</batch:job>
<!-- Paged Mongo reader: empty query, sorted by creation time ascending. -->
<bean id="txnLogReader" class="org.springframework.batch.item.data.MongoItemReader"
scope="step">
<property name="template" ref="mongoTemplate" />
<property name="query" value=" { }" />
<property name="pageSize" value="50" />
<property name="sort">
<map>
<entry key="audit_info.created_on"
value="#{T(org.springframework.data.domain.Sort.Direction).ASC}" />
</map>
</property>
<property name="collection" value="txnlog" />
<property name="targetType" value="com.XXX.YYY.kernel.msg.MessageContext" />
</bean>
<bean id="completionListener"
class="com.XXX.YYY.batch.listeners.JobCompletionNotificationListener" />
<bean id="stepListener"
class="com.XXX.YYY.batch.listeners.StepExecutionListener" />
<bean id="chunklistener"
class="com.XXX.YYY.batch.listeners.ChunkExecutionListener" />
<bean id="readlistener"
class="com.XXX.YYY.batch.listeners.ReadExecutionListener" />
<bean id="skiplistener"
class="com.XXX.YYY.batch.listeners.SkipExecutionListener" />
<bean id="jobParametersDAOImpl" class="com.XXX.YYY.batch.dao.JobParametersDAOImpl" />
<bean id="batchLoader" class="com.XXX.YYY.batch.kernel.BatchLoader" />
<bean id="batchjobParameter" class="com.XXX.YYY.batch.dao.Batch_Job_Parameters" />
<!-- <bean id="reportWriter" class="org.springframework.batch.item.data.MongoItemWriter">
<property name="template" ref="mongoTemplate" /> <property name="collection"
value="txnlog" /> name of the collection to write </bean> -->
<bean id="txnLogItemWriter" class="org.springframework.batch.item.file.FlatFileItemWriter"
scope="step">
<property name="shouldDeleteIfExists" value="true" />
<property name="resource" value="file:target/test-outputs/output.txt" />
<property name="lineAggregator">
<bean
class="org.springframework.batch.item.file.transform.PassThroughLineAggregator" />
</property>
</bean>
<bean id="txnLogProcessor"
class="com.XXX.YYY.batch.processor.MessageContextItemProcessor" />
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<!-- NOTE(review): batch metadata lives in MySQL while business data is read
     from MongoDB; the JDBC transaction manager cannot roll back Mongo work. -->
<bean id="jobRepository"
class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean">
<property name="databaseType" value="MYSQL" />
<property name="dataSource" ref="dataSource" />
<property name="transactionManager" ref="transactionManager" />
</bean>
<bean id="dataSource" class="com.XXX.YYY.common.DataSource"
destroy-method="close">
<property name="driverClassName" value="${jdbc.driverClassName}" />
<property name="url" value="${jdbc.url}" />
<property name="username" value="${jdbc.username}" />
<property name="password" value="${jdbc.password}" />
<property name="connectionProperties" value="${jdbc.connectionProperties}" />
<property name="initialSize" value="${jdbc.initialSize}" />
<property name="maxTotal" value="${jdbc.maxTotal}" />
<property name="maxIdle" value="${jdbc.maxIdle}" />
<property name="minIdle" value="${jdbc.minIdle}" />
<property name="maxWaitMillis" value="${jdbc.maxWaitMillis}" />
<property name="testOnBorrow" value="${jdbc.testOnBorrow}" />
<property name="testWhileIdle" value="${jdbc.testWhileIdle}" />
<property name="testOnReturn" value="${jdbc.testOnReturn}" />
<property name="validationQuery" value="${jdbc.validationQuery}" />
</bean>

Get file name from readMultiFileJob in Spring Batch

The following is my Spring Batch processing config file. I am reading multiple files (XML, CSV, etc.); the files are generated dynamically with a timestamp as a suffix. I can read the files' data and process it. Now the question is:
I would like to know the file name.
How do I get the file name while the job is processing?
<import resource="../config/context.xml" />
<bean id="domain" class="com.di.pos.Domain" />
<!-- Reads every dipos-*.csv in turn. To learn which file an item came from,
     ask the MultiResourceItemReader for getCurrentResource() - see the
     mapLine() answer further down the page. -->
<job id="readMultiFileJob" xmlns="http://www.springframework.org/schema/batch">
<step id="step1">
<tasklet>
<chunk reader="multiResourceReader" writer="flatFileItemWriter"
commit-interval="1" />
</tasklet>
</step>
</job>
<bean id="multiResourceReader"
class=" org.springframework.batch.item.file.MultiResourceItemReader">
<property name="resources" value="file:csv/inputs/dipos-*.csv" />
<property name="delegate" ref="flatFileItemReader" />
</bean>
<!-- Delegate line-by-line CSV reader mapping rows onto the 'domain' bean. -->
<bean id="flatFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="names" value="id, name" />
</bean>
</property>
<property name="fieldSetMapper">
<bean
class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="domain" />
</bean>
</property>
</bean>
</property>
</bean>
Create a custom Mapper which extends LineMapper.
Override mapLine() method
// Maps one input line while capturing the current file. 'delegator' is
// presumably the MultiResourceItemReader, whose getCurrentResource() exposes
// the file being read - TODO confirm it is injected into this mapper.
public FileData mapLine(String line, int lineNumber) throws Exception {
FileData fileData = new FileData();
Resource currentResource = delegator.getCurrentResource();
// NOTE(review): Resource.getFilename() returns only the last path segment,
// so splitting it on "/" normally yields a single-element array - verify.
String[] fileName = currentResource.getFilename().split("/");
//Use this to access file path
URI fileUri = currentResource.getURI();
return fileData;
}

JDBCBatchItemWriter not receiving the List for batch update

I'm new to Spring Batch. My requirement is to fetch records from a DB table, process them (each record can be processed independently, so I'm partitioning and using a task executor), and then update the status column in the same table based on the processing status.
Simplified version of my code is below.
Item Reader (My custom column partitioner will decide the min & max value below):
<!-- Step-scoped cursor reader: the id range is late-bound per partition via
     SpEL against the stepExecutionContext (requires scope="step"). -->
<bean name="databaseReader" class="org.springframework.batch.item.database.JdbcCursorItemReader" scope="step">
<property name="dataSource" ref="dataSource"/>
<property name="sql">
<value>
<![CDATA[
select id,user_login,user_pass,age from users where id >= #{stepExecutionContext['minValue']} and id <= #{stepExecutionContext['maxValue']}
]]>
</value>
</property>
<property name="rowMapper">
<bean class="com.springapp.batch.UserRowMapper" />
</property>
<!-- Cursor position verification disabled for this shared connection. -->
<property name="verifyCursorPosition" value="false"/>
</bean>
Item Processor:
<!-- Step-scoped so each partition step execution gets its own instance. -->
<bean id="itemProcessor" class="com.springapp.batch.UserItemProcessor" scope="step"/>
....
public class UserItemProcessor implements ItemProcessor<Users, Users>
{
#Override
public Users process(Users users) throws Exception {
// do some processing here..
//update users status
//users.setStatus(users.getId() + ": Processed by :" + Thread.currentThread().getName() + ": Processed at :" + new GregorianCalendar().getTime().toString());
//System.out.println("Processing user :" + users + " :" +Thread.currentThread().getName());
return users;
}
}
Item Writer:
<!-- Batch-update writer: write() receives the whole chunk (up to
     commit-interval items) and issues one JDBC batch update. Named
     parameters (:status, :id) are bound from Users bean properties. -->
<bean id="databaseWriter" class="org.springframework.batch.item.database.JdbcBatchItemWriter">
<property name="dataSource" ref="dataSource" />
<property name="sql">
<value>
<![CDATA[
update users set status = :status where id= :id
]]>
</value>
</property>
<property name="itemSqlParameterSourceProvider">
<bean class="org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider" />
</property>
</bean>
Step configuration:
<!-- Partitioned job: myColumnRangepartitioner splits the id range into
     gridSize=3 partitions, each running the 'worker' chunk step. -->
<batch:job id="usersJob">
<batch:step id="stepOne">
<batch:partition step="worker" partitioner="myColumnRangepartitioner" handler="partitionHandler" />
</batch:step>
</batch:job>
<batch:step id="worker" >
<batch:tasklet transaction-manager="transactionManager">
<batch:chunk reader="databaseReader" writer="databaseWriter" commit-interval="5" processor="itemProcessor" />
</batch:tasklet>
</batch:step>
<bean id="asyncTaskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor" />
<!-- NOTE(review): scope="step" on a partition handler is unusual - handlers
     are normally singletons; verify the step-scoped proxying is intended. -->
<bean id="partitionHandler" class="org.springframework.batch.core.partition.support.TaskExecutorPartitionHandler" scope="step">
<property name="taskExecutor" ref="asyncTaskExecutor"/>
<property name="step" ref="worker" />
<property name="gridSize" value="3" />
</bean>
Since I have specified the commit interval as 5, my understanding is that when 5 items are processed by a partition, it will call the JdbcBatchItemWriter with a List of 5 Users objects to perform a batch JDBC update. However, with the current setup, I'm receiving 1 User object at a time during the batch update.
Is my understanding correct, or am I missing a step/configuration?
Note: I'm using HSQL file based database for testing.
<!-- File-based HSQLDB datasource used for local testing.
     NOTE(review): 'jdbc:hsqldb:file:C://users.txt' names a .txt path; HSQLDB
     will create users.txt.script/.properties beside it - confirm the URL. -->
<bean id="dataSource" class="org.apache.commons.dbcp.BasicDataSource" destroy-method="close">
<property name="driverClassName" value="org.hsqldb.jdbc.JDBCDriver"/>
<property name="url" value="jdbc:hsqldb:file:C://users.txt"/>
<property name="username" value="sa"/>
<property name="password" value=""/>
</bean>

Resources