How to configure Spring Cloud Data Flow for Spring Batch

I have a Spring Batch project that I want to run on Spring Cloud Data Flow. I am able to register it on SCDF, but when I launch the task my job does not run.
Following is my configuration file:
@SpringBootApplication
@EnableBatchProcessing
@EnableTask
public class BatchApplication {
/*@Autowired
BatchCommandLineRunner batchcommdrunner;
@Bean
public CommandLineRunner commandLineRunner() {
System.out.println("Executed at :" + new SimpleDateFormat().format(new Date()));
return batchcommdrunner ;
}*/
public static void main(String[] args) {
SpringApplication.run(BatchApplication.class, args);
}
}
And this is my batch configuration file:
@Configuration
public class BatchConfiguaration {
@Autowired
private DataSource datasouce;
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
public Environment env;
@Bean(name = "reader")
@StepScope
public ItemReader<Schedules> reader(@Value("#{stepExecutionContext[scheduleRecs]}") List<Schedules> scherecs) {
ItemReader<Schedules> reader = new IteratorItemReader<Schedules>(scherecs);
return reader;
}
#Bean(name = "CWSreader")
#StepScope
public ItemReader<Contents> CWSreader(#Value("#{stepExecutionContext[scheduleRecs]}") List<Contents> scherecs) {
ItemReader<Contents> reader = new IteratorItemReader<Contents>(scherecs);
return reader;
}
@SuppressWarnings("rawtypes")
@Bean
@StepScope
public BatchProcessor processor() {
return new BatchProcessor();
}
#Bean(name = "batchSchedulePreparedStatement")
#StepScope
public BatchSchedulePreparedStatement batchSchedulePreparedStatement() {
return new BatchSchedulePreparedStatement();
}
@SuppressWarnings({ "rawtypes", "unchecked" })
@Bean(name = "batchWriter")
@StepScope
public BatchWriter batchWriter() {
BatchWriter batchWriter = new BatchWriter();
batchWriter.setDataSource(datasouce);
batchWriter.setSql(env.getProperty("batch.insert.schedule.query"));
batchWriter.setItemPreparedStatementSetter(batchSchedulePreparedStatement());
return batchWriter;
}
#Bean("acheronDbTm")
#Qualifier("acheronDbTm")
public PlatformTransactionManager platformTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public JobExplorer jobExplorer() throws Exception {
MapJobExplorerFactoryBean explorerFactoryBean = new MapJobExplorerFactoryBean();
explorerFactoryBean.setRepositoryFactory(mapJobRepositoryFactoryBean());
explorerFactoryBean.afterPropertiesSet();
return explorerFactoryBean.getObject();
}
@Bean
public MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean() {
MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean = new MapJobRepositoryFactoryBean();
mapJobRepositoryFactoryBean.setTransactionManager(platformTransactionManager());
return mapJobRepositoryFactoryBean;
}
@Bean
public JobRepository jobRepository() throws Exception {
return mapJobRepositoryFactoryBean().getObject();
}
@Bean
public SimpleJobLauncher jobLauncher() throws Exception {
SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
jobLauncher.setJobRepository(jobRepository());
return jobLauncher;
}
#Bean(name = "batchPartition")
#StepScope
public BatchPartition batchPartition() {
BatchPartition batchPartition = new BatchPartition();
return batchPartition;
}
#Bean(name="taskExecutor")
public TaskExecutor taskExecutor() {
ThreadPoolTaskExecutor poolTaskExecutor = new ThreadPoolTaskExecutor();
poolTaskExecutor.setCorePoolSize(10);
poolTaskExecutor.setMaxPoolSize(30);
poolTaskExecutor.setQueueCapacity(35);
poolTaskExecutor.setThreadNamePrefix("Acheron");
poolTaskExecutor.afterPropertiesSet();
return poolTaskExecutor;
}
#Bean(name = "masterStep")
public Step masterStep() {
return stepBuilderFactory.get("masterStep").partitioner(slave()).partitioner("slave", batchPartition())
.taskExecutor(taskExecutor()).build();
}
#Bean(name = "slave")
public Step slave() {
return stepBuilderFactory.get("slave").chunk(100).faultTolerant().retryLimit(2)
.retry(DeadlockLoserDataAccessException.class).reader(reader(null)).processor(processor())
.writer(batchWriter()).build();
}
#Bean(name = "manageStagingScheduleMaster")
public Job manageStagingScheduleMaster(final Step masterStep) throws Exception {
return jobBuilderFactory.get("manageStagingScheduleMaster").preventRestart().incrementer(new RunIdIncrementer())
.start(masterStep).build();
}
}
Can anyone help me configure this properly, or is there any other way I can monitor my batch jobs?
I also tried Spring Boot Admin, but it does not support Java configuration; is there any way to add jobs in SBA without defining the jobs in XML?
I am launching this job from a controller:
JobParametersBuilder builder = new JobParametersBuilder();
System.out.println("Job Builder " + builder);
JobParameters jobParameters = builder.toJobParameters();
JobExecution execution = jobLauncher.run(job, jobParameters);
return execution.getStatus().toString();

This sample shows a basic Spring Batch application that can be launched as a task in Spring Cloud Data Flow.
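For reference, a minimal sketch of such a task-enabled batch application might look like the following (the class, step, and job names here are illustrative, not taken from the project above):

import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.context.annotation.Bean;

@SpringBootApplication
@EnableTask
@EnableBatchProcessing
public class TaskBatchApplication {

    // A trivial single-step job so there is something to run when SCDF launches the task.
    @Bean
    public Job sampleJob(JobBuilderFactory jobs, StepBuilderFactory steps) {
        Step step = steps.get("sampleStep")
                .tasklet((contribution, chunkContext) -> {
                    System.out.println("Hello from a task launched by Spring Cloud Data Flow");
                    return RepeatStatus.FINISHED;
                })
                .build();
        return jobs.get("sampleJob")
                .incrementer(new RunIdIncrementer())
                .start(step)
                .build();
    }

    public static void main(String[] args) {
        SpringApplication.run(TaskBatchApplication.class, args);
    }
}

With @EnableTask on the boot class, the task repository records each launch, and Spring Boot's batch auto-configuration runs any Job beans in the context on startup (as long as spring.batch.job.enabled has not been set to false), so the job should execute when SCDF launches the task.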

Related

MongoDB Spring Batch job using Gradle

I want to create an item reader, writer, and processor for a Spring Batch job using Gradle. I am having trouble with a few things. The delimited() portion is giving me an error. I am trying to read two fields for now: rxFname and rxLname.
Here is my code:
@Configuration
@EnableBatchProcessing
public class PaymentPortalJob {
private static final Logger LOG =
LoggerFactory.getLogger(PaymentPortalJob.class);
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Bean
MongoItemReader <PaymentAudit> fileReader(){
return new MongoItemReaderBuilder <PaymentAudit>()
.name("file-reader")
.targetType(PaymentAudit.class)
.delimited().delimiter(",").names(new String [] {"rxFname" , "rxLname"})
.build();
}
@Bean
public ItemProcessor<PaymentAudit, PaymentAudit> processor() {
return new PaymentAuditItemProcessor();
}
@Bean
public ItemWriter<PaymentAudit> writer() {
return new MongoItemWriterBuilder()<PaymentAudit>();
try {
writer.setTemplate(mongoTemplate());
}catch (Exception e) {
LOG.error(e.toString());
}
writer.setCollection("paymentAudit");
return writer;
}
@Bean
Job job (JobBuilderFactory jbf,
StepBuilderFactory sbf,
ItemReader<? extends PaymentAudit> ir,
ItemWriter<? super PaymentAudit> iw) {
Step s1 = sbf.get("file-db")
.<PaymentAudit, PaymentAudit>chunk(100)
.reader(ir)
.writer(iw)
.build();
return jbf.get("etl")
.incrementer(new RunIdIncrementer())
.start(s1)
.build();
}
@Bean
public MongoDbFactory mongoDbFactory() throws Exception {
return new SimpleMongoDbFactory(new MongoClient(), "db-name");
}
@Bean
public MongoTemplate mongoTemplate() throws Exception {
MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory());
return mongoTemplate;
}
}

Spring Boot + Spring Batch Multiple Job Creation and Scheduling

I created a Spring Boot application with Spring Batch and scheduling. When I create only one job, things work fine. But when I try to create another job using the modular approach, I get a few errors, such as "reader is already closed" and version-related errors, even though I am using different readers. The jobs and their steps run many times and get duplicated.
Can anyone please guide me on how to resolve these issues and run the jobs in parallel, independent of each other?
Below are the configuration classes:
ModularJobConfiguration.java, DeptBatchConfiguration.java, CityBatchConfiguration.java and BatchScheduler.java
@Configuration
@EnableBatchProcessing(modular=true)
public class ModularJobConfiguration {
@Bean
public ApplicationContextFactory firstJob() {
return new GenericApplicationContextFactory(DeptBatchConfiguration.class);
}
@Bean
public ApplicationContextFactory secondJob() {
return new GenericApplicationContextFactory(CityBatchConfiguration.class);
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class DeptBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(DeptBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<DepartmentModelReader> deptReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performFirstJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("Job1 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID1",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importDeptJob(jobBuilderFactory,stepdept(deptReaderSO,customWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job1 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<DepartmentModelReader> deptReaderSO() {
//LOGGER.info("Inside deptReaderSO Method");
JdbcCursorItemReader<DepartmentModelReader> deptReaderSO = new JdbcCursorItemReader<>();
//deptReaderSO.setSql("select id, firstName, lastname, random_num from reader");
deptReaderSO.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
deptReaderSO.setDataSource(dataSourceReader);
deptReaderSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
DepartmentModelReader recordSO = new DepartmentModelReader();
recordSO.setDeptCode(resultSet.getString("DEPT_CODE"));
recordSO.setDeptName(resultSet.getString("DEPT_NAME"));
recordSO.setFullDeptName(resultSet.getString("FULL_DEPT_NAME"));
recordSO.setCityCode(resultSet.getInt("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedUserId(resultSet.getInt("CREATED_USER_ID"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
recordSO.setDeptClass(resultSet.getInt("DEPT_CLASS"));
recordSO.setDeptParent(resultSet.getString("DEPT_PARENT"));
recordSO.setDeptParentName(resultSet.getString("DEPT_PARENT_NAME"));
// LOGGER.info("RowMapper record : {}", recordSO.getDeptCode() +" | "+recordSO.getDeptName());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return deptReaderSO;
}
@Bean
public ItemProcessor<DepartmentModelReader, DepartmentModelWriter> processor() {
//LOGGER.info("Inside Processor Method");
return new RecordProcessor();
}
@Bean
public ItemWriter<DepartmentModelWriter> customWriter(){
//LOGGER.info("Inside customWriter Method");
return new CustomItemWriter();
}
@Bean
public Job importDeptJob(JobBuilderFactory jobs, Step stepdept,JobExecutionListener listener){
return jobs.get("importDeptJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepdept).end().build();
}
@Bean
public Step stepdept(ItemReader<DepartmentModelReader> deptReaderSO,
ItemWriter<DepartmentModelWriter> writerSO) {
LOGGER.info("Inside stepdept Method");
return stepBuilderFactory.get("stepdept").<DepartmentModelReader, DepartmentModelWriter>chunk(5)
.reader(deptReaderSO).processor(processor()).writer(customWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class CityBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(CityBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<CitiesModelReader> citiesReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performSecondJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("\n Job2 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID2",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importCitiesJob(jobBuilderFactory,stepcity(citiesReaderSO,customCitiesWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job2 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<CitiesModelReader> citiesReaderSO() {
//LOGGER.info("Inside readerSO Method");
JdbcCursorItemReader<CitiesModelReader> readerSO = new JdbcCursorItemReader<>();
readerSO.setSql("SELECT CITY_CODE,CITY_NAME,PARENT_CITY,CITY_TYPE,CITY_TYPE_NAME,CREATED_G_DATE,CREATED_USER_ID,MODIFIED_G_DATE,MODIFIED_USER_ID,RECORD_ACTIVITY FROM TBL_SAMPLE_SAFTY_CITIES");
readerSO.setDataSource(dataSourceReader);
readerSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
CitiesModelReader recordSO = new CitiesModelReader();
recordSO.setCityCode(resultSet.getLong("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setParentCity(resultSet.getInt("PARENT_CITY"));
recordSO.setCityType(resultSet.getString("CITY_TYPE"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setCreatedUserId(resultSet.getString("CREATED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
//LOGGER.info("RowMapper record : {}", recordSO.toString());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return readerSO;
}
@Bean
public ItemProcessor<CitiesModelReader,CitiesModelWriter> citiesProcessor() {
//LOGGER.info("Inside Processor Method");
return new RecordCitiesProcessor();
}
@Bean
public ItemWriter<CitiesModelWriter> customCitiesWriter(){
LOGGER.info("Inside customCitiesWriter Method");
return new CustomCitiesWriter();
}
@Bean
public Job importCitiesJob(JobBuilderFactory jobs, Step stepcity,JobExecutionListener listener) {
LOGGER.info("Inside importCitiesJob Method");
return jobs.get("importCitiesJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepcity).end().build();
}
@Bean
public Step stepcity(ItemReader<CitiesModelReader> readerSO,
ItemWriter<CitiesModelWriter> writerSO) {
LOGGER.info("Inside stepCity Method");
return stepBuilderFactory.get("stepcity").<CitiesModelReader, CitiesModelWriter>chunk(5)
.reader(readerSO).processor(citiesProcessor()).writer(customCitiesWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableScheduling
public class BatchScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(BatchScheduler.class);
@Bean
public ResourcelessTransactionManager resourcelessTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
ResourcelessTransactionManager txManager) throws Exception {
LOGGER.info("Inside mapJobRepositoryFactory method");
MapJobRepositoryFactoryBean factory = new
MapJobRepositoryFactoryBean(txManager);
factory.afterPropertiesSet();
return factory;
}
@Bean
public JobRepository jobRepository(
MapJobRepositoryFactoryBean factory) throws Exception {
LOGGER.info("Inside jobRepository method");
return factory.getObject();
}
@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
LOGGER.info("Inside jobLauncher method");
SimpleJobLauncher launcher = new SimpleJobLauncher();
launcher.setJobRepository(jobRepository);
final SimpleAsyncTaskExecutor simpleAsyncTaskExecutor = new SimpleAsyncTaskExecutor();
launcher.setTaskExecutor(simpleAsyncTaskExecutor);
return launcher;
}
}
Your readers are not thread-safe and not step-scoped. Because of that, you're running into concurrency issues. Configure each of your stateful ItemReaders (the ones that implement ItemStream, like the JdbcCursorItemReader) to be step-scoped by adding the @StepScope annotation, and things should work fine.
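For example, a minimal sketch of the department reader as a step-scoped bean (the shortened SQL and the BeanPropertyRowMapper are illustrative; the original SQL and row mapper from the question can be kept as-is, and the cities reader needs the same treatment):

@Bean
@StepScope
public JdbcCursorItemReader<DepartmentModelReader> deptReaderSO(
        @Qualifier("dataSourceReader") DataSource dataSourceReader) {
    // @StepScope means a fresh reader (and cursor) is created for every step execution,
    // so concurrently running jobs no longer share and close the same stateful reader.
    JdbcCursorItemReader<DepartmentModelReader> reader = new JdbcCursorItemReader<>();
    reader.setSql("SELECT DEPT_CODE, DEPT_NAME, FULL_DEPT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
    reader.setDataSource(dataSourceReader);
    reader.setRowMapper(new BeanPropertyRowMapper<>(DepartmentModelReader.class));
    return reader;
}

Returning the concrete JdbcCursorItemReader type (rather than the ItemReader interface) also lets Spring see that the step-scoped bean implements ItemStream, so the step can open and close it correctly.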

How can I restrict duplicate job creation using Spring Boot and Spring Batch?

I created a Spring Boot application with Spring Batch and scheduling. When I create only one job, things work fine. But when I try to create another job using the modular approach, the jobs and their steps run many times and get duplicated.
I am getting the below error.
2017-08-24 16:05:00.581 INFO 16172 --- [cTaskExecutor-2] o.s.b.c.l.support.SimpleJobLauncher : Job: [FlowJob: [name=importDeptJob]] completed with the following parameters: [{JobID1=1503579900035}] and the following status: [FAILED]
2017-08-24 16:05:00.581 ERROR 16172 --- [cTaskExecutor-2] o.s.batch.core.step.tasklet.TaskletStep : JobRepository failure forcing rollback
org.springframework.dao.OptimisticLockingFailureException: Attempt to update step execution id=1 with wrong version (3), where current version is 1
Can anyone please guide me on how to resolve these issues and run the jobs in parallel, independent of each other?
Below are the configuration classes: ModularJobConfiguration.java, DeptBatchConfiguration.java, CityBatchConfiguration.java and BatchScheduler.java
@Configuration
@EnableBatchProcessing(modular=true)
public class ModularJobConfiguration {
@Bean
public ApplicationContextFactory firstJob() {
return new GenericApplicationContextFactory(DeptBatchConfiguration.class);
}
@Bean
public ApplicationContextFactory secondJob() {
return new GenericApplicationContextFactory(CityBatchConfiguration.class);
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class DeptBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(DeptBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<DepartmentModelReader> deptReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performFirstJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("Job1 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID1",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importDeptJob(jobBuilderFactory,stepdept(deptReaderSO,customWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job1 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<DepartmentModelReader> deptReaderSO() {
//LOGGER.info("Inside deptReaderSO Method");
JdbcCursorItemReader<DepartmentModelReader> deptReaderSO = new JdbcCursorItemReader<>();
//deptReaderSO.setSql("select id, firstName, lastname, random_num from reader");
deptReaderSO.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
deptReaderSO.setDataSource(dataSourceReader);
deptReaderSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
DepartmentModelReader recordSO = new DepartmentModelReader();
recordSO.setDeptCode(resultSet.getString("DEPT_CODE"));
recordSO.setDeptName(resultSet.getString("DEPT_NAME"));
recordSO.setFullDeptName(resultSet.getString("FULL_DEPT_NAME"));
recordSO.setCityCode(resultSet.getInt("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedUserId(resultSet.getInt("CREATED_USER_ID"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
recordSO.setDeptClass(resultSet.getInt("DEPT_CLASS"));
recordSO.setDeptParent(resultSet.getString("DEPT_PARENT"));
recordSO.setDeptParentName(resultSet.getString("DEPT_PARENT_NAME"));
// LOGGER.info("RowMapper record : {}", recordSO.getDeptCode() +" | "+recordSO.getDeptName());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return deptReaderSO;
}
@Bean
public ItemProcessor<DepartmentModelReader, DepartmentModelWriter> processor() {
//LOGGER.info("Inside Processor Method");
return new RecordProcessor();
}
@Bean
public ItemWriter<DepartmentModelWriter> customWriter(){
//LOGGER.info("Inside customWriter Method");
return new CustomItemWriter();
}
@Bean
public Job importDeptJob(JobBuilderFactory jobs, Step stepdept,JobExecutionListener listener){
return jobs.get("importDeptJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepdept).end().build();
}
@Bean
public Step stepdept(ItemReader<DepartmentModelReader> deptReaderSO,
ItemWriter<DepartmentModelWriter> writerSO) {
LOGGER.info("Inside stepdept Method");
return stepBuilderFactory.get("stepdept").<DepartmentModelReader, DepartmentModelWriter>chunk(5)
.reader(deptReaderSO).processor(processor()).writer(customWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class CityBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(CityBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<CitiesModelReader> citiesReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performSecondJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("\n Job2 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID2",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importCitiesJob(jobBuilderFactory,stepcity(citiesReaderSO,customCitiesWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job2 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<CitiesModelReader> citiesReaderSO() {
//LOGGER.info("Inside readerSO Method");
JdbcCursorItemReader<CitiesModelReader> readerSO = new JdbcCursorItemReader<>();
readerSO.setSql("SELECT CITY_CODE,CITY_NAME,PARENT_CITY,CITY_TYPE,CITY_TYPE_NAME,CREATED_G_DATE,CREATED_USER_ID,MODIFIED_G_DATE,MODIFIED_USER_ID,RECORD_ACTIVITY FROM TBL_SAMPLE_SAFTY_CITIES");
readerSO.setDataSource(dataSourceReader);
readerSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
CitiesModelReader recordSO = new CitiesModelReader();
recordSO.setCityCode(resultSet.getLong("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setParentCity(resultSet.getInt("PARENT_CITY"));
recordSO.setCityType(resultSet.getString("CITY_TYPE"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setCreatedUserId(resultSet.getString("CREATED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
//LOGGER.info("RowMapper record : {}", recordSO.toString());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return readerSO;
}
@Bean
public ItemProcessor<CitiesModelReader,CitiesModelWriter> citiesProcessor() {
//LOGGER.info("Inside Processor Method");
return new RecordCitiesProcessor();
}
@Bean
public ItemWriter<CitiesModelWriter> customCitiesWriter(){
LOGGER.info("Inside customCitiesWriter Method");
return new CustomCitiesWriter();
}
@Bean
public Job importCitiesJob(JobBuilderFactory jobs, Step stepcity,JobExecutionListener listener) {
LOGGER.info("Inside importCitiesJob Method");
return jobs.get("importCitiesJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepcity).end().build();
}
@Bean
public Step stepcity(ItemReader<CitiesModelReader> readerSO,
ItemWriter<CitiesModelWriter> writerSO) {
LOGGER.info("Inside stepCity Method");
return stepBuilderFactory.get("stepcity").<CitiesModelReader, CitiesModelWriter>chunk(5)
.reader(readerSO).processor(citiesProcessor()).writer(customCitiesWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableScheduling
public class BatchScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(BatchScheduler.class);
@Bean
public ResourcelessTransactionManager resourcelessTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
ResourcelessTransactionManager txManager) throws Exception {
LOGGER.info("Inside mapJobRepositoryFactory method");
MapJobRepositoryFactoryBean factory = new
MapJobRepositoryFactoryBean(txManager);
factory.afterPropertiesSet();
return factory;
}
@Bean
public JobRepository jobRepository(
MapJobRepositoryFactoryBean factory) throws Exception {
LOGGER.info("Inside jobRepository method");
return factory.getObject();
}
@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
LOGGER.info("Inside jobLauncher method");
SimpleJobLauncher launcher = new SimpleJobLauncher();
launcher.setJobRepository(jobRepository);
final SimpleAsyncTaskExecutor simpleAsyncTaskExecutor = new SimpleAsyncTaskExecutor();
launcher.setTaskExecutor(simpleAsyncTaskExecutor);
return launcher;
}
}
The map-based SimpleJobRepository created from MapJobRepositoryFactoryBean is not thread-safe.
From the Javadocs:
A FactoryBean that automates the creation of a SimpleJobRepository using non-persistent in-memory DAO implementations. This repository is only really intended for use in testing and rapid prototyping. In such settings you might find that ResourcelessTransactionManager is useful (as long as your business logic does not use a relational database). Not suited for use in multi-threaded jobs with splits, although it should be safe to use in a multi-threaded step.
You can create a JDBC-based SimpleJobRepository from JobRepositoryFactoryBean, which may utilize an in-memory H2 database if you don't require that Batch metadata be persisted.
Since you are using Spring Boot, to use an H2-backed JobRepository simply remove your JobRepository bean and add the following dependency to your pom.xml file:
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
Spring Boot will automatically configure a DataSource as though you had configured the following in your application.properties file and automatically use that DataSource in the creation of a JobRepository.
spring.datasource.url=jdbc:h2:mem:testdb
spring.datasource.driverClassName=org.h2.Driver
spring.datasource.username=sa
spring.datasource.password=
Alternatively, to use some other JDBC-backed JobRepository add the JDBC dependencies for your RDBMS of choice to your project, and configure a DataSource for it (either in code as a DataSource bean, or in application.properties using the spring.datasource prefix as shown above). Spring Boot will automatically use this DataSource during the creation of the JobRepository bean.
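If you prefer to define the JDBC-backed repository explicitly rather than relying on Boot's auto-configuration, a minimal sketch might look like the following (it assumes a DataSource and a PlatformTransactionManager are already available for injection; the class name is illustrative):

import javax.sql.DataSource;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.JobRepositoryFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.PlatformTransactionManager;

@Configuration
public class JdbcJobRepositoryConfiguration {

    // Builds a JobRepository backed by the BATCH_* metadata tables in the injected
    // DataSource instead of the non-thread-safe map-based implementation.
    @Bean
    public JobRepository jobRepository(DataSource dataSource,
            PlatformTransactionManager transactionManager) throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(dataSource);
        factory.setTransactionManager(transactionManager);
        factory.afterPropertiesSet();
        return factory.getObject();
    }
}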

Cannot autowire beans when separating configuration classes

I have a Spring Batch job configured with JavaConfig. The main job configuration class is CrawlerJobConfiguration. Until now, I had all the configuration (infrastructure, autowired beans, etc.) in this class and it worked fine. So I decided to separate the job configuration from the autowired beans and infrastructure bean configuration and create two more configuration classes, Beans and MysqlInfrastructureConfiguration.
But now I am having problems running my job. I'm receiving a NullPointerException when the application tries to use autowired fields, indicating that autowiring is not working.
I put a breakpoint in the methods that create the autowired beans to make sure they are being called, and they really are, so I cannot figure out what the problem could be.
java.lang.NullPointerException: null
at br.com.alexpfx.supermarket.batch.tasklet.StartCrawlerTasklet.execute(StartCrawlerTasklet.java:27) ~[classes/:na]
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:406) ~[spring-batch-core-3.0.6.RELEASE.jar:3.0.6.RELEASE]
Main job configuration class:
@Configuration
@EnableBatchProcessing
public class CrawlerJobConfiguration {
@Autowired
private InfrastructureConfiguration infrastructureConfiguration;
@Autowired
private StepBuilderFactory steps;
@Autowired
Environment environment;
@Bean
public Job job(JobBuilderFactory jobs) {
Job theJob = jobs.get("job").start(crawlerStep()).next(processProductStep()).build();
((AbstractJob) theJob).setRestartable(true);
return theJob;
}
@Bean
public Step crawlerStep() {
TaskletStep crawlerStep = steps.get("crawlerStep").tasklet(crawlerTasklet()).build();
crawlerStep.setAllowStartIfComplete(true);
return crawlerStep;
}
@Bean
public Step processProductStep() {
TaskletStep processProductStep = steps.get("processProductStep")
.<TransferObject, Product>chunk(10)
.reader(reader())
.processor(processor())
.writer(writer())
.build();
processProductStep.setAllowStartIfComplete(true);
return processProductStep;
}
private Tasklet crawlerTasklet() {
return new StartCrawlerTasklet();
}
private ItemProcessor<TransferObject, Product> processor() {
return new ProductProcessor();
}
private ItemReader<TransferObject> reader() {
return new ProductItemReader();
}
private ItemWriter<Product> writer() {
return new HibernateProductsItemWriter();
}
}
Beans configuration class:
@Configuration
@EnableBatchProcessing
public class Beans {
@Bean
public Crawler crawler() {
return new RibeiraoCrawler(new UserAgentFactory());
}
@Bean
public ProductBo productBo() {
return new ProductBoImpl();
}
@Bean
public ProductDao productDao() {
return new ProductDaoImpl();
}
@Bean
public CrawlerListener listener() {
CrawlerListener listener = new RibeiraoListener();
return listener;
}
@Bean
public ProductList getProductList() {
return new ProductList();
}
}
MysqlInfrastructureConfiguration:
@Configuration
@EnableBatchProcessing
@PropertySource("classpath:database.properties")
@EnableJpaRepositories(basePackages = {"br.com.alexpfx.supermarket.domain"})
public class MysqlInfrastructureConfiguration implements InfrastructureConfiguration {
@Value("${jdbc.url}")
String url;
@Value("${jdbc.driverClassName}")
String driverClassName;
@Value("${jdbc.username}")
String username;
@Value("${jdbc.password}")
String password;
@Bean
@Override
public DataSource getDataSource() {
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName(driverClassName);
dataSource.setUrl(url);
dataSource.setUsername(username);
dataSource.setPassword(password);
return dataSource;
}
@Bean
@Override
public PlatformTransactionManager transactionManager() {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManagerFactory());
transactionManager.setDataSource(getDataSource());
return transactionManager;
}
@Bean
@Override
public EntityManagerFactory entityManagerFactory() {
LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
em.setDataSource(getDataSource());
em.setPackagesToScan(new String[]{"br.com.alexpfx.supermarket.domain"});
JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
em.setJpaVendorAdapter(vendorAdapter);
em.setJpaProperties(additionalJpaProperties());
em.afterPropertiesSet();
return em.getObject();
}
private Properties additionalJpaProperties() {
Properties properties = new Properties();
properties.setProperty("hibernate.hbm2ddl.auto", "create");
properties.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
properties.setProperty("hibernate.show_sql", "true");
properties.setProperty("current_session_context_class", "thread");
return properties;
}
}
tasklet:
public class StartCrawlerTasklet implements Tasklet {
@Autowired
private Crawler crawler;
@Autowired
private CrawlerListener listener;
@Override
public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
crawler.setListener(listener);
crawler.setStopCondition(new TimeLimitStopCondition(1, TimeUnit.MINUTES));
crawler.crawl();
return RepeatStatus.FINISHED;
}
}
StartCrawlerTasklet uses the @Autowired annotation, so it needs to be a Spring-managed bean as well; otherwise its dependencies are never injected, the fields stay null, and you get the NullPointerException. So change your code:
private Tasklet crawlerTasklet() {
return new StartCrawlerTasklet();
}
to a bean definition:
@Bean
public Tasklet crawlerTasklet() {
return new StartCrawlerTasklet();
}

Spring Batch causing Spring Data not to commit transactions

I have a Spring MVC application with a batch process powered by Spring Batch. If I remove the batch configuration, all transactions commit. If a batch job is run, the batch job completes successfully but no data is committed to the database.
My configurations are as follows:
@Configuration
@EnableWebMvc
@EnableAsync
@EnableScheduling
@EnableBatchProcessing(modular = false)
@EnableTransactionManagement
@EnableRabbit
@EnableJpaRepositories(basePackages = "zw.co.econet.workstation.repositories")
@ComponentScan(basePackages = {"zw.co.workstation"})
@PropertySource(value = {"classpath:application.properties"})
public class WebConfiguration extends WebMvcConfigurerAdapter {
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/resources/**").addResourceLocations("/resources/");
}
@Bean
public InternalResourceViewResolver jspViewResolver() {
InternalResourceViewResolver bean = new InternalResourceViewResolver();
bean.setPrefix("/WEB-INF/pages/");
bean.setSuffix(".jsp");
return bean;
}
}
Spring batch configuration :
@Configuration
public class BatchConfiguration {
@Autowired
private JobBuilderFactory jobs;
@Autowired
private StepBuilderFactory steps;
@Autowired
DataSource dataSource;
@Qualifier("creditQueueItemWriter")
@Autowired
private ItemWriter queueItemWriter;
@Qualifier("creditQueueProcessor")
@Autowired
private CreditQueueProcessor creditQueueProcessor;
@Qualifier("creditQueueReader")
@Autowired
private CreditQueueReader creditQueueReader;
@Qualifier("transactionManager")
@Autowired
private PlatformTransactionManager transactionManager;
@Bean
public AsyncTaskExecutor taskExecutor() {
return new SimpleAsyncTaskExecutor();
}
@Bean
@Autowired
protected Step creditSubscriberStep() throws Exception {
return steps.get("creditSubscriberStep")
.allowStartIfComplete(true)
.startLimit(3)
.chunk(10)
.reader(creditQueueReader)
.processor(creditQueueProcessor)
.writer(queueItemWriter)
.faultTolerant()
.build();
}
@Bean
public Job creditSubscribersJob() throws Exception {
JobBuilder builder = jobs.get("creditSubscriberJob");
return builder
.start(creditSubscriberStep())
.build();
}
@Bean
public JobLauncher jobLauncher() throws Exception {
SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
jobLauncher.setJobRepository(jobRepository());
jobLauncher.setTaskExecutor(taskExecutor());
jobLauncher.afterPropertiesSet();
return jobLauncher;
}
@Bean
public JobRepository jobRepository() {
try {
JobRepositoryFactoryBean factoryBean = new JobRepositoryFactoryBean();
factoryBean.setDataSource(dataSource);
factoryBean.setTransactionManager(transactionManager);
factoryBean.setIsolationLevelForCreate("DEFAULT");
return factoryBean.getObject();
} catch (Exception e) {
return null;
}
}
@Bean
public DataSourceInitializer databasePopulator() {
ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
populator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
populator.setContinueOnError(true);
populator.setIgnoreFailedDrops(true);
DataSourceInitializer initializer = new DataSourceInitializer();
initializer.setDatabasePopulator(populator);
initializer.setDataSource(dataSource);
return initializer;
}
}
Credit writer :
@Service
public class CreditQueueItemWriter implements ItemWriter {
private Logger logger = LoggerFactory.getLogger(getClass());
@Qualifier("creditQueueService")
@Autowired
private CreditQueueService creditQueueService;
@Override
public void write(List<? extends CreditQueue> list) throws Exception {
logger.info("Processing credit list with size {}", list.size());
for (CreditQueue creditQueue : list) {
logger.info("Updating >>>> {} ", creditQueue);
creditQueue.setProcessingState("PROCESSED");
creditQueueService.save(creditQueue);
}
logger.info("chunk processed");
}
}
