Spring Batch with annotations

I'm currently trying to create a batch job with the Spring annotations, but the batch is never called. No error occurs; my batch just isn't called. It's a simple batch that retrieves values from the database and adds messages to a queue (RabbitMQ).
The main configuration class:
@Configuration
@EnableBatchProcessing
public class BatchInfrastructureConfiguration {

    @Bean
    public JobLauncher getJobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(getJobRepository());
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    public JobRepository getJobRepository() throws Exception {
        MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean();
        factory.setTransactionManager(new ResourcelessTransactionManager());
        factory.afterPropertiesSet();
        return (JobRepository) factory.getObject();
    }
}
The configuration class specific to my batch:
@Configuration
@Import(BatchInfrastructureConfiguration.class)
public class PurchaseStatusBatchConfiguration {

    @Inject
    private JobBuilderFactory jobBuilders;

    @Inject
    private StepBuilderFactory stepBuilders;

    @Bean
    public Job purchaseStatusJob() {
        return jobBuilders.get("purchaseStatusJob")
                .start(step())
                .build();
    }

    @Bean
    public Step step() {
        return stepBuilders.get("purchaseStatusStep")
                .tasklet(new PurchaseStatusBatch())
                .build();
    }
}
The batch class:
public class PurchaseStatusBatch implements Tasklet {

    @Inject
    private PurchaseRepository purchaseRepository;

    @Inject
    @Qualifier(ApplicationConst.BEAN_QUALIFIER_PURCHASE_QUEUE)
    private RabbitTemplate rabbitTemplate;

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
        PurchaseDto purchaseDto;
        PurchaseMessage purchaseMessage;
        List<Purchase> notVerifiedPurchase = purchaseRepository.findByVerified(false);
        for (Purchase purchase : notVerifiedPurchase) {
            purchaseDto = new PurchaseDto();
            purchaseDto.setOrderId(purchase.getOrderId());
            purchaseDto.setProductId(purchase.getProductId());
            purchaseDto.setPurchaseToken(purchase.getPurchaseToken());
            purchaseDto.setUserScrapbookKey(purchase.getUserScrapbookKey());
            purchaseMessage = new PurchaseMessage();
            purchaseMessage.setPurchaseDto(purchaseDto);
            rabbitTemplate.convertAndSend(purchaseMessage);
        }
        return null; // a null RepeatStatus is treated as RepeatStatus.FINISHED
    }
}
The job runner (class calling the batch):
@Service
public class PurchaseStatusJobRunner {

    @Inject
    private JobLocator jobLocator;

    @Inject
    private JobLauncher jobLauncher;

    //@Scheduled(fixedDelay = 3000L)
    //@Scheduled(cron = "* * * * *") // every 1 minute
    @Scheduled(fixedDelay = 3000L)
    public void runJob() throws JobExecutionAlreadyRunningException, JobRestartException,
            JobInstanceAlreadyCompleteException, JobParametersInvalidException, NoSuchJobException {
        jobLauncher.run(jobLocator.getJob("purchaseStatusJob"), new JobParameters());
    }
}

Short answer: the problem is that your PurchaseStatusBatch instance is not a Spring bean, so none of its attributes are injected; they are all null.
Try this:
@Bean
public Step step() {
    return stepBuilders.get("purchaseStatusStep")
            .tasklet(purchaseStatusBatch())
            .build();
}

@Bean
public PurchaseStatusBatch purchaseStatusBatch() {
    return new PurchaseStatusBatch();
}
When you want feedback on your job execution, use the JobExecution instance returned by the JobLauncher: you can get the ExitStatus, all exceptions caught by the framework during the run, and more.
Another solution to get feedback would be to back your JobRepository with a real database and then check execution statuses there.
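For example, a minimal sketch based on the runner above (the logging is illustrative):
JobExecution execution = jobLauncher.run(jobLocator.getJob("purchaseStatusJob"), new JobParameters());
// ExitStatus tells you how the run ended (COMPLETED, FAILED, ...)
System.out.println("Exit status: " + execution.getExitStatus());
// All exceptions the framework caught during the run
for (Throwable failure : execution.getAllFailureExceptions()) {
    failure.printStackTrace();
}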

Related

Spring Batch - How to Automatically Restart a Scheduled Job with Multi-Threaded Steps if Failed?

I'm new to Spring Batch. I have a scheduled job which needs to run every 2 hours. This job has several multi-threaded steps which should run independently of each other. The job is currently launched using a JobLauncher, as shown below.
@Component
@EnableScheduling
public class JobScheduler {

    private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job job;

    @Scheduled(cron = "0 0 */2 * * ?")
    @Retryable(maxAttempts = 3, backoff = @Backoff(delay = 60000),
            include = {SQLException.class, RuntimeException.class})
    public void automatedTask() {
        JobParameters jobParameters = new JobParametersBuilder()
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        try {
            JobExecution jobExecution = jobLauncher.run(job, jobParameters);
        } catch (JobInstanceAlreadyCompleteException | JobRestartException | JobParametersInvalidException |
                 JobExecutionAlreadyRunningException ex) {
            logger.error("Error occurred when executing job scheduler", ex);
        }
    }
}
Mentioned below is my BatchConfig class.
@Configuration
@EnableBatchProcessing
@EnableRetry
public class BatchConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    @StepScope
    public JdbcPagingItemReader<Model> reader1() {
        StringBuffer selectClause = new StringBuffer();
        selectClause.append("SELECT ");
        selectClause.append("* ");
        StringBuffer fromClause = new StringBuffer();
        fromClause.append("FROM ");
        fromClause.append("TABLENAME");
        OraclePagingQueryProvider oraclePagingQueryProvider = new OraclePagingQueryProvider();
        oraclePagingQueryProvider.setSelectClause(selectClause.toString());
        oraclePagingQueryProvider.setFromClause(fromClause.toString());
        Map<String, Order> orderByKeys = new HashMap<>();
        orderByKeys.put("id", Order.ASCENDING);
        oraclePagingQueryProvider.setSortKeys(orderByKeys);
        JdbcPagingItemReader<Model> jdbcPagingItemReader = new JdbcPagingItemReader<>();
        jdbcPagingItemReader.setSaveState(false);
        jdbcPagingItemReader.setDataSource(dataSource);
        jdbcPagingItemReader.setQueryProvider(oraclePagingQueryProvider);
        jdbcPagingItemReader.setRowMapper(BeanPropertyRowMapper.newInstance(Model.class));
        return jdbcPagingItemReader;
    }

    @Bean
    @StepScope
    public JdbcPagingItemReader<Model> reader2() {
        // ... similar to reader1(), body omitted in the question
    }

    @Bean
    @StepScope
    public JdbcPagingItemReader<Model> reader3() {
        // ... similar to reader1(), body omitted in the question
    }

    @Bean
    @StepScope
    public ItemWriter<Model> writer1() {
        return new CustomItemWriter1();
    }

    @Bean
    @StepScope
    public ItemWriter<Model> writer2() {
        return new CustomItemWriter2();
    }

    @Bean
    @StepScope
    public ItemWriter<Model> writer3() {
        return new CustomItemWriter3();
    }

    @Bean
    public Step step1() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(4);
        taskExecutor.setMaxPoolSize(4);
        taskExecutor.afterPropertiesSet();
        return stepBuilderFactory.get("step1")
                .<Model, Model>chunk(1000)
                .reader(reader1())
                .writer(writer1())
                .faultTolerant()
                .skipPolicy(new AlwaysSkipItemSkipPolicy())
                .skip(Exception.class)
                .listener(new CustomSkipListener())
                .taskExecutor(taskExecutor)
                .build();
    }

    @Bean
    public Step step2() {
        // ... body omitted in the question
    }

    @Bean
    public Step step3() {
        // ... body omitted in the question
    }

    @Bean
    public Job myJob() {
        return jobBuilderFactory.get("myJob").incrementer(new RunIdIncrementer())
                // .listener(new CustomJobExecutionListener())
                .start(step1()).on("*").to(step2())
                .from(step1()).on(ExitStatus.FAILED.getExitCode()).to(step2())
                .from(step2()).on("*").to(step3())
                .from(step2()).on(ExitStatus.FAILED.getExitCode()).to(step3())
                .end().build();
    }
}
I've added a conditional flow to the job so that every next step runs regardless of a failure in the previous step. Everything works fine in the initial steps, but if an exception is thrown in the last step, the exit status of the whole job becomes FAILED. To solve this, AND to handle any other failures in the job, I tried to implement restart functionality. Please note that I'm not saving the state in the readers due to multi-threading and I'm not sure whether this could affect the restarting.
I have referred to the accepted solution in the question below,
https://stackoverflow.com/questions/38846457/how-can-you-restart-a-failed-spring-batch-job-and-let-it-pick-up-where-it-left-o
but I don't quite understand how or where to call the jobOperator.restart method.
I've tried it like below, expecting the job to restart after launching, if it failed. But it didn't work at all. Also, this implementation would break the @Retryable annotation because of the try-catch block catching Exception.
@Component
@EnableScheduling
public class JobScheduler {

    private static final Logger logger = LoggerFactory.getLogger(JobScheduler.class);

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job job;

    @Autowired
    private JobRepository jobRepository;

    @Autowired
    private JobRegistry jobRegistry;

    @Autowired
    private DataSource dataSource;

    @Scheduled(cron = "0 0 */2 * * ?")
    @Retryable(maxAttempts = 3, backoff = @Backoff(delay = 60000),
            include = {SQLException.class, RuntimeException.class})
    public void automatedTask() {
        JobParameters jobParameters = new JobParametersBuilder()
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        try {
            JobExecution jobExecution = jobLauncher.run(job, jobParameters);
            JobExplorer jobExplorer = this.getJobExplorer(dataSource);
            JobOperator jobOperator = this.getJobOperator(jobLauncher, jobRepository, jobRegistry, jobExplorer);
            List<JobInstance> jobInstances = jobExplorer.getJobInstances("myJob", 0, 1);
            if (!jobInstances.isEmpty()) {
                JobInstance jobInstance = jobInstances.get(0);
                List<JobExecution> jobExecutions = jobExplorer.getJobExecutions(jobInstance);
                if (!jobExecutions.isEmpty()) {
                    for (JobExecution execution : jobExecutions) {
                        if (execution.getStatus().equals(BatchStatus.FAILED)) {
                            jobOperator.restart(execution.getId());
                        }
                    }
                }
            }
        } catch (Exception ex) {
            logger.error("Error occurred when executing job scheduler", ex);
        }
    }

    @Bean
    public JobOperator getJobOperator(final JobLauncher jobLauncher, final JobRepository jobRepository,
                                      final JobRegistry jobRegistry, final JobExplorer jobExplorer) {
        final SimpleJobOperator jobOperator = new SimpleJobOperator();
        jobOperator.setJobLauncher(jobLauncher);
        jobOperator.setJobRepository(jobRepository);
        jobOperator.setJobRegistry(jobRegistry);
        jobOperator.setJobExplorer(jobExplorer);
        return jobOperator;
    }

    @Bean
    public JobExplorer getJobExplorer(final DataSource dataSource) throws Exception {
        final JobExplorerFactoryBean bean = new JobExplorerFactoryBean();
        bean.setDataSource(dataSource);
        bean.setTablePrefix("BATCH_");
        bean.setJdbcOperations(new JdbcTemplate(dataSource));
        bean.afterPropertiesSet();
        return bean.getObject();
    }
}
I then tried adding a custom JobExecutionListener like below, expecting it to restart the job after it runs, if failed. But it just fails because all the @Autowired beans are null.
public class CustomJobExecutionListener {

    private static final Logger logger = LoggerFactory.getLogger(CustomJobExecutionListener.class);

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private JobRepository jobRepository;

    @Autowired
    private JobRegistry jobRegistry;

    @Autowired
    private DataSource dataSource;

    @BeforeJob
    public void beforeJob(JobExecution jobExecution) {
    }

    @AfterJob
    public void afterJob(JobExecution jobExecution) {
        try {
            JobExplorer jobExplorer = this.getJobExplorer(dataSource);
            JobOperator jobOperator = this.getJobOperator(jobLauncher, jobRepository, jobRegistry, jobExplorer);
            if (jobExecution.getStatus().equals(BatchStatus.FAILED)) {
                jobOperator.restart(jobExecution.getId());
            }
        } catch (Exception ex) {
            logger.error("Unknown error occurred when executing after job execution listener", ex);
        }
    }

    @Bean
    public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor(JobRegistry jobRegistry) {
        final JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
        jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
        return jobRegistryBeanPostProcessor;
    }

    @Bean
    public JobOperator getJobOperator(final JobLauncher jobLauncher, final JobRepository jobRepository,
                                      final JobRegistry jobRegistry, final JobExplorer jobExplorer) {
        final SimpleJobOperator jobOperator = new SimpleJobOperator();
        jobOperator.setJobLauncher(jobLauncher);
        jobOperator.setJobRepository(jobRepository);
        jobOperator.setJobRegistry(jobRegistry);
        jobOperator.setJobExplorer(jobExplorer);
        return jobOperator;
    }

    @Bean
    public JobExplorer getJobExplorer(final DataSource dataSource) throws Exception {
        final JobExplorerFactoryBean bean = new JobExplorerFactoryBean();
        bean.setDataSource(dataSource);
        bean.setTablePrefix("BATCH_");
        bean.setJdbcOperations(new JdbcTemplate(dataSource));
        bean.afterPropertiesSet();
        return bean.getObject();
    }
}
What am I doing wrong? How should the restart functionality be implemented for this job?
Appreciate your kind help!
Please note that I'm not saving the state in the readers due to multi-threading and I'm not sure whether this could affect the restarting.
It certainly affects restartability. Multi-threading in steps is incompatible with restartability. From the javadoc of the JdbcPagingItemReader that you are using, you can read the following:
The implementation is thread-safe in between calls to open(ExecutionContext),
but remember to use saveState=false if used in a multi-threaded client
(no restart available).
Without restart data, Spring Batch cannot restart the step from where it left off. This is a trade-off you accepted by using a multi-threaded step.
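If restartability matters more to you than throughput, the usual alternative is a single-threaded step with the reader's default saveState=true; a sketch, not a drop-in replacement (the query provider here stands for the one built in your reader1()):
@Bean
@StepScope
public JdbcPagingItemReader<Model> restartableReader() {
    JdbcPagingItemReader<Model> reader = new JdbcPagingItemReader<>();
    // saveState defaults to true: the reader records its position in the
    // ExecutionContext, so a restart resumes where the failed run stopped
    reader.setDataSource(dataSource);
    reader.setQueryProvider(oraclePagingQueryProvider); // as built in reader1()
    reader.setRowMapper(BeanPropertyRowMapper.newInstance(Model.class));
    return reader;
}
// ...and omit .taskExecutor(taskExecutor) from the step definition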
but I don't quite understand how or where to call the jobOperator.restart method.
Now with regard to restarting the failed job, a few notes:
Trying to restart a job in a JobExecutionListener is incorrect. This listener is called in the scope of the current job execution, while a restart will have its own, distinct job execution.
JobOperator#restart should not be called inside the scheduled method, otherwise it will be called for every scheduled run. You can find an example here: https://stackoverflow.com/a/55137314/5019386
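A minimal sketch of that separation (the class name and schedule are illustrative; it assumes the JobExplorer and JobOperator beans from your configuration are injectable):
@Component
public class FailedJobRestarter {

    @Autowired
    private JobExplorer jobExplorer;

    @Autowired
    private JobOperator jobOperator;

    // Runs independently of the launching schedule: scan recent instances of
    // "myJob" and restart only the executions that ended in FAILED state
    @Scheduled(fixedDelay = 600000)
    public void restartFailedExecutions() throws Exception {
        for (JobInstance instance : jobExplorer.getJobInstances("myJob", 0, 10)) {
            for (JobExecution execution : jobExplorer.getJobExecutions(instance)) {
                if (execution.getStatus() == BatchStatus.FAILED) {
                    jobOperator.restart(execution.getId());
                }
            }
        }
    }
}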

How can I restrict duplicate job creation using Spring Boot and Spring Batch?

I created a Spring Boot application with Spring Batch and scheduling. When I create only one job, things work fine. But when I try to create another job using the modular approach, the jobs and their steps run many times and get duplicated.
I'm getting the below error.
2017-08-24 16:05:00.581 INFO 16172 --- [cTaskExecutor-2] o.s.b.c.l.support.SimpleJobLauncher : Job: [FlowJob: [name=importDeptJob]] completed with the following parameters: [{JobID1=1503579900035}] and the following status: [FAILED]
2017-08-24 16:05:00.581 ERROR 16172 --- [cTaskExecutor-2] o.s.batch.core.step.tasklet.TaskletStep : JobRepository failure forcing rollback
org.springframework.dao.OptimisticLockingFailureException: Attempt to update step execution id=1 with wrong version (3), where current version is 1
Can anyone please guide me on how to resolve these issues and run the jobs in parallel, independently of each other?
Below are the configuration classes: ModularJobConfiguration.java, DeptBatchConfiguration.java, CityBatchConfiguration.java and BatchScheduler.java.
@Configuration
@EnableBatchProcessing(modular = true)
public class ModularJobConfiguration {

    @Bean
    public ApplicationContextFactory firstJob() {
        return new GenericApplicationContextFactory(DeptBatchConfiguration.class);
    }

    @Bean
    public ApplicationContextFactory secondJob() {
        return new GenericApplicationContextFactory(CityBatchConfiguration.class);
    }
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class DeptBatchConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(DeptBatchConfiguration.class);

    @Autowired
    private SimpleJobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public JobExecutionListener listener;

    public ItemReader<DepartmentModelReader> deptReaderSO;

    @Autowired
    @Qualifier("dataSourceReader")
    private DataSource dataSourceReader;

    @Autowired
    @Qualifier("dataSourceWriter")
    private DataSource dataSourceWriter;

    @Scheduled(cron = "0 0/1 * * * ?")
    public void performFirstJob() throws Exception {
        long startTime = System.currentTimeMillis();
        LOGGER.info("Job1 Started at :" + new Date());
        JobParameters param = new JobParametersBuilder()
                .addString("JobID1", String.valueOf(System.currentTimeMillis())).toJobParameters();
        JobExecution execution = (JobExecution) jobLauncher.run(
                importDeptJob(jobBuilderFactory, stepdept(deptReaderSO, customWriter()), listener), param);
        long endTime = System.currentTimeMillis();
        LOGGER.info("Job1 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
    }

    @Bean
    public ItemReader<DepartmentModelReader> deptReaderSO() {
        //LOGGER.info("Inside deptReaderSO Method");
        JdbcCursorItemReader<DepartmentModelReader> deptReaderSO = new JdbcCursorItemReader<>();
        //deptReaderSO.setSql("select id, firstName, lastname, random_num from reader");
        deptReaderSO.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
        deptReaderSO.setDataSource(dataSourceReader);
        deptReaderSO.setRowMapper(
                (ResultSet resultSet, int rowNum) -> {
                    if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
                        DepartmentModelReader recordSO = new DepartmentModelReader();
                        recordSO.setDeptCode(resultSet.getString("DEPT_CODE"));
                        recordSO.setDeptName(resultSet.getString("DEPT_NAME"));
                        recordSO.setFullDeptName(resultSet.getString("FULL_DEPT_NAME"));
                        recordSO.setCityCode(resultSet.getInt("CITY_CODE"));
                        recordSO.setCityName(resultSet.getString("CITY_NAME"));
                        recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
                        recordSO.setCreatedUserId(resultSet.getInt("CREATED_USER_ID"));
                        recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
                        recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
                        recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
                        recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
                        recordSO.setDeptClass(resultSet.getInt("DEPT_CLASS"));
                        recordSO.setDeptParent(resultSet.getString("DEPT_PARENT"));
                        recordSO.setDeptParentName(resultSet.getString("DEPT_PARENT_NAME"));
                        // LOGGER.info("RowMapper record : {}", recordSO.getDeptCode() +" | "+recordSO.getDeptName());
                        return recordSO;
                    } else {
                        LOGGER.info("Returning null from rowMapper");
                        return null;
                    }
                });
        return deptReaderSO;
    }

    @Bean
    public ItemProcessor<DepartmentModelReader, DepartmentModelWriter> processor() {
        //LOGGER.info("Inside Processor Method");
        return new RecordProcessor();
    }

    @Bean
    public ItemWriter<DepartmentModelWriter> customWriter() {
        //LOGGER.info("Inside customWriter Method");
        return new CustomItemWriter();
    }

    @Bean
    public Job importDeptJob(JobBuilderFactory jobs, Step stepdept, JobExecutionListener listener) {
        return jobs.get("importDeptJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener())
                .flow(stepdept).end().build();
    }

    @Bean
    public Step stepdept(ItemReader<DepartmentModelReader> deptReaderSO,
                         ItemWriter<DepartmentModelWriter> writerSO) {
        LOGGER.info("Inside stepdept Method");
        return stepBuilderFactory.get("stepdept").<DepartmentModelReader, DepartmentModelWriter>chunk(5)
                .reader(deptReaderSO).processor(processor()).writer(customWriter())
                .transactionManager(platformTransactionManager(dataSourceWriter)).build();
    }

    @Bean
    public JobExecutionListener listener() {
        return new JobCompletionNotificationListener();
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

    @Bean
    public BatchWriteService batchWriteService() {
        return new BatchWriteService();
    }

    @Bean
    public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setDataSource(dataSourceWriter);
        return transactionManager;
    }
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class CityBatchConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(CityBatchConfiguration.class);

    @Autowired
    private SimpleJobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public JobExecutionListener listener;

    public ItemReader<CitiesModelReader> citiesReaderSO;

    @Autowired
    @Qualifier("dataSourceReader")
    private DataSource dataSourceReader;

    @Autowired
    @Qualifier("dataSourceWriter")
    private DataSource dataSourceWriter;

    @Scheduled(cron = "0 0/1 * * * ?")
    public void performSecondJob() throws Exception {
        long startTime = System.currentTimeMillis();
        LOGGER.info("\n Job2 Started at :" + new Date());
        JobParameters param = new JobParametersBuilder()
                .addString("JobID2", String.valueOf(System.currentTimeMillis())).toJobParameters();
        JobExecution execution = (JobExecution) jobLauncher.run(
                importCitiesJob(jobBuilderFactory, stepcity(citiesReaderSO, customCitiesWriter()), listener), param);
        long endTime = System.currentTimeMillis();
        LOGGER.info("Job2 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
    }

    @Bean
    public ItemReader<CitiesModelReader> citiesReaderSO() {
        //LOGGER.info("Inside readerSO Method");
        JdbcCursorItemReader<CitiesModelReader> readerSO = new JdbcCursorItemReader<>();
        readerSO.setSql("SELECT CITY_CODE,CITY_NAME,PARENT_CITY,CITY_TYPE,CITY_TYPE_NAME,CREATED_G_DATE,CREATED_USER_ID,MODIFIED_G_DATE,MODIFIED_USER_ID,RECORD_ACTIVITY FROM TBL_SAMPLE_SAFTY_CITIES");
        readerSO.setDataSource(dataSourceReader);
        readerSO.setRowMapper(
                (ResultSet resultSet, int rowNum) -> {
                    if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
                        CitiesModelReader recordSO = new CitiesModelReader();
                        recordSO.setCityCode(resultSet.getLong("CITY_CODE"));
                        recordSO.setCityName(resultSet.getString("CITY_NAME"));
                        recordSO.setParentCity(resultSet.getInt("PARENT_CITY"));
                        recordSO.setCityType(resultSet.getString("CITY_TYPE"));
                        recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
                        recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
                        recordSO.setCreatedUserId(resultSet.getString("CREATED_USER_ID"));
                        recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
                        recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
                        recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
                        //LOGGER.info("RowMapper record : {}", recordSO.toString());
                        return recordSO;
                    } else {
                        LOGGER.info("Returning null from rowMapper");
                        return null;
                    }
                });
        return readerSO;
    }

    @Bean
    public ItemProcessor<CitiesModelReader, CitiesModelWriter> citiesProcessor() {
        //LOGGER.info("Inside Processor Method");
        return new RecordCitiesProcessor();
    }

    @Bean
    public ItemWriter<CitiesModelWriter> customCitiesWriter() {
        LOGGER.info("Inside customCitiesWriter Method");
        return new CustomCitiesWriter();
    }

    @Bean
    public Job importCitiesJob(JobBuilderFactory jobs, Step stepcity, JobExecutionListener listener) {
        LOGGER.info("Inside importCitiesJob Method");
        return jobs.get("importCitiesJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener())
                .flow(stepcity).end().build();
    }

    @Bean
    public Step stepcity(ItemReader<CitiesModelReader> readerSO,
                         ItemWriter<CitiesModelWriter> writerSO) {
        LOGGER.info("Inside stepCity Method");
        return stepBuilderFactory.get("stepcity").<CitiesModelReader, CitiesModelWriter>chunk(5)
                .reader(readerSO).processor(citiesProcessor()).writer(customCitiesWriter())
                .transactionManager(platformTransactionManager(dataSourceWriter)).build();
    }

    @Bean
    public JobExecutionListener listener() {
        return new JobCompletionNotificationListener();
    }

    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

    @Bean
    public BatchWriteService batchWriteService() {
        return new BatchWriteService();
    }

    @Bean
    public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setDataSource(dataSourceWriter);
        return transactionManager;
    }
}
@Configuration
@EnableScheduling
public class BatchScheduler {

    private static final Logger LOGGER = LoggerFactory.getLogger(BatchScheduler.class);

    @Bean
    public ResourcelessTransactionManager resourcelessTransactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
            ResourcelessTransactionManager txManager) throws Exception {
        LOGGER.info("Inside mapJobRepositoryFactory method");
        MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(txManager);
        factory.afterPropertiesSet();
        return factory;
    }

    @Bean
    public JobRepository jobRepository(
            MapJobRepositoryFactoryBean factory) throws Exception {
        LOGGER.info("Inside jobRepository method");
        return factory.getObject();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
        LOGGER.info("Inside jobLauncher method");
        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository);
        final SimpleAsyncTaskExecutor simpleAsyncTaskExecutor = new SimpleAsyncTaskExecutor();
        launcher.setTaskExecutor(simpleAsyncTaskExecutor);
        return launcher;
    }
}
The map-based SimpleJobRepository created from MapJobRepositoryFactoryBean is not thread-safe.
From the Javadocs:
A FactoryBean that automates the creation of a SimpleJobRepository using non-persistent in-memory DAO implementations. This repository is only really intended for use in testing and rapid prototyping. In such settings you might find that ResourcelessTransactionManager is useful (as long as your business logic does not use a relational database). Not suited for use in multi-threaded jobs with splits, although it should be safe to use in a multi-threaded step.
You can create a JDBC-based SimpleJobRepository from JobRepositoryFactoryBean, which may utilize an in-memory H2 database if you don't require that Batch metadata be persisted.
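If you do want to define that bean yourself, here is a minimal sketch, assuming a DataSource and a PlatformTransactionManager are available in the context:
@Bean
public JobRepository jobRepository(DataSource dataSource,
                                   PlatformTransactionManager transactionManager) throws Exception {
    JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
    // Persists batch metadata in the BATCH_* tables of the given DataSource,
    // so concurrent jobs no longer share fragile in-memory state
    factory.setDataSource(dataSource);
    factory.setTransactionManager(transactionManager);
    factory.afterPropertiesSet();
    return factory.getObject();
}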
Since you are using Spring Boot, to use an H2-backed JobRepository simply remove your JobRepository bean and add the following dependency to your pom.xml file:
<dependency>
    <groupId>com.h2database</groupId>
    <artifactId>h2</artifactId>
    <scope>runtime</scope>
</dependency>
Spring Boot will automatically configure a DataSource as though you had configured the following in your application.properties file, and it will use that DataSource when creating the JobRepository.
spring.datasource.url=jdbc:h2:mem:testdb
spring.datasource.driverClassName=org.h2.Driver
spring.datasource.username=sa
spring.datasource.password=
Alternatively, to use some other JDBC-backed JobRepository add the JDBC dependencies for your RDBMS of choice to your project, and configure a DataSource for it (either in code as a DataSource bean, or in application.properties using the spring.datasource prefix as shown above). Spring Boot will automatically use this DataSource during the creation of the JobRepository bean.
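For example, for MySQL (the connection values below are placeholders):
spring.datasource.url=jdbc:mysql://localhost:3306/batchdb
spring.datasource.driverClassName=com.mysql.jdbc.Driver
spring.datasource.username=batchuser
spring.datasource.password=secret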

Failed to run job based on schedule

I'm trying to run the batch on a schedule. I have used the @Scheduled annotation and a cron expression. The batch runs only once, and no error is displayed. I have added the Maven dependency for Quartz, and I have not added any XML file.
@EnableAutoConfiguration(exclude = { DataSourceAutoConfiguration.class, SwaggerConfig.class,
        WebMvcAutoConfiguration.class, RepositoryRestMvcAutoConfiguration.class })
@EnableScheduling
@ComponentScan
public class BatchApplication {

    public static void main(String[] args) throws Exception {
        SpringApplication app = new SpringApplication(BatchApplication.class);
        app.setWebEnvironment(false);
        ConfigurableApplicationContext ctx = app.run(args);
        System.out.println(ctx.getBean(DataSource.class));
        JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
        Job addLeaveAllocationJob = ctx.getBean("addLeaveAllocationJob", Job.class);
        JobParameters jobParameters = new JobParametersBuilder().addDate("date", new Date())
                .toJobParameters();
        JobExecution jobExecution = jobLauncher.run(addLeaveAllocationJob, jobParameters);
        BatchStatus batchStatus = jobExecution.getStatus();
        while (batchStatus.isRunning()) {
            System.out.println("*** Still Running ************");
            Thread.sleep(2000);
        }
    }
}
I have a job class whose job bean method is annotated with @Scheduled and a cron expression.
@Configuration
@EnableBatchProcessing
@Component
public class LeaveAllocationJobConfiguration {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private EntityManagerFactory entityManagerFactory;

    @Bean
    public ItemReader<Employee> reader() {
        JpaPagingItemReader<Employee> employeeReader = new JpaPagingItemReader<Employee>();
        employeeReader.setEntityManagerFactory(entityManagerFactory);
        employeeReader.setQueryString("from Employee");
        return employeeReader;
    }

    @Bean
    @Scheduled(cron = "0 0/1 * 1/1 * ? *")
    public Job addLeaveAllocationJob() {
        System.out.println("Hello");
        return jobs.get("addLeaveAllocationJob").listener(protocolListener()).start(step()).build();
    }

    @Bean
    public Step step() {
        // important to be one in this case to commit after every line read
        return stepBuilderFactory.get("step").<Employee, EmployeeDTO>chunk(1).reader(reader()).processor(processor())
                .writer(writer()).build();
    }

    @Bean
    public ItemWriter<? super EmployeeDTO> writer() {
        return new ItemWriter<EmployeeDTO>() {
            @Override
            public void write(List<? extends EmployeeDTO> items) throws Exception {
                System.out.println("Processing " + items);
            }
        };
    }

    @Bean
    public ItemProcessor<Employee, EmployeeDTO> processor() {
        return new ItemProcessor<Employee, EmployeeDTO>() {
            @Override
            public EmployeeDTO process(Employee employee) throws Exception {
                return new EmployeeDTO(employee);
            }
        };
    }

    @Bean
    public ProtocolListener protocolListener() {
        return new ProtocolListener();
    }
}
Please help me to solve this issue.
Check this link.
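For what it's worth, the working pattern shown in the first question on this page is to put @Scheduled on a plain method of a runner component that launches the job, rather than on the @Bean factory method itself; a minimal sketch (the class name is illustrative):
@Component
public class LeaveAllocationJobScheduler {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job addLeaveAllocationJob;

    // A @Bean method is only invoked by the container to create the bean;
    // scheduling belongs on a regular method that launches the job
    @Scheduled(cron = "0 0/1 * 1/1 * ? *")
    public void runJob() throws Exception {
        JobParameters params = new JobParametersBuilder()
                .addDate("date", new Date())
                .toJobParameters();
        jobLauncher.run(addLeaveAllocationJob, params);
    }
}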

Quartz + Spring Batch in Spring Boot

I'm trying to develop a Spring application that integrates Quartz and Spring Batch. For some reason, I'm not able to run it properly and I'm getting some compilation errors.
Code:
QuartzConfiguration
@Configuration
@ComponentScan("com.concretepage")
public class QuartzConfiguration {

    @Bean
    public MethodInvokingJobDetailFactoryBean methodInvokingJobDetailFactoryBean() {
        MethodInvokingJobDetailFactoryBean obj = new MethodInvokingJobDetailFactoryBean();
        obj.setTargetBeanName("jobone");
        obj.setTargetMethod("myTask");
        return obj;
    }

    @Bean
    public CronTriggerFactoryBean cronTriggerFactoryBean() {
        CronTriggerFactoryBean stFactory = new CronTriggerFactoryBean();
        SpringBatchJobs batch = new SpringBatchJobs();
        stFactory.setJobDetail(batch.job()); // compilation error here: "The method job() from the type SpringBatchJobs refers to the missing type Job"
        stFactory.setStartDelay(3000);
        stFactory.setName("mytrigger");
        stFactory.setGroup("mygroup");
        stFactory.setCronExpression("0/1 * * * * ?");
        return stFactory;
    }

    @Bean
    public SchedulerFactoryBean schedulerFactoryBean() {
        SchedulerFactoryBean scheduler = new SchedulerFactoryBean();
        scheduler.setTriggers(cronTriggerFactoryBean().getObject());
        return scheduler;
    }
}
Spring Batch:
@EnableBatchProcessing
public class SpringBatchJobs {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Bean
    protected Tasklet tasklet() {
        return new Tasklet() {
            @Override
            public RepeatStatus execute(StepContribution contribution,
                                        ChunkContext context) {
                return RepeatStatus.FINISHED;
            }
        };
    }

    @Bean
    public Job job() throws Exception {
        return this.jobs.get("job").start(step1()).build();
    }

    @Bean
    protected Step step1() throws Exception {
        return this.steps.get("step1").tasklet(tasklet()).build();
    }
}
The problem comes from the following line:
stFactory.setJobDetail(batch.job()); // compilation error: "The method job() from the type SpringBatchJobs refers to the missing type Job"
How can I run Spring Batch jobs using Quartz 2 in Spring Boot? Any ideas?
A Spring Batch Job needs to be launched through the Spring Batch job launcher.
Please refer to the link below for details on how to do it.
https://examples.javacodegeeks.com/enterprise-java/spring/batch/quartz-spring-batch-example/
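For illustration, a minimal sketch of that pattern (the class name is an assumption; jobLauncher and job would be wired into the Quartz job data map, e.g. via a JobDetailFactoryBean):
public class BatchLauncherJob extends QuartzJobBean {

    private JobLauncher jobLauncher;
    private Job job; // the Spring Batch Job (org.springframework.batch.core.Job), e.g. the job() bean above

    public void setJobLauncher(JobLauncher jobLauncher) { this.jobLauncher = jobLauncher; }
    public void setJob(Job job) { this.job = job; }

    // Quartz calls this on every trigger; delegate to the Batch launcher
    @Override
    protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
        try {
            JobParameters params = new JobParametersBuilder()
                    .addLong("time", System.currentTimeMillis())
                    .toJobParameters();
            jobLauncher.run(job, params);
        } catch (Exception e) {
            throw new JobExecutionException(e);
        }
    }
}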

spring batch causing spring data not to commit transaction

I have a Spring MVC application with a batch process powered by Spring Batch. If I remove the batch configuration, all transactions commit. If a batch job is run, the job completes successfully but no data is committed to the database.
My configuration is as follows:
@Configuration
@EnableWebMvc
@EnableAsync
@EnableScheduling
@EnableBatchProcessing(modular = false)
@EnableTransactionManagement
@EnableRabbit
@EnableJpaRepositories(basePackages = "zw.co.econet.workstation.repositories")
@ComponentScan(basePackages = {"zw.co.workstation"})
@PropertySource(value = {"classpath:application.properties"})
public class WebConfiguration extends WebMvcConfigurerAdapter {

    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        registry.addResourceHandler("/resources/**").addResourceLocations("/resources/");
    }

    @Bean
    public InternalResourceViewResolver jspViewResolver() {
        InternalResourceViewResolver bean = new InternalResourceViewResolver();
        bean.setPrefix("/WEB-INF/pages/");
        bean.setSuffix(".jsp");
        return bean;
    }
}
Spring Batch configuration:
@Configuration
public class BatchConfiguration {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory steps;

    @Autowired
    DataSource dataSource;

    @Qualifier("creditQueueItemWriter")
    @Autowired
    private ItemWriter queueItemWriter;

    @Qualifier("creditQueueProcessor")
    @Autowired
    private CreditQueueProcessor creditQueueProcessor;

    @Qualifier("creditQueueReader")
    @Autowired
    private CreditQueueReader creditQueueReader;

    @Qualifier("transactionManager")
    @Autowired
    private PlatformTransactionManager transactionManager;

    @Bean
    public AsyncTaskExecutor taskExecutor() {
        return new SimpleAsyncTaskExecutor();
    }

    @Bean
    @Autowired
    protected Step creditSubscriberStep() throws Exception {
        return steps.get("creditSubscriberStep")
                .allowStartIfComplete(true)
                .startLimit(3)
                .chunk(10)
                .reader(creditQueueReader)
                .processor(creditQueueProcessor)
                .writer(queueItemWriter)
                .faultTolerant()
                .build();
    }

    @Bean
    public Job creditSubscribersJob() throws Exception {
        JobBuilder builder = jobs.get("creditSubscriberJob");
        return builder
                .start(creditSubscriberStep())
                .build();
    }

    @Bean
    public JobLauncher jobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository());
        jobLauncher.setTaskExecutor(taskExecutor());
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }

    @Bean
    public JobRepository jobRepository() {
        try {
            JobRepositoryFactoryBean factoryBean = new JobRepositoryFactoryBean();
            factoryBean.setDataSource(dataSource);
            factoryBean.setTransactionManager(transactionManager);
            factoryBean.setIsolationLevelForCreate("DEFAULT");
            return factoryBean.getObject();
        } catch (Exception e) {
            return null;
        }
    }

    @Bean
    public DataSourceInitializer databasePopulator() {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        populator.setContinueOnError(true);
        populator.setIgnoreFailedDrops(true);
        DataSourceInitializer initializer = new DataSourceInitializer();
        initializer.setDatabasePopulator(populator);
        initializer.setDataSource(dataSource);
        return initializer;
    }
}
Credit writer:
@Service
public class CreditQueueItemWriter implements ItemWriter {

    private Logger logger = LoggerFactory.getLogger(getClass());

    @Qualifier("creditQueueService")
    @Autowired
    private CreditQueueService creditQueueService;

    @Override
    public void write(List<? extends CreditQueue> list) throws Exception {
        logger.info("Processing credit list with size {}", list.size());
        for (CreditQueue creditQueue : list) {
            logger.info("Updating >>>> {} ", creditQueue);
            creditQueue.setProcessingState("PROCESSED");
            creditQueueService.save(creditQueue);
        }
        logger.info("chunk processed");
    }
}
