JpaPagingItemReader - endless Loop on failed read - spring-boot

Hi, I am getting an endless loop when an exception is thrown.
Is there something wrong with the transaction manager I am using?
Thanks!
Below is my job setup:
@Bean
public ItemReader<Entity> bizAppReader() {
@SuppressWarnings("serial")
Map<String, Object> map = new HashMap<String, Object>() {{
put("status", Arrays.asList(new ApplicationStatus[] {ApplicationStatus.COMPLETED, ApplicationStatus.PENDING, ApplicationStatus.INITIATE}));
}};
return new JpaPagingItemReaderBuilder<Entity>()
.name("bizAppJpaReader")
.entityManagerFactory(entityManagerFactory)
.queryString("from Entity b where b.status in (:status)").parameterValues(map)
.build();
}
@Bean
public Step bizApplicatonStep(@Value("${batch.chunk_size:10}") int chunkSize,
ItemReader<Entity> bizAppReader,
ItemProcessor<Entity, Map<ApplicationStatus, String>> bizAppProcessor,
ItemWriter<Map<ApplicationStatus, String>> bizAppWriter) {
return stepBuilderFactory.get("bizApplicatonStep")
.<Entity, Map<ApplicationStatus, String>>chunk(chunkSize).reader(bizAppReader)
.processor(bizAppProcessor).writer(bizAppWriter)
.faultTolerant()
.skip(Exception.class)
.noRollback(Exception.class)
.skipPolicy((e, i) -> {
log.error("Exception occurred : ", e);
return true;
})
.allowStartIfComplete(true)
.build();
}
@Slf4j
@SpringBootApplication
@EnableBatchProcessing
public class BatchApplication extends DefaultBatchConfigurer {
public static void main(String[] args) throws Exception {
ConfigurableApplicationContext context = SpringApplication.run(BatchApplication.class, args);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job;
JobParameters jobParameters;
if (ArrayUtils.isNotEmpty(args) && args[0].equals("bookingJob")) {
job = context.getBean("bookingJob", Job.class);
jobParameters = new JobParametersBuilder()
.addString("startMonth", args[1])
.addString("endMonth", args[2])
.toJobParameters();
} else {
job = context.getBean("bizApplicationJob", Job.class);
jobParameters = new JobParametersBuilder().toJobParameters();
}
JobExecution jobExecution = jobLauncher.run(job, jobParameters);
if (BatchStatus.FAILED.equals(jobExecution.getStatus())) {
log.error("job execution completed exit code is : 1 ");
System.exit(1);
}
log.info("job execution completed exit code is : 0 ");
}
@Override
protected JobRepository createJobRepository() throws Exception {
return new MapJobRepositoryFactoryBean(new ResourcelessTransactionManager()).getObject();
}
}
Stacktrace:
10:13:20.657 [main] ERROR c.u.pweb.bizacct.batch.BizJobConfig - Exception occurred :
java.lang.IllegalStateException: Transaction already active
at org.hibernate.engine.transaction.internal.TransactionImpl.begin(TransactionImpl.java:52)
===================== Updated application to replicate =====================
This happens when the reader fails to read a row with an invalid enum value from the DB.
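For reference, a hypothetical row like the one below reproduces it: 'XX' is not a constant of ApplicationStatus, so Hibernate's @Enumerated(EnumType.STRING) mapping fails while the page is being read.
-- hypothetical data: 'XX' is not one of MA, AS, SH, CP, AA, AP
INSERT INTO APP_TBL_TEST (ID, APPLICATION_NAME, STATUS) VALUES ('1', 'APP-001', 'XX');
The updated application: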
@Slf4j
@SpringBootApplication
@EnableBatchProcessing
public class DemoBatchApplication extends DefaultBatchConfigurer {
public static void main(String[] args) {
SpringApplication.run(DemoBatchApplication.class, args);
}
@Override
public void setDataSource(DataSource dataSource) {
super.setDataSource(null);
}
private final EntityManagerFactory entityManagerFactory;
public DemoBatchApplication(EntityManagerFactory entityManagerFactory) {
this.entityManagerFactory = entityManagerFactory;
}
@Override
public PlatformTransactionManager getTransactionManager() {
return new JpaTransactionManager(this.entityManagerFactory);
}
}
The enum:
public enum ApplicationStatus {
MA, AS, SH, CP, AA, AP
}
The entity:
@Entity
@Table(name = "APP_TBL_TEST")
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class ApplicationEntity {
@Id
@GeneratedValue(generator = "system-uuid")
@GenericGenerator(name = "system-uuid", strategy = "uuid2")
@Column(name = "ID", length = 50)
private String id;
@Column(name = "APPLICATION_NAME", unique = true, length = 50)
private String referenceNumber;
@Column(name = "STATUS")
@Enumerated(EnumType.STRING)
private ApplicationStatus status;
}
The batch config:
@Configuration
@Slf4j
@Data
public class ApplicationBatchCfg {
@Autowired
public EntityManagerFactory entityManagerFactory;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Bean
public Step applicationStep(@Value("${batch.chunk_size:5}") int chunkSize,
ItemReader<ApplicationEntity> appReader,
ItemProcessor<ApplicationEntity, String> appProcessor,
ItemWriter<String> appWriter){
return stepBuilderFactory.get("applicatonStep")
.<ApplicationEntity, String>chunk(chunkSize)
.reader(appReader)
.processor(appProcessor)
.writer(appWriter)
.faultTolerant()
.skip(Exception.class)
.noRollback(Exception.class)
.skipPolicy((e, i) -> {
log.error("Exception occurred : ", e);
return true;
})
.skipLimit(5)
.allowStartIfComplete(true)
.build();
}
@Bean
public Job applicationJob(Step applicationStep) {
return jobBuilderFactory.get("applicationJob").incrementer(new RunIdIncrementer())
.start(applicationStep).build();
}
@Bean
public ItemReader<ApplicationEntity> appReader() {
return new JpaPagingItemReaderBuilder<ApplicationEntity>()
.name("appReader")
.entityManagerFactory(entityManagerFactory)
.queryString("from ApplicationEntity")
.pageSize(3)
.build();
}
@Bean
public ItemProcessor<ApplicationEntity, String> appProcessor(){
return new ItemProcessor<ApplicationEntity, String>() {
@Override
public String process(ApplicationEntity item) throws Exception {
//nothing for demo only
log.info("item {}", item);
return null;
}
};
}
@Bean
public ItemWriter<String> appWriter() {
return new ItemWriter<String>() {
@Override
public void write(List<? extends String> items) throws Exception {
//nothing for demo only
}
};
}
}
application.yml:
spring:
  jpa:
    properties:
      hibernate:
        show_sql: true
        use_sql_comments: true
        format_sql: true
        dialect: org.hibernate.dialect.Oracle10gDialect
  datasource:
    useWallet: false
    url: jdbc:oracle:thin:@DB12397:1521:azd
    platform: oracle
    username: user
    password: pwd

Related

Records are not written in files when invoked from BillerOrderWriter which implements ItemWriter in Spring Batch

I am trying to write successful records using one writer and failed records using another writer.
I have written a BillerOrderWriter class which implements ItemWriter. I put in some log statements and I can see it logs the success billerOrderId or failed billerOrderId. But it seems like it does not invoke DatabaseToCsvFileJobConfig or SuccessfulOrdersToCsvFileJobConfig.
public class BillerOrderWriter implements ItemWriter<BillerOrder>{
private static Logger log = LoggerFactory.getLogger("BillerOrderWriter.class");
@Autowired
SuccessfulOrdersToCsvFileJobConfig successfulOrdersToCsvFileJobConfig;
@Autowired
DatabaseToCsvFileJobConfig databaseToCsvFileJobConfig;
@Override
public void write(List<? extends BillerOrder> items) throws Exception {
for (BillerOrder item : items) {
log.info("item = " + item.toString());
if (item.getResult().equals("SUCCESS")) {
log.info(" Success billerOrderId = " + item.getBillerOrderId());
successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter();
} else {
log.info("Failed billerOrderId = " + item.getBillerOrderId());
databaseToCsvFileJobConfig.databaseCsvItemWriter();
}
}
}
}
Here is the BatchConfig class.
@Bean
public BillerOrderWriter billerOrderWriter() {
return new BillerOrderWriter();
}
@Bean
public Job importJobOrder(JobCompletionNotificationListner listener, Step step1) {
return jobBuilderFactory.get("importJobOrder")
.incrementer(new RunIdIncrementer())
.listener(listener)
.flow(step1)
.end()
.build();
}
@Bean(name="step1")
public Step step1(BillerOrderWriter billerOrderWriter) {
return stepBuilderFactory.get("step1")
.<BillerOrder, BillerOrder> chunk(10)
.reader((ItemReader<? extends BillerOrder>) reader())
.processor(processor())
.writer(billerOrderWriter)
.build();
}
Here are my success writer and failed writer classes.
@Configuration
public class SuccessfulOrdersToCsvFileJobConfig {
private static Logger log = LoggerFactory.getLogger("SuccessfulOrdersToCsvFileJobConfig.class");
@Bean
public ItemWriter<BillerOrder> successfulDatabaseCsvItemWriter() {
log.info("Entering SuccessfulOrdersToCsvFileJobConfig...");
FlatFileItemWriter<BillerOrder> csvFileWriter = new FlatFileItemWriter<>();
String exportFileHeader = "BillerOrderId;SuccessMessage";
OrderWriter headerWriter = new OrderWriter(exportFileHeader);
csvFileWriter.setHeaderCallback(headerWriter);
String exportFilePath = "/tmp/SuccessBillerOrderIdForRetry.csv";
csvFileWriter.setResource(new FileSystemResource(exportFilePath));
LineAggregator<BillerOrder> lineAggregator = createOrderLineAggregator();
csvFileWriter.setLineAggregator(lineAggregator);
return csvFileWriter;
}
private LineAggregator<BillerOrder> createOrderLineAggregator() {
log.info("Entering createOrderLineAggregator...");
DelimitedLineAggregator<BillerOrder> lineAggregator = new DelimitedLineAggregator<>();
lineAggregator.setDelimiter(";");
FieldExtractor<BillerOrder> fieldExtractor = createOrderFieldExtractor();
lineAggregator.setFieldExtractor(fieldExtractor);
return lineAggregator;
}
private FieldExtractor<BillerOrder> createOrderFieldExtractor() {
log.info("Entering createOrderFieldExtractor...");
BeanWrapperFieldExtractor<BillerOrder> extractor = new BeanWrapperFieldExtractor<>();
extractor.setNames(new String[] {"billerOrderId","successMessage"});
return extractor;
}
}
@Configuration
public class DatabaseToCsvFileJobConfig {
private static Logger log = LoggerFactory.getLogger("DatabaseToCsvFileJobConfig.class");
@Bean
public ItemWriter<BillerOrder> databaseCsvItemWriter() {
log.info("Entering databaseCsvItemWriter...");
FlatFileItemWriter<BillerOrder> csvFileWriter = new FlatFileItemWriter<>();
String exportFileHeader = "BillerOrderId;ErrorMessage";
OrderWriter headerWriter = new OrderWriter(exportFileHeader);
csvFileWriter.setHeaderCallback(headerWriter);
String exportFilePath = "/tmp/FailedBillerOrderIdForRetry.csv";
csvFileWriter.setResource(new FileSystemResource(exportFilePath));
LineAggregator<BillerOrder> lineAggregator = createOrderLineAggregator();
csvFileWriter.setLineAggregator(lineAggregator);
return csvFileWriter;
}
private LineAggregator<BillerOrder> createOrderLineAggregator() {
log.info("Entering createOrderLineAggregator...");
DelimitedLineAggregator<BillerOrder> lineAggregator = new DelimitedLineAggregator<>();
lineAggregator.setDelimiter(";");
FieldExtractor<BillerOrder> fieldExtractor = createOrderFieldExtractor();
lineAggregator.setFieldExtractor(fieldExtractor);
return lineAggregator;
}
private FieldExtractor<BillerOrder> createOrderFieldExtractor() {
log.info("Entering createOrderFieldExtractor...");
BeanWrapperFieldExtractor<BillerOrder> extractor = new BeanWrapperFieldExtractor<>();
extractor.setNames(new String[] {"billerOrderId","errorMessage"});
return extractor;
}
}
Here is my job completion listener class.
@Component
public class JobCompletionNotificationListner extends JobExecutionListenerSupport {
private static final org.slf4j.Logger log = LoggerFactory.getLogger(JobCompletionNotificationListner.class);
@Override
public void afterJob(JobExecution jobExecution) {
log.info("In afterJob ...");
if (jobExecution.getStatus() == BatchStatus.COMPLETED) {
DatabaseToCsvFileJobConfig databaseToCsvFileJobConfig = new DatabaseToCsvFileJobConfig();
SuccessfulOrdersToCsvFileJobConfig successfulOrdersToCsvFileJobConfig = new SuccessfulOrdersToCsvFileJobConfig();
}
}
}
Your BillerOrderWriter#write method is supposed to contain the code that does the actual write operation of items to a data sink. But in your case, you are calling successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter() and databaseToCsvFileJobConfig.databaseCsvItemWriter(), which create item writer beans. You should instead inject those delegate writers and call their write methods when needed, something like:
public class BillerOrderWriter implements ItemWriter<BillerOrder> {
    private final ItemWriter<BillerOrder> successfulDatabaseCsvItemWriter;
    private final ItemWriter<BillerOrder> databaseCsvItemWriter;

    // inject the two delegate writers instead of the @Configuration classes
    public BillerOrderWriter(ItemWriter<BillerOrder> successfulDatabaseCsvItemWriter,
                             ItemWriter<BillerOrder> databaseCsvItemWriter) {
        this.successfulDatabaseCsvItemWriter = successfulDatabaseCsvItemWriter;
        this.databaseCsvItemWriter = databaseCsvItemWriter;
    }

    @Override
    public void write(List<? extends BillerOrder> items) throws Exception {
        for (BillerOrder item : items) {
            if (item.getResult().equals("SUCCESS")) {
                successfulDatabaseCsvItemWriter.write(Collections.singletonList(item));
            } else {
                databaseCsvItemWriter.write(Collections.singletonList(item));
            }
        }
    }
}
Instead of BillerOrderWriter, I wrote a BillerOrderClassifier class.
public class BillerOrderClassifier implements Classifier<BillerOrder, ItemWriter<? super BillerOrder>> {
private static final long serialVersionUID = 1L;
private ItemWriter<BillerOrder> successItemWriter;
private ItemWriter<BillerOrder> failedItemWriter;
public BillerOrderClassifier(ItemWriter<BillerOrder> successItemWriter, ItemWriter<BillerOrder> failedItemWriter) {
this.successItemWriter = successItemWriter;
this.failedItemWriter = failedItemWriter;
}
@Override
public ItemWriter<? super BillerOrder> classify(BillerOrder billerOrder) {
return billerOrder.getResult().equals("SUCCESS") ? successItemWriter : failedItemWriter;
}
}
In BatchConfiguration, I wrote the classifierBillerOrderCompositeItemWriter method.
@Bean
public ClassifierCompositeItemWriter<BillerOrder> classifierBillerOrderCompositeItemWriter() throws Exception {
ClassifierCompositeItemWriter<BillerOrder> compositeItemWriter = new ClassifierCompositeItemWriter<>();
compositeItemWriter.setClassifier(new BillerOrderClassifier(successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter(), databaseToCsvFileJobConfig.databaseCsvItemWriter()));
return compositeItemWriter;
}
@Bean(name="step1")
public Step step1() throws Exception{
return stepBuilderFactory.get("step1")
.<BillerOrder, BillerOrder> chunk(10)
.reader((ItemReader<? extends BillerOrder>) reader())
.processor(processor())
.writer(classifierBillerOrderCompositeItemWriter())
.stream(successfulOrdersToCsvFileJobConfig.successfulDatabaseCsvItemWriter())
.stream(databaseToCsvFileJobConfig.databaseCsvItemWriter())
.build();
}

Spring Batch Reader is reading alternate records

I have created a sample Spring Batch application which tries to read records from a DB and, in the writer, displays those records. However, I can see that only even-numbered (alternate) records are printed.
It's not a database problem, as the behavior is consistent with both the H2 and Oracle databases.
There are 100 records in total in my DB.
With JdbcCursorItemReader, only 50 records are read, and only alternate ones, as can be seen from the log snapshot.
With JdbcPagingItemReader, only 5 records are read, and again only alternate ones, as can be seen from the log snapshot.
My code configurations are given below. Why is the reader skipping odd-numbered records?
@Bean
public ItemWriter<Safety> safetyWriter() {
return items -> {
for (Safety item : items) {
log.info(item.toString());
}
};
}
@Bean
public JdbcCursorItemReader<Safety> cursorItemReader() throws Exception {
JdbcCursorItemReader<Safety> reader = new JdbcCursorItemReader<>();
reader.setSql("select * from safety " );
reader.setDataSource(dataSource);
reader.setRowMapper(new SafetyRowMapper());
reader.setVerifyCursorPosition(false);
reader.afterPropertiesSet();
return reader;
}
@Bean
JdbcPagingItemReader<Safety> safetyPagingItemReader() throws Exception {
JdbcPagingItemReader<Safety> reader = new JdbcPagingItemReader<>();
reader.setDataSource(dataSource);
reader.setFetchSize(10);
reader.setRowMapper(new SafetyRowMapper());
H2PagingQueryProvider queryProvider = new H2PagingQueryProvider();
queryProvider.setSelectClause("*");
queryProvider.setFromClause("safety");
Map<String, Order> sortKeys = new HashMap<>(1);
sortKeys.put("id", Order.ASCENDING);
queryProvider.setSortKeys(sortKeys);
reader.setQueryProvider(queryProvider);
return reader;
}
@Bean
public Step importSafetyDetails() throws Exception {
return stepBuilderFactory.get("importSafetyDetails")
.<Safety, Safety>chunk(chunkSize)
//.reader(cursorItemReader())
.reader(safetyPagingItemReader())
.writer(safetyWriter())
.listener(new StepListener())
.listener(new ChunkListener())
.build();
}
@Bean
public Job job() throws Exception {
return jobBuilderFactory.get("job")
.start(importSafetyDetails())
.build();
}
Domain classes looks like below:
@NoArgsConstructor
@AllArgsConstructor
@Data
public class Safety {
private int id;
}
public class SafetyRowMapper implements RowMapper<Safety> {
@Override
public Safety mapRow(ResultSet resultSet, int i) throws SQLException {
if(resultSet.next()) {
Safety safety = new Safety();
safety.setId(resultSet.getInt("id"));
return safety;
}
return null;
}
}
@SpringBootApplication
@EnableBatchProcessing
public class SpringBatchSamplesApplication {
public static void main(String[] args) {
SpringApplication.run(SpringBatchSamplesApplication.class, args);
}
}
application.yml configuration is as below:
spring:
  application:
    name: spring-batch-samples
  main:
    allow-bean-definition-overriding: true
  datasource:
    url: jdbc:h2:mem:testdb;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE
    username: sa
    password:
    driver-class-name: org.h2.Driver
    hikari:
      connection-timeout: 20000
      maximum-pool-size: 10
  h2:
    console:
      enabled: true
  batch:
    initialize-schema: never
server:
  port: 9090
The SQL is as below:
CREATE TABLE safety (
id int NOT NULL,
CONSTRAINT PK_ID PRIMARY KEY (id)
);
INSERT INTO safety (id) VALUES (1);
...100 records are inserted
The listener classes are as below:
@Slf4j
public class StepListener {
@AfterStep
public ExitStatus afterStep(StepExecution stepExecution) {
log.info("In step {} ,Exit Status: {} ,Read Records: {} ,Committed Records: {} ,Skipped Read Records: {} ,Skipped Write Records: {}",
stepExecution.getStepName(),
stepExecution.getExitStatus().getExitCode(),
stepExecution.getReadCount(),
stepExecution.getCommitCount(),
stepExecution.getReadSkipCount(),
stepExecution.getWriteSkipCount());
return stepExecution.getExitStatus();
}
}
@Slf4j
public class ChunkListener {
@BeforeChunk
public void beforeChunk(ChunkContext context) {
log.info("<< Before the chunk");
}
@AfterChunk
public void afterChunk(ChunkContext context) {
log.info("<< After the chunk");
}
}
I tried to reproduce your problem, but I couldn't. It would be great if you could share more code.
Meanwhile, I created a simple job to read 100 records from the "safety" table and print them to the console, and it is working fine.
@SpringBootApplication
@EnableBatchProcessing
public class ReaderWriterProblem implements CommandLineRunner {
@Autowired
DataSource dataSource;
@Autowired
StepBuilderFactory stepBuilderFactory;
@Autowired
JobBuilderFactory jobBuilderFactory;
@Autowired
private JobLauncher jobLauncher;
@Autowired
private ApplicationContext context;
public static void main(String[] args) {
String[] arguments = new String[]{LocalDateTime.now().toString()};
SpringApplication.run(ReaderWriterProblem.class, arguments);
}
@Bean
public ItemWriter<Safety> safetyWriter() {
return new ItemWriter<Safety>() {
@Override
public void write(List<? extends Safety> items) throws Exception {
for (Safety item : items) {
//log.info(item.toString());
System.out.println(item);
}
}
};
}
// @Bean
// public JdbcCursorItemReader<Safety> cursorItemReader() throws Exception {
// JdbcCursorItemReader<Safety> reader = new JdbcCursorItemReader<>();
//
// reader.setSql("select * from safety ");
// reader.setDataSource(dataSource);
// reader.setRowMapper(new SafetyRowMapper());
// reader.setVerifyCursorPosition(false);
// reader.afterPropertiesSet();
//
// return reader;
// }
@Bean
JdbcPagingItemReader<Safety> safetyPagingItemReader() throws Exception {
JdbcPagingItemReader<Safety> reader = new JdbcPagingItemReader<>();
reader.setDataSource(dataSource);
reader.setFetchSize(10);
reader.setRowMapper(new SafetyRowMapper());
PostgresPagingQueryProvider queryProvider = new PostgresPagingQueryProvider();
queryProvider.setSelectClause("*");
queryProvider.setFromClause("safety");
Map<String, Order> sortKeys = new HashMap<>(1);
sortKeys.put("id", Order.ASCENDING);
queryProvider.setSortKeys(sortKeys);
reader.setQueryProvider(queryProvider);
return reader;
}
@Bean
public Step importSafetyDetails() throws Exception {
return stepBuilderFactory.get("importSafetyDetails")
.<Safety, Safety>chunk(5)
//.reader(cursorItemReader())
.reader(safetyPagingItemReader())
.writer(safetyWriter())
.listener(new MyStepListener())
.listener(new MyChunkListener())
.build();
}
@Bean
public Job job() throws Exception {
return jobBuilderFactory.get("job")
.listener(new JobListener())
.start(importSafetyDetails())
.build();
}
@Override
public void run(String... args) throws Exception {
JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
jobParametersBuilder.addString("date", LocalDateTime.now().toString());
try {
Job job = (Job) context.getBean("job");
jobLauncher.run(job, jobParametersBuilder.toJobParameters());
} catch (JobExecutionAlreadyRunningException | JobRestartException | JobInstanceAlreadyCompleteException | JobParametersInvalidException e) {
e.printStackTrace();
}
}
public static class JobListener implements JobExecutionListener {
@Override
public void beforeJob(JobExecution jobExecution) {
System.out.println("Before job");
}
@Override
public void afterJob(JobExecution jobExecution) {
System.out.println("After job");
}
}
private static class SafetyRowMapper implements RowMapper<Safety> {
@Override
public Safety mapRow(ResultSet resultSet, int i) throws SQLException {
Safety safety = new Safety();
safety.setId(resultSet.getLong("ID"));
return safety;
}
}
public static class MyStepListener implements StepExecutionListener {
@Override
public void beforeStep(StepExecution stepExecution) {
System.out.println("Before Step");
}
@Override
public ExitStatus afterStep(StepExecution stepExecution) {
System.out.println("After Step");
return ExitStatus.COMPLETED;
}
}
private static class MyChunkListener implements ChunkListener {
@Override
public void beforeChunk(ChunkContext context) {
System.out.println("Before Chunk");
}
@Override
public void afterChunk(ChunkContext context) {
System.out.println("After Chunk");
}
@Override
public void afterChunkError(ChunkContext context) {
}
}
}
Hope this helps

Spring batch run multiple jobs in parallel

I am new to Spring Batch and couldn't figure out how to do this.
Basically I have two Spring Batch configurations and both have to run in parallel, i.e. when I request execute_job1 then BatchConfig1 has to execute, and when I request execute_job2 then BatchConfig2 has to execute. How can I do this?
Controller
@RestController
public class JobExecutionController {
@Autowired
JobLauncher jobLauncher;
@Autowired
Job job;
/**
*
* @return
*/
@RequestMapping("/execute_job1")
@ResponseBody
public void executeBatchJob1() {
}
/**
*
* @return
*/
@RequestMapping("/execute_job2")
@ResponseBody
public void executeBatchJob2() {
}
}
BatchConfig1
@Configuration
@EnableBatchProcessing
public class BatchConfig {
@Autowired
private JobBuilderFactory jobs;
@Autowired
private StepBuilderFactory steps;
@Bean
public Step stepOne(){
return steps.get("stepOne")
.tasklet(new MyTaskOne())
.build();
}
@Bean
public Step stepTwo(){
return steps.get("stepTwo")
.tasklet(new MyTaskTwo())
.build();
}
@Bean
public Job demoJob(){
return jobs.get("exportUserJob1")
.incrementer(new RunIdIncrementer())
.start(stepOne())
.next(stepTwo())
.build();
}
}
BatchConfig2:
@Configuration
@EnableBatchProcessing
public class BatchConfig2 {
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public DataSource dataSource;
@Bean
public JdbcCursorItemReader<User> reader() {
JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<User>();
reader.setDataSource(dataSource);
reader.setSql("SELECT id,name FROM user");
reader.setRowMapper(new UserRowMapper());
return reader;
}
public class UserRowMapper implements RowMapper<User> {
@Override
public User mapRow(ResultSet rs, int rowNum) throws SQLException {
User user = new User();
user.setId(rs.getInt("id"));
user.setName(rs.getString("name"));
return user;
}
}
@Bean
public UserItemProcessor processor() {
return new UserItemProcessor();
}
@Bean
public FlatFileItemWriter<User> writer() {
FlatFileItemWriter<User> writer = new FlatFileItemWriter<User>();
writer.setResource(new ClassPathResource("users.csv"));
writer.setLineAggregator(new DelimitedLineAggregator<User>() {
{
setDelimiter(",");
setFieldExtractor(new BeanWrapperFieldExtractor<User>() {
{
setNames(new String[] { "id", "name" });
}
});
}
});
return writer;
}
@Bean
public Step step1() {
return stepBuilderFactory.get("step1").<User, User>chunk(10).reader(reader()).processor(processor())
.writer(writer()).build();
}
@Bean
public Job exportUserJob() {
return jobBuilderFactory.get("exportUserJob2").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
}
}
You can run a job with JobLauncher. The code for the controller:
@RestController
public class JobExecutionController {
public JobExecutionController(JobLauncher jobLauncher,
@Qualifier("demoJob") Job demoJob,
@Qualifier("exportUserJob") Job exportUserJob) {
this.jobLauncher = jobLauncher;
this.demoJob = demoJob;
this.exportUserJob = exportUserJob;
}
JobLauncher jobLauncher;
Job demoJob;
Job exportUserJob;
@RequestMapping("/execute_job1")
@ResponseBody
public void executeBatchJob1() {
try {
JobExecution jobExecution = jobLauncher.run(demoJob, new JobParameters(generateJobParameter()));
log.info("Job started in thread :" + jobExecution.getJobParameters().getString("JobThread"));
} catch (JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | JobInstanceAlreadyCompleteException e) {
log.error("Something sent wrong during job execution", e);
}
}
@RequestMapping("/execute_job2")
@ResponseBody
public void executeBatchJob2() {
try {
JobExecution jobExecution = jobLauncher.run(exportUserJob, new JobParameters(generateJobParameter()));
log.info("Job started in thread :" + jobExecution.getJobParameters().getString("JobThread"));
} catch (JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | JobInstanceAlreadyCompleteException e) {
log.error("Something sent wrong during job execution", e);
}
}
private JobParameters generateJobParameter() {
Map<String, JobParameter> parameters = new HashMap<>();
parameters.put("Job start time", new JobParameter(Instant.now().toEpochMilli()));
parameters.put("JobThread", new JobParameter(Thread.currentThread().getId()));
return new JobParameters(parameters);
}
}
To prevent your jobs from starting at application startup, add the following Spring Batch configuration to application.properties: spring.batch.job.enabled=false
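For reference, the YAML equivalent (assuming a Spring Boot 2.x application.yml) would be:
spring:
  batch:
    job:
      enabled: false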

Spring Boot + Spring Batch Multiple Job Creation and Scheduling

I created a Spring Boot application with Spring Batch and scheduling. When I create only one job, things work fine. But when I try to create another job using the modular approach, I get a few errors like "reader is already closed" and some errors related to the version, even though I am using different readers. The jobs and their steps run many times and get duplicated.
Can anyone please guide me on how to resolve these issues and run the jobs in parallel, independent of each other?
Below are the configuration classes:
ModularJobConfiguration.java, DeptBatchConfiguration.java, CityBatchConfiguration.java and BatchScheduler.java
@Configuration
@EnableBatchProcessing(modular=true)
public class ModularJobConfiguration {
@Bean
public ApplicationContextFactory firstJob() {
return new GenericApplicationContextFactory(DeptBatchConfiguration.class);
}
@Bean
public ApplicationContextFactory secondJob() {
return new GenericApplicationContextFactory(CityBatchConfiguration.class);
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class DeptBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(DeptBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<DepartmentModelReader> deptReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performFirstJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("Job1 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID1",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importDeptJob(jobBuilderFactory,stepdept(deptReaderSO,customWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job1 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<DepartmentModelReader> deptReaderSO() {
//LOGGER.info("Inside deptReaderSO Method");
JdbcCursorItemReader<DepartmentModelReader> deptReaderSO = new JdbcCursorItemReader<>();
//deptReaderSO.setSql("select id, firstName, lastname, random_num from reader");
deptReaderSO.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
deptReaderSO.setDataSource(dataSourceReader);
deptReaderSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
DepartmentModelReader recordSO = new DepartmentModelReader();
recordSO.setDeptCode(resultSet.getString("DEPT_CODE"));
recordSO.setDeptName(resultSet.getString("DEPT_NAME"));
recordSO.setFullDeptName(resultSet.getString("FULL_DEPT_NAME"));
recordSO.setCityCode(resultSet.getInt("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedUserId(resultSet.getInt("CREATED_USER_ID"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
recordSO.setDeptClass(resultSet.getInt("DEPT_CLASS"));
recordSO.setDeptParent(resultSet.getString("DEPT_PARENT"));
recordSO.setDeptParentName(resultSet.getString("DEPT_PARENT_NAME"));
// LOGGER.info("RowMapper record : {}", recordSO.getDeptCode() +" | "+recordSO.getDeptName());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return deptReaderSO;
}
@Bean
public ItemProcessor<DepartmentModelReader, DepartmentModelWriter> processor() {
//LOGGER.info("Inside Processor Method");
return new RecordProcessor();
}
@Bean
public ItemWriter<DepartmentModelWriter> customWriter(){
//LOGGER.info("Inside customWriter Method");
return new CustomItemWriter();
}
@Bean
public Job importDeptJob(JobBuilderFactory jobs, Step stepdept,JobExecutionListener listener){
return jobs.get("importDeptJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepdept).end().build();
}
@Bean
public Step stepdept(ItemReader<DepartmentModelReader> deptReaderSO,
ItemWriter<DepartmentModelWriter> writerSO) {
LOGGER.info("Inside stepdept Method");
return stepBuilderFactory.get("stepdept").<DepartmentModelReader, DepartmentModelWriter>chunk(5)
.reader(deptReaderSO).processor(processor()).writer(customWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class CityBatchConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(CityBatchConfiguration.class);
@Autowired
private SimpleJobLauncher jobLauncher;
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Autowired
public JobExecutionListener listener;
public ItemReader<CitiesModelReader> citiesReaderSO;
@Autowired
@Qualifier("dataSourceReader")
private DataSource dataSourceReader;
@Autowired
@Qualifier("dataSourceWriter")
private DataSource dataSourceWriter;
@Scheduled(cron = "0 0/1 * * * ?")
public void performSecondJob() throws Exception {
long startTime = System.currentTimeMillis();
LOGGER.info("\n Job2 Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID2",String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = (JobExecution) jobLauncher.run(importCitiesJob(jobBuilderFactory,stepcity(citiesReaderSO,customCitiesWriter()),listener), param);
long endTime = System.currentTimeMillis();
LOGGER.info("Job2 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
}
@Bean
public ItemReader<CitiesModelReader> citiesReaderSO() {
//LOGGER.info("Inside readerSO Method");
JdbcCursorItemReader<CitiesModelReader> readerSO = new JdbcCursorItemReader<>();
readerSO.setSql("SELECT CITY_CODE,CITY_NAME,PARENT_CITY,CITY_TYPE,CITY_TYPE_NAME,CREATED_G_DATE,CREATED_USER_ID,MODIFIED_G_DATE,MODIFIED_USER_ID,RECORD_ACTIVITY FROM TBL_SAMPLE_SAFTY_CITIES");
readerSO.setDataSource(dataSourceReader);
readerSO.setRowMapper(
(ResultSet resultSet, int rowNum) -> {
if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
CitiesModelReader recordSO = new CitiesModelReader();
recordSO.setCityCode(resultSet.getLong("CITY_CODE"));
recordSO.setCityName(resultSet.getString("CITY_NAME"));
recordSO.setParentCity(resultSet.getInt("PARENT_CITY"));
recordSO.setCityType(resultSet.getString("CITY_TYPE"));
recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
recordSO.setCreatedUserId(resultSet.getString("CREATED_USER_ID"));
recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
//LOGGER.info("RowMapper record : {}", recordSO.toString());
return recordSO;
} else {
LOGGER.info("Returning null from rowMapper");
return null;
}
});
return readerSO;
}
@Bean
public ItemProcessor<CitiesModelReader,CitiesModelWriter> citiesProcessor() {
//LOGGER.info("Inside Processor Method");
return new RecordCitiesProcessor();
}
@Bean
public ItemWriter<CitiesModelWriter> customCitiesWriter(){
LOGGER.info("Inside customCitiesWriter Method");
return new CustomCitiesWriter();
}
@Bean
public Job importCitiesJob(JobBuilderFactory jobs, Step stepcity,JobExecutionListener listener) {
LOGGER.info("Inside importCitiesJob Method");
return jobs.get("importCitiesJob")
.incrementer(new RunIdIncrementer())
.listener(listener())
.flow(stepcity).end().build();
}
@Bean
public Step stepcity(ItemReader<CitiesModelReader> readerSO,
ItemWriter<CitiesModelWriter> writerSO) {
LOGGER.info("Inside stepCity Method");
return stepBuilderFactory.get("stepcity").<CitiesModelReader, CitiesModelWriter>chunk(5)
.reader(readerSO).processor(citiesProcessor()).writer(customCitiesWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
}
@Bean
public JobExecutionListener listener() {
return new JobCompletionNotificationListener();
}
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
@Bean
public BatchWriteService batchWriteService() {
return new BatchWriteService();
}
@Bean
public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setDataSource(dataSourceWriter);
return transactionManager;
}
}
@Configuration
@EnableScheduling
public class BatchScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(BatchScheduler.class);
@Bean
public ResourcelessTransactionManager resourcelessTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
ResourcelessTransactionManager txManager) throws Exception {
LOGGER.info("Inside mapJobRepositoryFactory method");
MapJobRepositoryFactoryBean factory = new
MapJobRepositoryFactoryBean(txManager);
factory.afterPropertiesSet();
return factory;
}
@Bean
public JobRepository jobRepository(
MapJobRepositoryFactoryBean factory) throws Exception {
LOGGER.info("Inside jobRepository method");
return factory.getObject();
}
@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
LOGGER.info("Inside jobLauncher method");
SimpleJobLauncher launcher = new SimpleJobLauncher();
launcher.setJobRepository(jobRepository);
final SimpleAsyncTaskExecutor simpleAsyncTaskExecutor = new SimpleAsyncTaskExecutor();
launcher.setTaskExecutor(simpleAsyncTaskExecutor);
return launcher;
}
}
Your readers are not thread-safe and not step-scoped. Because of that, you're running into concurrency issues. Configure each of your stateful ItemReaders (the ones that implement ItemStream, like the JdbcCursorItemReader) to be step-scoped by adding the @StepScope annotation, and things should work fine.
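As a minimal sketch (assuming Spring Batch 4.x; the dataSourceReader qualifier, table and column names are placeholders taken from the question), a step-scoped JdbcCursorItemReader bean could look like this:
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.database.JdbcCursorItemReader;
import org.springframework.batch.item.database.builder.JdbcCursorItemReaderBuilder;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.ColumnMapRowMapper;

@Configuration
public class StepScopedReaderConfig {

    // Step-scoped: a new reader instance (with its own cursor) is created for every
    // step execution, so concurrently scheduled jobs no longer share reader state.
    @Bean
    @StepScope
    public JdbcCursorItemReader<Map<String, Object>> deptReader(
            @Qualifier("dataSourceReader") DataSource dataSourceReader) {
        return new JdbcCursorItemReaderBuilder<Map<String, Object>>()
                .name("deptReader")
                .dataSource(dataSourceReader)
                .sql("SELECT DEPT_CODE, DEPT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS")
                .rowMapper(new ColumnMapRowMapper())
                .build();
    }
}
Step-scoped beans are exposed through a scoped proxy, so the reader can still be injected into the step definition exactly as before; the real instance is only created when the step runs.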

Failed to run job based on schedule

I'm trying to run the batch on a schedule. I have used the @Scheduled annotation and a cron expression. The batch runs only once. No error is displayed. I have added the Maven dependency for Quartz. I have not added any XML file.
@EnableAutoConfiguration(exclude = { DataSourceAutoConfiguration.class,SwaggerConfig.class,
WebMvcAutoConfiguration.class,RepositoryRestMvcAutoConfiguration.class })
@EnableScheduling
@ComponentScan
public class BatchApplication {
public static void main(String[] args) throws Exception {
SpringApplication app = new SpringApplication(BatchApplication.class);
app.setWebEnvironment(false);
ConfigurableApplicationContext ctx = app.run(args);
System.out.println(ctx.getBean(DataSource.class));
JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
Job addLeaveAllocationJob = ctx.getBean("addLeaveAllocationJob", Job.class);
JobParameters jobParameters = new JobParametersBuilder().addDate("date", new Date())
.toJobParameters();
JobExecution jobExecution = jobLauncher.run(addLeaveAllocationJob, jobParameters);
BatchStatus batchStatus = jobExecution.getStatus();
while(batchStatus.isRunning()){
System.out.println("*** Still Running ************");
Thread.sleep(2000);
}
}
}
I have a job class which is scheduled with the @Scheduled annotation and a cron expression.
@Configuration
@EnableBatchProcessing
@Component
public class LeaveAllocationJobConfiguration {
@Autowired
private JobBuilderFactory jobs;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
private EntityManagerFactory entityManagerFactory;
@Bean
public ItemReader<Employee> reader() {
JpaPagingItemReader<Employee> employeeReader = new JpaPagingItemReader<Employee>();
employeeReader.setEntityManagerFactory(entityManagerFactory);
employeeReader.setQueryString("from Employee");
return employeeReader;
}
@Bean
@Scheduled(cron="0 0/1 * 1/1 * ? *")
public Job addLeaveAllocationJob() {
System.out.println("Hello");
return jobs.get("addLeaveAllocationJob").listener(protocolListener()).start(step()).build();
}
@Bean
public Step step() {
// important to be one in this case to commit after every line read
return stepBuilderFactory.get("step").<Employee, EmployeeDTO> chunk(1).reader(reader()).processor(processor())
.writer(writer()).build();
}
/**
* @return
*/
@Bean
public ItemWriter<? super EmployeeDTO> writer() {
return new ItemWriter<EmployeeDTO>() {
@Override
public void write(List<? extends EmployeeDTO> items) throws Exception {
System.out.println("Processing " + items);
}
};
}
@Bean
public ItemProcessor<Employee, EmployeeDTO> processor() {
return new ItemProcessor<Employee, EmployeeDTO>() {
@Override
public EmployeeDTO process(Employee employee) throws Exception {
return new EmployeeDTO(employee);
}
};
}
@Bean
public ProtocolListener protocolListener() {
return new ProtocolListener();
}
}
Please help me to solve this issue.
Check this link: enter link description here
