Spring Batch SkipListener not printing logs - spring

I am not sure why the SkipListener is not printing the simple logs. I want to log the skipped records into a separate log file, and I am thinking I can use MDC.put(). But I am not even able to print a log in the console, and I am not sure what is happening. I think I am missing something. Any help would be really appreciated. I even tried with the generic Exception.class for testing, but it still does nothing. Here is my code:
BatchConfiguration
@EnableBatchProcessing
public class BatchConfiguration {
    static final Logger LOG = LogManager.getLogger(BatchConfiguration.class);
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public DataSource dataSource;
    @Bean
    public Job loadUserJob() {
        return jobBuilderFactory.get("loadUserJob")
                .incrementer(new RunIdIncrementer())
                .start(loadUsersStep())
                .listener(new JobLoggerListener())
                .build();
    }
    @Bean
    public Step loadUsersStep() {
        return stepBuilderFactory.get("loadUsersStep")
                .<UserInfo, UserInfoDTO>chunk(10)
                .reader(reader())
                .processor(UserInfoItemProcessor())
                .writer(writer())
                .faultTolerant()
                .skipLimit(3)
                .skip(Exception.class)
                //.skip(UserNotFoundException.class)
                .listener(new StepStartStopListener())
                .listener(new LoadDataSkipListener())
                .build();
    }
    @Bean
    public FlatFileItemReader<UserInfo> reader() {
        return new FlatFileItemReaderBuilder<UserInfo>()
                .name("reader")
                .delimited()
                .names(new String[] {
                        "first_name",
                        "last_name",
                        "email"})
                .targetType(UserInfo.class)
                .resource(new ClassPathResource("userinfo_file.csv"))
                .build();
    }
    @Bean
    public UserInfoItemProcessor UserInfoItemProcessor() {
        return new UserInfoItemProcessor();
    }
    @Bean
    public JdbcBatchItemWriter<UserInfoDTO> writer() {
        JdbcBatchItemWriter<UserInfoDTO> writer = new JdbcBatchItemWriter<UserInfoDTO>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>());
        writer.setSql("INSERT INTO db1.userinfo "
                + "(first_name,last_name,email) " +
                "VALUES (:firstName, :lastName,:email)");
        writer.setDataSource(dataSource);
        return writer;
    }
}
SkipListener Class
public class LoadDataSkipListener implements SkipListener<UserInfoDTO, UserInfoDTO> {
    public static final Logger LOG = LogManager.getLogger(LoadDataSkipListener.class);
    @Override
    public void onSkipInRead(Throwable t) {
        LOG.info("ItemWriter: ");
    }
    @Override
    public void onSkipInWrite(UserInfoDTO item, Throwable t) {
        LOG.info(">>> onSkipInWrite <<<");
        // MDC.put("skipped_users", String.valueOf(item.getUserId())); // I want to log skipped user ids on write
    }
    @Override
    public void onSkipInProcess(UserInfoDTO item, Throwable t) {
        LOG.info(">>> onSkipInProcess<<< ");
    }
}
application.properties
spring.datasource.url=jdbc:mysql://localhost:3306/db1?useSSL=false
spring.datasource.username=username
spring.datasource.password=password
spring.datasource.schema=classpath:/org/springframework/batch/core/schema-mysql.sql
spring.batch.initialize-schema=always
spring.datasource.initialize=true
#logging.file={log-name}.log --> Will this write into a new file?
I just want to know:
Why is it not even printing in the console?
Can the commented MDC be used to write to a new file?
Thanks in advance!

Related

Spring batch run multiple jobs in parallel

I am new to Spring Batch and couldn't figure out how to do this.
Basically I have two Spring Batch configuration files and both have to run in parallel, i.e. when I request execute_job1 then BatchConfig1 has to execute, and when I request execute_job2 then BatchConfig2 has to execute. How can I do this?
Controller
@RestController
public class JobExecutionController {
    @Autowired
    JobLauncher jobLauncher;
    @Autowired
    Job job;
    /**
     *
     * @return
     */
    @RequestMapping("/execute_job1")
    @ResponseBody
    public void executeBatchJob1() {
    }
    /**
     *
     * @return
     */
    @RequestMapping("/execute_job2")
    @ResponseBody
    public void executeBatchJob2() {
    }
}
BatchConfig1
@Configuration
@EnableBatchProcessing
public class BatchConfig {
    @Autowired
    private JobBuilderFactory jobs;
    @Autowired
    private StepBuilderFactory steps;
    @Bean
    public Step stepOne() {
        return steps.get("stepOne")
                .tasklet(new MyTaskOne())
                .build();
    }
    @Bean
    public Step stepTwo() {
        return steps.get("stepTwo")
                .tasklet(new MyTaskTwo())
                .build();
    }
    @Bean
    public Job demoJob() {
        return jobs.get("exportUserJob1")
                .incrementer(new RunIdIncrementer())
                .start(stepOne())
                .next(stepTwo())
                .build();
    }
}
BatchConfig2:
@Configuration
@EnableBatchProcessing
public class BatchConfig2 {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public DataSource dataSource;
    @Bean
    public JdbcCursorItemReader<User> reader() {
        JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<User>();
        reader.setDataSource(dataSource);
        reader.setSql("SELECT id,name FROM user");
        reader.setRowMapper(new UserRowMapper());
        return reader;
    }
    public class UserRowMapper implements RowMapper<User> {
        @Override
        public User mapRow(ResultSet rs, int rowNum) throws SQLException {
            User user = new User();
            user.setId(rs.getInt("id"));
            user.setName(rs.getString("name"));
            return user;
        }
    }
    @Bean
    public UserItemProcessor processor() {
        return new UserItemProcessor();
    }
    @Bean
    public FlatFileItemWriter<User> writer() {
        FlatFileItemWriter<User> writer = new FlatFileItemWriter<User>();
        writer.setResource(new ClassPathResource("users.csv"));
        writer.setLineAggregator(new DelimitedLineAggregator<User>() {
            {
                setDelimiter(",");
                setFieldExtractor(new BeanWrapperFieldExtractor<User>() {
                    {
                        setNames(new String[] { "id", "name" });
                    }
                });
            }
        });
        return writer;
    }
    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1").<User, User>chunk(10).reader(reader()).processor(processor())
                .writer(writer()).build();
    }
    @Bean
    public Job exportUserJob() {
        return jobBuilderFactory.get("exportUserJob2").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
    }
}
You can run a job with JobLauncher. The code for the controller:
@RestController
public class JobExecutionController {
    public JobExecutionController(JobLauncher jobLauncher,
                                  @Qualifier("demoJob") Job demoJob,
                                  @Qualifier("exportUserJob") Job exportUserJob) {
        this.jobLauncher = jobLauncher;
        this.demoJob = demoJob;
        this.exportUserJob = exportUserJob;
    }
    JobLauncher jobLauncher;
    Job demoJob;
    Job exportUserJob;
    @RequestMapping("/execute_job1")
    @ResponseBody
    public void executeBatchJob1() {
        try {
            JobExecution jobExecution = jobLauncher.run(demoJob, generateJobParameter());
            log.info("Job started in thread :" + jobExecution.getJobParameters().getString("JobThread"));
        } catch (JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | JobInstanceAlreadyCompleteException e) {
            log.error("Something went wrong during job execution", e);
        }
    }
    @RequestMapping("/execute_job2")
    @ResponseBody
    public void executeBatchJob2() {
        try {
            JobExecution jobExecution = jobLauncher.run(exportUserJob, generateJobParameter());
            log.info("Job started in thread :" + jobExecution.getJobParameters().getString("JobThread"));
        } catch (JobExecutionAlreadyRunningException | JobRestartException | JobParametersInvalidException | JobInstanceAlreadyCompleteException e) {
            log.error("Something went wrong during job execution", e);
        }
    }
    private JobParameters generateJobParameter() {
        Map<String, JobParameter> parameters = new HashMap<>();
        parameters.put("Job start time", new JobParameter(Instant.now().toEpochMilli()));
        parameters.put("JobThread", new JobParameter(Thread.currentThread().getId()));
        return new JobParameters(parameters);
    }
}
To prevent your jobs from starting automatically when the application starts, add the following Spring Batch setting to application.properties: spring.batch.job.enabled=false
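For reference, this is a single line in application.properties (same file format as the properties shown in the first question):
spring.batch.job.enabled=false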

How to reset MultiResourceItemReader for each job run. Step scope not working

How can I initialize the MultiResourceItemReader for each job run? Currently, with this setup, it is still using the same instance for each job run.
I put @StepScope on it, but it is still using the same old list of files which it has already processed. I am not sure what else I have to add to this code.
I also tried with @JobScope, but it did not work out either. There is something fundamental I am missing.
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Value("file:ftp-inbound/*.csv")
    @Autowired
    private Resource[] inputResources;
    @Autowired
    private StepBuilderFactory steps;
    @Autowired
    private JobBuilderFactory jobs;
    @Autowired
    private ResourceLoader resourceLoader;
    @Bean
    public FlatFileItemReader<AccommodationRoomAvailability> itemReader() throws UnexpectedInputException, ParseException, IOException {
        FlatFileItemReader<AccommodationRoomAvailability> reader = new FlatFileItemReader<AccommodationRoomAvailability>();
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        String[] tokens = {"Product ID", "Allotment", "Kamertype", "Zoeknaam", "Hotel", "Datum", "Beschikbaar", "Nachten"};
        tokenizer.setNames(tokens);
        tokenizer.setDelimiter(";");
        tokenizer.setStrict(true);
        reader.setLinesToSkip(1);
        DefaultLineMapper<AccommodationRoomAvailability> lineMapper = new DefaultLineMapper<AccommodationRoomAvailability>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(new RecordFieldSetMapper());
        reader.setLineMapper(lineMapper);
        return reader;
    }
    @Bean
    @Qualifier("multiResourceReader")
    @StepScope
    public MultiResourceItemReader<AccommodationRoomAvailability> multiResourceItemReader() throws Exception {
        MultiResourceItemReader<AccommodationRoomAvailability> resourceItemReader = new MultiResourceItemReader<AccommodationRoomAvailability>();
        resourceItemReader.setResources(inputResources);
        resourceItemReader.setDelegate(itemReader());
        resourceItemReader.setStrict(false);
        resourceItemReader.setSaveState(false);
        // resourceItemReader.read();
        return resourceItemReader;
    }
    @Bean
    public ItemProcessor<AccommodationRoomAvailability, String> itemProcessor() {
        return new AvailabilityProcessor();
    }
    @Bean
    public ItemWriter itemWriter() {
        return new ItemWriter() {
            @Override
            public void write(List list) throws Exception {
            }
        };
    }
    @Bean
    protected Step step1(@Qualifier("multiResourceReader") MultiResourceItemReader<AccommodationRoomAvailability> reader, ItemProcessor<AccommodationRoomAvailability, String> processor,
                         ItemWriter writer) {
        return steps.get("step1")/*.listener(new StepListener())*/.<AccommodationRoomAvailability, String>chunk(30000).reader(reader)
                .processor(processor)
                .writer(writer)
                .build();
    }
    @Bean
    public Step step2() throws IOException {
        FileDeletingTasklet task = new FileDeletingTasklet();
        task.setResources(inputResources);
        return stepBuilderFactory.get("step2")
                .tasklet(task)
                .build();
    }
    @Bean(name = "job")
    public Job job(@Qualifier("step1") Step step1, Step step2) throws IOException {
        return jobs.get("job")
                .start(step1).on("*").to(step2).end()
                // .flow(step1).on("").to(step2()).end()
                .build();
    }
}
Once your application context is created, the injected resources @Value("file:ftp-inbound/*.csv") will be the same during the whole lifetime of your app. That's why the reader will always read the same values.
You need to pass these resources as a parameter to your job and late-bind them in your reader with Step scope. In your example it would be something like:
@Bean
@Qualifier("multiResourceReader")
@StepScope
public MultiResourceItemReader<AccommodationRoomAvailability> multiResourceItemReader(@Value("#{jobParameters['inputResources']}") Resource[] inputResources) throws Exception {
    MultiResourceItemReader<AccommodationRoomAvailability> resourceItemReader = new MultiResourceItemReader<AccommodationRoomAvailability>();
    resourceItemReader.setResources(inputResources);
    resourceItemReader.setDelegate(itemReader());
    resourceItemReader.setStrict(false);
    resourceItemReader.setSaveState(false);
    return resourceItemReader;
}
Then pass input resources as a parameter to your job:
JobParameters jobParameters = new JobParametersBuilder()
        .addString("inputResources", "file:ftp-inbound/*.csv")
        .toJobParameters();
Currently with this setup it is still using the same instance for each job run
That's because your resources are always the same when they are injected in a field of your configuration class. If you use the job parameters approach I mentioned in the previous example, you will have a different instance if you run the job with a different set of files.
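For completeness, a launch using that parameter might look like the sketch below. jobLauncher and job are assumed to be injected beans (as in the controller example above), and the method name and the extra timestamp parameter are only illustrative:
public void launchWithResources() throws Exception {
    // "inputResources" must match the @Value("#{jobParameters['inputResources']}") expression
    // in the step-scoped reader; the timestamp parameter only makes each run's parameters unique.
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("inputResources", "file:ftp-inbound/*.csv")
            .addLong("launch.time", System.currentTimeMillis())
            .toJobParameters();
    jobLauncher.run(job, jobParameters);
}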

MongoDB Spring Batch job using Gradle

I want to create an item reader, writer and processor for a Spring Batch job using Gradle. I am having trouble with a few things. The delimited() portion is giving me an error. I am trying to read two fields for now: rxFname and rxLname.
Here is my code:
@Configuration
@EnableBatchProcessing
public class PaymentPortalJob {
    private static final Logger LOG =
            LoggerFactory.getLogger(PaymentPortalJob.class);
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Bean
    MongoItemReader<PaymentAudit> fileReader() {
        return new MongoItemReaderBuilder<PaymentAudit>()
                .name("file-reader")
                .targetType(PaymentAudit.class)
                .delimited().delimiter(",").names(new String[] {"rxFname", "rxLname"})
                .build();
    }
    @Bean
    public ItemProcessor<PaymentAudit, PaymentAudit> processor() {
        return new PaymentAuditItemProcessor();
    }
    @Bean
    public ItemWriter<PaymentAudit> writer() {
        return new MongoItemWriterBuilder()<PaymentAudit>();
        try {
            writer.setTemplate(mongoTemplate());
        } catch (Exception e) {
            LOG.error(e.toString());
        }
        writer.setCollection("paymentAudit");
        return writer;
    }
    @Bean
    Job job(JobBuilderFactory jbf,
            StepBuilderFactory sbf,
            ItemReader<? extends PaymentAudit> ir,
            ItemWriter<? super PaymentAudit> iw) {
        Step s1 = sbf.get("file-db")
                .<PaymentAudit, PaymentAudit>chunk(100)
                .reader(ir)
                .writer(iw)
                .build();
        return jbf.get("etl")
                .incrementer(new RunIdIncrementer())
                .start(s1)
                .build();
    }
    @Bean
    public MongoDbFactory mongoDbFactory() throws Exception {
        return new SimpleMongoDbFactory(new MongoClient(), "db-name");
    }
    @Bean
    public MongoTemplate mongoTemplate() throws Exception {
        MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory());
        return mongoTemplate;
    }
}

Spring batch - get information about files in a directory

So I'm toying around with Spring Batch for the first time and trying to understand how to do things other than process a CSV file.
Attempting to read every music file in a directory, for example, I have the following code, but I'm not sure how to handle the delegate part.
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Bean
    public MusicItemProcessor processor() {
        return new MusicItemProcessor();
    }
    @Bean
    public Job readFiles() {
        return jobBuilderFactory.get("readFiles").incrementer(new RunIdIncrementer()).
                flow(step1()).end().build();
    }
    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1").<String, String>chunk(10)
                .reader(reader())
                .processor(processor()).build();
    }
    @Bean
    public ItemReader<String> reader() {
        Resource[] resources = null;
        ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
        try {
            resources = patternResolver.getResources("file:/music/*.flac");
        } catch (IOException e) {
            e.printStackTrace();
        }
        MultiResourceItemReader<String> reader = new MultiResourceItemReader<>();
        reader.setResources(resources);
        reader.setDelegate(new FlatFileItemReader<>()); // ??
        return reader;
    }
}
At the moment I can see that resources has a list of music files, but looking at the stacktrace I get back, it looks to me like new FlatFileItemReader<>() is trying to read the actual content of the files (I'll want to do that at some point, just not right now).
At the moment I just want the information about the file (absolute path, size, filename etc), not what's inside.
Have I gone completely wrong with this? Or do I just need to configure something a little different?
Any examples of code that does more than process CSV lines would also be awesome
After scouring the internet I've managed to pull together something that I think works... Some feedback would be welcome.
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Bean
    public VideoItemProcessor processor() {
        return new VideoItemProcessor();
    }
    @Bean
    public Job readFiles() {
        return jobBuilderFactory.get("readFiles")
                .start(step())
                .build();
    }
    @Bean
    public Step step() {
        try {
            return stepBuilderFactory.get("step").<File, Video>chunk(500)
                    .reader(directoryItemReader())
                    .processor(processor())
                    .build();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
    @Bean
    public DirectoryItemReader directoryItemReader() throws IOException {
        return new DirectoryItemReader("file:/media/media/Music/**/*.flac");
    }
}
The part that had me stuck was creating a custom reader for files. If anyone else comes across this, this is how I've done it. I'm sure there are better ways, but this works for me:
public class DirectoryItemReader implements ItemReader<File>, InitializingBean {
    private final String directoryPath;
    private final List<File> foundFiles = Collections.synchronizedList(new ArrayList<>());
    public DirectoryItemReader(final String directoryPath) {
        this.directoryPath = directoryPath;
    }
    @Override
    public File read() {
        if (!foundFiles.isEmpty()) {
            return foundFiles.remove(0);
        }
        synchronized (foundFiles) {
            final Iterator files = foundFiles.iterator();
            if (files.hasNext()) {
                return foundFiles.remove(0);
            }
        }
        return null;
    }
    @Override
    public void afterPropertiesSet() throws Exception {
        for (final Resource file : getFiles()) {
            this.foundFiles.add(file.getFile());
        }
    }
    private Resource[] getFiles() throws IOException {
        ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
        return patternResolver.getResources(directoryPath);
    }
}
The only thing you'd need to do is implement your own processor. I've used videos in this example, so I have a video processor:
@Slf4j
public class VideoItemProcessor implements ItemProcessor<File, Video> {
    @Override
    public Video process(final File item) throws Exception {
        Video video = Video.builder()
                .filename(item.getAbsoluteFile().getName())
                .absolutePath(item.getAbsolutePath())
                .fileSize(item.length()) // item.getTotalSpace() would report the partition size, not the file size
                .build();
        log.info("Created {}", video);
        return video;
    }
}

Spring Boot + Spring Batch Multiple Job Creation and Scheduling

I created a Spring Boot application with Spring Batch and scheduling. When I create only one job, things work fine. But when I try to create another job using the modular approach, I get a few errors like "reader is already closed" and some errors related to version, even though I am using different readers. The jobs and their steps are running many times and getting duplicated.
Can anyone please guide me on how to resolve these issues and run the jobs in parallel, independent of each other?
Below are the configuration classes:
ModularJobConfiguration.java, DeptBatchConfiguration.java, CityBatchConfiguration.java and BatchScheduler.java
@Configuration
@EnableBatchProcessing(modular = true)
public class ModularJobConfiguration {
    @Bean
    public ApplicationContextFactory firstJob() {
        return new GenericApplicationContextFactory(DeptBatchConfiguration.class);
    }
    @Bean
    public ApplicationContextFactory secondJob() {
        return new GenericApplicationContextFactory(CityBatchConfiguration.class);
    }
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class DeptBatchConfiguration {
    private static final Logger LOGGER = LoggerFactory.getLogger(DeptBatchConfiguration.class);
    @Autowired
    private SimpleJobLauncher jobLauncher;
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public JobExecutionListener listener;
    public ItemReader<DepartmentModelReader> deptReaderSO;
    @Autowired
    @Qualifier("dataSourceReader")
    private DataSource dataSourceReader;
    @Autowired
    @Qualifier("dataSourceWriter")
    private DataSource dataSourceWriter;
    @Scheduled(cron = "0 0/1 * * * ?")
    public void performFirstJob() throws Exception {
        long startTime = System.currentTimeMillis();
        LOGGER.info("Job1 Started at :" + new Date());
        JobParameters param = new JobParametersBuilder().addString("JobID1", String.valueOf(System.currentTimeMillis())).toJobParameters();
        JobExecution execution = (JobExecution) jobLauncher.run(importDeptJob(jobBuilderFactory, stepdept(deptReaderSO, customWriter()), listener), param);
        long endTime = System.currentTimeMillis();
        LOGGER.info("Job1 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
    }
    @Bean
    public ItemReader<DepartmentModelReader> deptReaderSO() {
        //LOGGER.info("Inside deptReaderSO Method");
        JdbcCursorItemReader<DepartmentModelReader> deptReaderSO = new JdbcCursorItemReader<>();
        //deptReaderSO.setSql("select id, firstName, lastname, random_num from reader");
        deptReaderSO.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
        deptReaderSO.setDataSource(dataSourceReader);
        deptReaderSO.setRowMapper(
                (ResultSet resultSet, int rowNum) -> {
                    if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
                        DepartmentModelReader recordSO = new DepartmentModelReader();
                        recordSO.setDeptCode(resultSet.getString("DEPT_CODE"));
                        recordSO.setDeptName(resultSet.getString("DEPT_NAME"));
                        recordSO.setFullDeptName(resultSet.getString("FULL_DEPT_NAME"));
                        recordSO.setCityCode(resultSet.getInt("CITY_CODE"));
                        recordSO.setCityName(resultSet.getString("CITY_NAME"));
                        recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
                        recordSO.setCreatedUserId(resultSet.getInt("CREATED_USER_ID"));
                        recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
                        recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
                        recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
                        recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
                        recordSO.setDeptClass(resultSet.getInt("DEPT_CLASS"));
                        recordSO.setDeptParent(resultSet.getString("DEPT_PARENT"));
                        recordSO.setDeptParentName(resultSet.getString("DEPT_PARENT_NAME"));
                        // LOGGER.info("RowMapper record : {}", recordSO.getDeptCode() +" | "+recordSO.getDeptName());
                        return recordSO;
                    } else {
                        LOGGER.info("Returning null from rowMapper");
                        return null;
                    }
                });
        return deptReaderSO;
    }
    @Bean
    public ItemProcessor<DepartmentModelReader, DepartmentModelWriter> processor() {
        //LOGGER.info("Inside Processor Method");
        return new RecordProcessor();
    }
    @Bean
    public ItemWriter<DepartmentModelWriter> customWriter() {
        //LOGGER.info("Inside customWriter Method");
        return new CustomItemWriter();
    }
    @Bean
    public Job importDeptJob(JobBuilderFactory jobs, Step stepdept, JobExecutionListener listener) {
        return jobs.get("importDeptJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener())
                .flow(stepdept).end().build();
    }
    @Bean
    public Step stepdept(ItemReader<DepartmentModelReader> deptReaderSO,
                         ItemWriter<DepartmentModelWriter> writerSO) {
        LOGGER.info("Inside stepdept Method");
        return stepBuilderFactory.get("stepdept").<DepartmentModelReader, DepartmentModelWriter>chunk(5)
                .reader(deptReaderSO).processor(processor()).writer(customWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
    }
    @Bean
    public JobExecutionListener listener() {
        return new JobCompletionNotificationListener();
    }
    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
    @Bean
    public BatchWriteService batchWriteService() {
        return new BatchWriteService();
    }
    @Bean
    public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setDataSource(dataSourceWriter);
        return transactionManager;
    }
}
@Configuration
@EnableBatchProcessing
@Import({BatchScheduler.class})
public class CityBatchConfiguration {
    private static final Logger LOGGER = LoggerFactory.getLogger(CityBatchConfiguration.class);
    @Autowired
    private SimpleJobLauncher jobLauncher;
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public JobExecutionListener listener;
    public ItemReader<CitiesModelReader> citiesReaderSO;
    @Autowired
    @Qualifier("dataSourceReader")
    private DataSource dataSourceReader;
    @Autowired
    @Qualifier("dataSourceWriter")
    private DataSource dataSourceWriter;
    @Scheduled(cron = "0 0/1 * * * ?")
    public void performSecondJob() throws Exception {
        long startTime = System.currentTimeMillis();
        LOGGER.info("\n Job2 Started at :" + new Date());
        JobParameters param = new JobParametersBuilder().addString("JobID2", String.valueOf(System.currentTimeMillis())).toJobParameters();
        JobExecution execution = (JobExecution) jobLauncher.run(importCitiesJob(jobBuilderFactory, stepcity(citiesReaderSO, customCitiesWriter()), listener), param);
        long endTime = System.currentTimeMillis();
        LOGGER.info("Job2 finished at " + (endTime - startTime) / 1000 + " seconds with status :" + execution.getExitStatus());
    }
    @Bean
    public ItemReader<CitiesModelReader> citiesReaderSO() {
        //LOGGER.info("Inside readerSO Method");
        JdbcCursorItemReader<CitiesModelReader> readerSO = new JdbcCursorItemReader<>();
        readerSO.setSql("SELECT CITY_CODE,CITY_NAME,PARENT_CITY,CITY_TYPE,CITY_TYPE_NAME,CREATED_G_DATE,CREATED_USER_ID,MODIFIED_G_DATE,MODIFIED_USER_ID,RECORD_ACTIVITY FROM TBL_SAMPLE_SAFTY_CITIES");
        readerSO.setDataSource(dataSourceReader);
        readerSO.setRowMapper(
                (ResultSet resultSet, int rowNum) -> {
                    if (!(resultSet.isAfterLast()) && !(resultSet.isBeforeFirst())) {
                        CitiesModelReader recordSO = new CitiesModelReader();
                        recordSO.setCityCode(resultSet.getLong("CITY_CODE"));
                        recordSO.setCityName(resultSet.getString("CITY_NAME"));
                        recordSO.setParentCity(resultSet.getInt("PARENT_CITY"));
                        recordSO.setCityType(resultSet.getString("CITY_TYPE"));
                        recordSO.setCityTypeName(resultSet.getString("CITY_TYPE_NAME"));
                        recordSO.setCreatedGDate(resultSet.getDate("CREATED_G_DATE"));
                        recordSO.setCreatedUserId(resultSet.getString("CREATED_USER_ID"));
                        recordSO.setModifiedGDate(resultSet.getDate("MODIFIED_G_DATE"));
                        recordSO.setModifiedUserId(resultSet.getString("MODIFIED_USER_ID"));
                        recordSO.setRecordActivity(resultSet.getInt("RECORD_ACTIVITY"));
                        //LOGGER.info("RowMapper record : {}", recordSO.toString());
                        return recordSO;
                    } else {
                        LOGGER.info("Returning null from rowMapper");
                        return null;
                    }
                });
        return readerSO;
    }
    @Bean
    public ItemProcessor<CitiesModelReader, CitiesModelWriter> citiesProcessor() {
        //LOGGER.info("Inside Processor Method");
        return new RecordCitiesProcessor();
    }
    @Bean
    public ItemWriter<CitiesModelWriter> customCitiesWriter() {
        LOGGER.info("Inside customCitiesWriter Method");
        return new CustomCitiesWriter();
    }
    @Bean
    public Job importCitiesJob(JobBuilderFactory jobs, Step stepcity, JobExecutionListener listener) {
        LOGGER.info("Inside importCitiesJob Method");
        return jobs.get("importCitiesJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener())
                .flow(stepcity).end().build();
    }
    @Bean
    public Step stepcity(ItemReader<CitiesModelReader> readerSO,
                         ItemWriter<CitiesModelWriter> writerSO) {
        LOGGER.info("Inside stepCity Method");
        return stepBuilderFactory.get("stepcity").<CitiesModelReader, CitiesModelWriter>chunk(5)
                .reader(readerSO).processor(citiesProcessor()).writer(customCitiesWriter()).transactionManager(platformTransactionManager(dataSourceWriter)).build();
    }
    @Bean
    public JobExecutionListener listener() {
        return new JobCompletionNotificationListener();
    }
    @Bean
    public JdbcTemplate jdbcTemplate(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
    @Bean
    public BatchWriteService batchWriteService() {
        return new BatchWriteService();
    }
    @Bean
    public PlatformTransactionManager platformTransactionManager(@Qualifier("dataSourceWriter") DataSource dataSourceWriter) {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setDataSource(dataSourceWriter);
        return transactionManager;
    }
}
@Configuration
@EnableScheduling
public class BatchScheduler {
    private static final Logger LOGGER = LoggerFactory.getLogger(BatchScheduler.class);
    @Bean
    public ResourcelessTransactionManager resourcelessTransactionManager() {
        return new ResourcelessTransactionManager();
    }
    @Bean
    public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
            ResourcelessTransactionManager txManager) throws Exception {
        LOGGER.info("Inside mapJobRepositoryFactory method");
        MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(txManager);
        factory.afterPropertiesSet();
        return factory;
    }
    @Bean
    public JobRepository jobRepository(
            MapJobRepositoryFactoryBean factory) throws Exception {
        LOGGER.info("Inside jobRepository method");
        return factory.getObject();
    }
    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
        LOGGER.info("Inside jobLauncher method");
        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository);
        final SimpleAsyncTaskExecutor simpleAsyncTaskExecutor = new SimpleAsyncTaskExecutor();
        launcher.setTaskExecutor(simpleAsyncTaskExecutor);
        return launcher;
    }
}
Your readers are not thread safe and not step scoped. Because of that, you're running into concurrency issues. Configure each of your stateful ItemReaders (the ones that implement ItemStream, like the JdbcCursorItemReader) to be step scoped by adding the @StepScope annotation, and things should work fine.
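For illustration, a step-scoped version of the department reader could look like the sketch below. It keeps the bean name, SQL and injected data source from the configuration above, but swaps the hand-written lambda for a BeanPropertyRowMapper purely to keep the example short; treat it as a sketch rather than a drop-in replacement:
@Bean
@StepScope
public JdbcCursorItemReader<DepartmentModelReader> deptReaderSO() {
    // With @StepScope a new reader instance is created for every step execution,
    // so the two scheduled jobs no longer share (and prematurely close) the same reader.
    JdbcCursorItemReader<DepartmentModelReader> reader = new JdbcCursorItemReader<>();
    reader.setSql("SELECT DEPT_CODE,DEPT_NAME,FULL_DEPT_NAME,CITY_CODE,CITY_NAME,CITY_TYPE_NAME,CREATED_USER_ID,CREATED_G_DATE,MODIFIED_USER_ID,MODIFIED_G_DATE,RECORD_ACTIVITY,DEPT_CLASS,DEPT_PARENT,DEPT_PARENT_NAME FROM TBL_SAMPLE_SAFTY_DEPTS");
    reader.setDataSource(dataSourceReader);
    // Simplified mapping; the original lambda-based RowMapper from the question works the same way here.
    reader.setRowMapper(new BeanPropertyRowMapper<>(DepartmentModelReader.class));
    return reader;
}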

Resources