If an exception is thrown, Spring Batch should not go to the last method - spring

@Configuration
public class BookStorePoliciesJobConfiguration {

    private static final String CRON_EXPRESSION_FOR_JOB = "0 0/10 * 1/1 * ? *"; // change
    private static final String JOB_NAME = "bookStorePolicyJob";
    private static final String JOB_STEP_NAME = JOB_NAME + "Step";

    @Autowired
    private DataSource nonXAcrmDataSource;

    @Autowired
    private JtaTransactionManager transactionManager;

    @Bean
    public CanerScheduledJobFactoryBean bookStorePoliciesScheduledJob() {
        CanerScheduledJobFactoryBean bean = new CanerScheduledJobFactoryBean();
        bean.setBatchJobName(JOB_NAME);
        bean.setCronExp(CRON_EXPRESSION_FOR_JOB);
        bean.setNonXAcrmDataSource(this.nonXAcrmDataSource);
        return bean;
    }

    @Bean
    public Job bookStorePolicyJob(@Autowired BookStoreJobTasklet tasklet,
                                  @Autowired JobRepository batchJobRepository) {
        SimpleJob job = new SimpleJob(JOB_NAME);
        job.setSteps(Collections.singletonList(bookStorePolicyJobStep(tasklet, batchJobRepository)));
        job.setJobRepository(batchJobRepository);
        return job;
    }

    public Step bookStorePolicyJobStep(BookStoreJobTasklet tasklet, JobRepository batchJobRepository) {
        TaskletStep step = new TaskletStep(JOB_STEP_NAME);
        step.setTasklet(tasklet);
        step.setJobRepository(batchJobRepository);
        transactionManager.setAllowCustomIsolationLevels(true);
        step.setTransactionManager(transactionManager);
        return step;
    }
}
This is the job:
@Component
public class BookStoreJobTasklet extends CanerTasklet {

    @Override
    public RepeatStatus doExecute(StepContribution contribution, ChunkContext chunkContext) {
        // ...
        sendFileBySftp(fileName, file); // if this throws, it should not reach the next line; it should exit, but it continues
        updateParameters(); // should run only if the upload was successful!
        return RepeatStatus.FINISHED;
    }
And the method:
private void sendFileBySftp(String fileName, File file) {
    // ...
    try {
        // ...
    } catch (JSchException | SftpException | IOException e) {
        logger.error("error in sendFileFTP {}", e.getMessage()); // execution lands here and then continues; it should exit?
    }
    logger.info("Session connection closed....");
}
What should I do to stop the batch?
I looked at this question: Make a spring-batch job exit with non-zero code if an exception is thrown.
Should I return a RepeatStatus from each method so I can check whether it failed?
Or maybe a boolean flag that each catch block sets to false?
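One way to get that behaviour (a minimal sketch, not a confirmed fix from this thread): let the exception escape the tasklet instead of swallowing it in the catch block. Spring Batch marks the step, and therefore the job, as FAILED when a tasklet throws, so the code after the failing call is never reached.

    // Sketch: rethrow from the catch block so the tasklet, step, and job fail.
    private void sendFileBySftp(String fileName, File file) {
        try {
            // ... SFTP transfer ...
        } catch (JSchException | SftpException | IOException e) {
            logger.error("error in sendFileFTP {}", e.getMessage());
            // Wrapping in an unchecked exception lets it propagate out of
            // doExecute; Spring Batch then marks the step FAILED and
            // updateParameters() is never reached.
            throw new IllegalStateException("SFTP upload failed: " + fileName, e);
        }
        logger.info("Session connection closed....");
    }

With this change, doExecute stops at sendFileBySftp on failure, so updateParameters() only runs after a successful upload and no per-method RepeatStatus or boolean flag is needed.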

Related

Spring Batch Multiple JobExecutionListener - not working

I have a Spring Boot batch application.
I have two jobs and two different JobExecutionListeners, but only one JobExecutionListener is ever called, no matter which job runs.
Job One
@Configuration
public class TestDriveJob {

    @Autowired
    JobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    TestDriveTasklet testDriveTasklet;

    @Autowired
    BatchJobMgmt batchJobMgmt;

    @Autowired
    TestDriveJobExecutionListener testDriveJobExecutionListener;

    private static final String JOB_NAME = "test-drive-job";

    @Scheduled(fixedDelay = 100 * 1000, initialDelay = 5000)
    public void schedule() throws Exception {
        if (batchJobMgmt.isJobAllowed(JOB_NAME)) {
            JobParameters param = new JobParametersBuilder()
                    .addString("JobID", "TEST" + String.valueOf(System.currentTimeMillis()))
                    .toJobParameters();
            jobLauncher.run(job(), param);
        }
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get(JOB_NAME)
                .listener(testDriveJobExecutionListener)
                .incrementer(new RunIdIncrementer())
                .start(testDriveStep())
                .build();
    }

    @Bean
    public Step testDriveStep() {
        return stepBuilderFactory.get("test-drive-step")
                .tasklet(testDriveTasklet)
                .listener(testDriveJobExecutionListener)
                .build();
    }
}
Job Two
@Configuration
public class ExtendedWarrantyJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExtendedWarrantyJob.class);

    @Autowired
    JobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    ExtendedWarrantyTasklet extendedWarrantyTasklet;

    @Autowired
    BatchJobMgmt batchJobMgmt;

    @Autowired
    ExtendedWarrantyJobExecutionListener extendedWarrantyJobExecutionListener;

    private static final String JOB_NAME = "extended-warranty-job";

    @Scheduled(fixedDelay = 900 * 1000, initialDelay = 20000)
    public void schedule() throws Exception {
        if (batchJobMgmt.isJobAllowed(JOB_NAME)) {
            LOGGER.debug("Extended Warranty Job Started at :" + new Date());
            JobParameters param = new JobParametersBuilder()
                    .addString("JobID", String.valueOf(System.currentTimeMillis()))
                    .toJobParameters();
            JobExecution execution = jobLauncher.run(job(), param);
            LOGGER.debug("Extended Warranty Job finished with status :" + execution.getStatus());
        }
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get(JOB_NAME)
                .listener(extendedWarrantyJobExecutionListener)
                .incrementer(new RunIdIncrementer())
                .start(extendedWarrantyStep())
                .build();
    }

    @Bean
    public Step extendedWarrantyStep() {
        return stepBuilderFactory.get("extended-warranty-step")
                .tasklet(extendedWarrantyTasklet)
                .build();
    }
}
Job one Listener
@Service
public class TestDriveJobExecutionListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(TestDriveJobExecutionListener.class);

    @Override
    public void beforeJob(JobExecution jobExecution) {
        LOGGER.debug("{} Started at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        LOGGER.debug("{} Completed at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }
}
Job Two Listener
@Service
public class ExtendedWarrantyJobExecutionListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExtendedWarrantyJobExecutionListener.class);

    @Override
    public void beforeJob(JobExecution jobExecution) {
        LOGGER.debug("{} Started=== at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        LOGGER.debug("{} Completed at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }
}
Both jobs execute without any issue, but the job one listener is always the one called; the job two listener never fires.
I tried adding @Qualifier to the @Autowired annotation, but it made no difference.
Any help is appreciated.
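A likely cause, judging only from the code shown: both @Configuration classes declare a @Bean method named job(), so the two bean definitions collide and one Job definition (with its listener) overrides the other. A minimal sketch of the fix is to give the factory methods unique names:

    // In TestDriveJob
    @Bean
    public Job testDriveJob() {
        return jobBuilderFactory.get(JOB_NAME)            // "test-drive-job"
                .listener(testDriveJobExecutionListener)
                .incrementer(new RunIdIncrementer())
                .start(testDriveStep())
                .build();
    }

    // In ExtendedWarrantyJob
    @Bean
    public Job extendedWarrantyJob() {
        return jobBuilderFactory.get(JOB_NAME)            // "extended-warranty-job"
                .listener(extendedWarrantyJobExecutionListener)
                .incrementer(new RunIdIncrementer())
                .start(extendedWarrantyStep())
                .build();
    }

Each schedule() method would then launch its own renamed bean (e.g. jobLauncher.run(testDriveJob(), param)), so each job keeps its own listener.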

Failed to run job based on schedule

I'm trying to run the batch on a schedule. I have used the @Scheduled annotation with a cron expression, but the batch runs only once and no error is displayed. I have added the Maven dependency for Quartz. I have not added any XML file.
@EnableAutoConfiguration(exclude = { DataSourceAutoConfiguration.class, SwaggerConfig.class,
        WebMvcAutoConfiguration.class, RepositoryRestMvcAutoConfiguration.class })
@EnableScheduling
@ComponentScan
public class BatchApplication {

    public static void main(String[] args) throws Exception {
        SpringApplication app = new SpringApplication(BatchApplication.class);
        app.setWebEnvironment(false);
        ConfigurableApplicationContext ctx = app.run(args);
        System.out.println(ctx.getBean(DataSource.class));
        JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
        Job addLeaveAllocationJob = ctx.getBean("addLeaveAllocationJob", Job.class);
        JobParameters jobParameters = new JobParametersBuilder().addDate("date", new Date())
                .toJobParameters();
        JobExecution jobExecution = jobLauncher.run(addLeaveAllocationJob, jobParameters);
        BatchStatus batchStatus = jobExecution.getStatus();
        while (batchStatus.isRunning()) {
            System.out.println("*** Still Running ************");
            Thread.sleep(2000);
        }
    }
}
I have a job class which is scheduled with the @Scheduled annotation and a cron expression.
@Configuration
@EnableBatchProcessing
@Component
public class LeaveAllocationJobConfiguration {

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private EntityManagerFactory entityManagerFactory;

    @Bean
    public ItemReader<Employee> reader() {
        JpaPagingItemReader<Employee> employeeReader = new JpaPagingItemReader<Employee>();
        employeeReader.setEntityManagerFactory(entityManagerFactory);
        employeeReader.setQueryString("from Employee");
        return employeeReader;
    }

    @Bean
    @Scheduled(cron = "0 0/1 * 1/1 * ? *")
    public Job addLeaveAllocationJob() {
        System.out.println("Hello");
        return jobs.get("addLeaveAllocationJob").listener(protocolListener()).start(step()).build();
    }

    @Bean
    public Step step() {
        // important to be one in this case to commit after every line read
        return stepBuilderFactory.get("step").<Employee, EmployeeDTO> chunk(1).reader(reader()).processor(processor())
                .writer(writer()).build();
    }

    /**
     * @return
     */
    @Bean
    public ItemWriter<? super EmployeeDTO> writer() {
        return new ItemWriter<EmployeeDTO>() {
            @Override
            public void write(List<? extends EmployeeDTO> items) throws Exception {
                System.out.println("Processing " + items);
            }
        };
    }

    @Bean
    public ItemProcessor<Employee, EmployeeDTO> processor() {
        return new ItemProcessor<Employee, EmployeeDTO>() {
            @Override
            public EmployeeDTO process(Employee employee) throws Exception {
                return new EmployeeDTO(employee);
            }
        };
    }

    @Bean
    public ProtocolListener protocolListener() {
        return new ProtocolListener();
    }
}
Please help me to solve this issue.
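For reference, a sketch of the usual fix (an assumption about the root cause, not part of the original thread): @Scheduled on a @Bean factory method only fires when the bean definition is processed, so the job is built and launched just once. Moving the schedule onto a separate component that launches the already-defined job makes it recur. The class name below is hypothetical; the jobLauncher and addLeaveAllocationJob beans are the ones defined above.

    @Component
    public class LeaveAllocationJobScheduler {

        @Autowired
        private JobLauncher jobLauncher;

        @Autowired
        private Job addLeaveAllocationJob; // the bean defined above

        // Fires every minute; fresh parameters create a new JobInstance
        // instead of re-running an already completed one.
        @Scheduled(cron = "0 */1 * * * *")
        public void launchJob() throws Exception {
            JobParameters params = new JobParametersBuilder()
                    .addDate("date", new Date())
                    .toJobParameters();
            jobLauncher.run(addLeaveAllocationJob, params);
        }
    }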

Spring Batch: Retrying a tasklet using #Retryable and #EnableRetry annotation

I have this tasklet which uploads a file to Amazon S3. Now I want to retry the tasklet execution whenever an AmazonClientException is thrown. I figured the @Retryable annotation would do the job.
Tasklet:
@Component
@StepScope
@Retryable(value = AmazonClientException.class, stateful = true, backoff = @Backoff(2000))
public class S3UploadTasklet extends ArgsSupport implements Tasklet {

    @Autowired
    private S3Client s3Client;

    @Autowired
    private S3Properties s3Properties;

    private static final Logger LOGGER = LoggerFactory.getLogger(S3UploadTasklet.class);
    private static final String FILE_EXTENSION = ".gpg";

    @Override
    public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
        try {
            String localFilename = getTempOutputFilename() + FILE_EXTENSION;
            String s3Filename = s3Properties.getReportPath() + getS3OutputFilename() + FILE_EXTENSION;
            File f = new File(localFilename);
            if (f.exists()) {
                LOGGER.info("Uploading " + localFilename + " to s3...");
                s3Client.upload(localFilename, s3Filename, s3Properties.getBucketName());
                LOGGER.info("Uploading done!");
            } else {
                throw new RuntimeException("Encrypted file not found! Encryption process might have failed.");
            }
        } catch (AmazonClientException e) {
            LOGGER.error("Problems uploading to S3. " + e.getMessage(), e);
            throw e;
        } catch (RuntimeException e) {
            LOGGER.error("Runtime error occurred. " + e.getMessage(), e);
            throw e;
        }
        return RepeatStatus.FINISHED;
    }
}
Job configuration:
@Configuration
@EnableBatchProcessing
@EnableRetry
public class BatchConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private Step generateReport;

    @Autowired
    private Step encrypt;

    @Autowired
    private Step upload;

    @Autowired
    private Step cleanUp;

    @Bean
    @Transactional(value = "appTransactionManager", isolation = Isolation.READ_COMMITTED)
    public Job generateReconJob() {
        return jobBuilderFactory.get("reconJob")
                .incrementer(new RunIdIncrementer())
                .start(generateReport)
                .on("COMPLETED").to(encrypt)
                .from(generateReport)
                .on("NOOP").end()
                .from(generateReport)
                .on("FAILED").to(cleanUp)
                .from(encrypt)
                .on("COMPLETED").to(upload)
                .from(encrypt)
                .on("FAILED").to(cleanUp)
                .from(upload)
                .on("*").to(cleanUp)
                .from(cleanUp)
                .on("*").end()
                .end()
                .build();
    }
}
However, it doesn't do what it is supposed to do: the batch job still doesn't retry the tasklet when the exception is thrown.
Any thoughts?
Here's the step configuration as well:
@Configuration
public class ReportConfiguration {
    ...
    @Autowired
    private S3UploadTasklet s3UploadTasklet;
    ...
    @Bean
    public Step upload() {
        return stepBuilderFactory.get("upload")
                .tasklet(s3UploadTasklet)
                .build();
    }
}
The @Retryable(value=AmazonClientException.class, stateful=true, backoff=@Backoff(2000)) annotation should be on the method you want to retry, not the class.
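A minimal sketch of that placement (the annotation attributes are copied from the question; whether stateful retry is appropriate here depends on the surrounding transaction setup):

    @Component
    @StepScope
    public class S3UploadTasklet extends ArgsSupport implements Tasklet {

        @Override
        @Retryable(value = AmazonClientException.class, stateful = true, backoff = @Backoff(2000))
        public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
            // ... same upload logic as above: an AmazonClientException is
            // logged and rethrown, which is what triggers the retry with a
            // 2-second backoff ...
            return RepeatStatus.FINISHED;
        }
    }

Since the tasklet is injected into the step as a Spring bean, calls to execute(...) go through the retry proxy created by @EnableRetry, which is what makes the method-level annotation take effect.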

Why the itemReader is always sending the exact same value to CustomItemProcessor

Why does the itemReader method always send the exact same file name to be processed in CustomItemProcessor?
As far as I understand, since I set up the reader with @Scope and set more than 1 in chunk, I was expecting the "return s" to move forward to the next value from the String array.
Let me clarify my question with a debug example in the reader method:
1 - the variable stringArray is filled in with 3 file names (f1.txt, f2.txt and f3.txt)
2 - "return s" is invoked with s = f1.txt
3 - "return s" is invoked again before the customItemProcessor method runs (perfect until here, since chunk = 2)
4 - looking at s, it contains f1.txt again (different from what I expected; I expected f2.txt)
5 and 6 - the processor runs with the same name f1.txt (it would work correctly if the second turn of "return s" contained f2.txt)
7 - the writer method works as expected (processedFiles contains twice the name processed in customItemProcessor: f1.txt and f1.txt again, since the same name was processed twice)
CustomItemReader
public class CustomItemReader implements ItemReader<String> {

    @Override
    public String read() throws Exception, UnexpectedInputException,
            ParseException, NonTransientResourceException {
        String[] stringArray;
        try (Stream<Path> stream = Files.list(Paths.get(env
                .getProperty("my.path")))) {
            stringArray = stream.map(String::valueOf)
                    .filter(path -> path.endsWith("out"))
                    .toArray(size -> new String[size]);
        }
        // *** the problem is here:
        // every turn, the s variable receives the first file name from stringArray
        if (stringArray.length > 0) {
            for (String s : stringArray) {
                return s;
            }
        } else {
            log.info("read method - no file found");
            return null;
        }
        return null;
    }
}
CustomItemProcessor
public class CustomItemProcessor implements ItemProcessor<String, String> {

    @Override
    public String process(String singleFileToProcess) throws Exception {
        log.info("process method: " + singleFileToProcess);
        return singleFileToProcess;
    }
}
CustomItemWriter
public class CustomItemWriter implements ItemWriter<String> {

    private static final Logger log = LoggerFactory
            .getLogger(CustomItemWriter.class);

    @Override
    public void write(List<? extends String> processedFiles) throws Exception {
        processedFiles.stream().forEach(
                processedFile -> log.info("**** write method"
                        + processedFile.toString()));
        FileSystem fs = FileSystems.getDefault();
        for (String s : processedFiles) {
            Files.deleteIfExists(fs.getPath(s));
        }
    }
}
Configuration
@Configuration
@ComponentScan(...
@EnableBatchProcessing
@EnableScheduling
@PropertySource(...
public class BatchConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JobRepository jobRepository;

    @Bean
    public TaskExecutor getTaskExecutor() {
        return new TaskExecutor() {
            @Override
            public void execute(Runnable task) {
            }
        };
    }

    // I can see the number in chunk reflects how many times customReader is triggered before it triggers customProcessor
    @Bean
    public Step step1(ItemReader<String> reader,
            ItemProcessor<String, String> processor, ItemWriter<String> writer) {
        return stepBuilderFactory.get("step1").<String, String> chunk(2)
                .reader(reader).processor(processor).writer(writer)
                .allowStartIfComplete(true).build();
    }

    @Bean
    @Scope
    public ItemReader<String> reader() {
        return new CustomItemReader();
    }

    @Bean
    public ItemProcessor<String, String> processor() {
        return new CustomItemProcessor();
    }

    @Bean
    public ItemWriter<String> writer() {
        return new CustomItemWriter();
    }

    @Bean
    public Job job(Step step1) throws Exception {
        return jobBuilderFactory.get("job1").incrementer(new RunIdIncrementer()).start(step1).build();
    }
}
Scheduler
@Component
public class QueueScheduler {

    private static final Logger log = LoggerFactory
            .getLogger(QueueScheduler.class);

    private Job job;
    private JobLauncher jobLauncher;

    @Autowired
    public QueueScheduler(JobLauncher jobLauncher, @Qualifier("job") Job job) {
        this.job = job;
        this.jobLauncher = jobLauncher;
    }

    @Scheduled(fixedRate = 60000)
    public void runJob() {
        try {
            jobLauncher.run(job, new JobParameters());
        } catch (Exception ex) {
            log.info(ex.getMessage());
        }
    }
}
Your issue is that you are relying on an internal loop to iterate over the items instead of letting Spring Batch do it for you by calling ItemReader#read multiple times.
What I'd recommend is changing your reader to something like the following:
public class JimsItemReader implements ItemStreamReader<String> {

    private String[] items;
    private int curIndex = -1;

    @Override
    public void open(ExecutionContext ec) throws ItemStreamException {
        curIndex = ec.getInt("curIndex", -1);
        // env as in the question's reader
        try (Stream<Path> stream = Files.list(Paths.get(env.getProperty("my.path")))) {
            items = stream.map(String::valueOf)
                    .filter(path -> path.endsWith("out"))
                    .toArray(size -> new String[size]);
        } catch (IOException e) {
            throw new ItemStreamException(e);
        }
    }

    @Override
    public void update(ExecutionContext ec) {
        ec.putInt("curIndex", curIndex);
    }

    @Override
    public void close() {
        // nothing to clean up
    }

    @Override
    public String read() {
        if (curIndex < items.length - 1) {
            curIndex++;
            return items[curIndex];
        } else {
            return null; // signals Spring Batch that the input is exhausted
        }
    }
}
The above example will loop through the items of your array as they are read. It is also restartable: since the index is stored in the ExecutionContext, a job restarted after a failure will pick up where it left off.
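To wire it in, a small sketch (an assumption, not from the original answer: it replaces the CustomItemReader bean in the configuration above). Exposing the bean as ItemStreamReader keeps the open/update/close callbacks visible to the step, so curIndex is persisted at each chunk boundary:

    @Bean
    @StepScope
    public ItemStreamReader<String> reader() {
        // Hypothetical wiring: replaces the previous CustomItemReader bean.
        return new JimsItemReader();
    }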

Is there a bug in Spring Batch Step flow function?

In the below piece of code, when StepA fails only StepC and StepD should execute, but what actually happens is that all the steps get executed! I want to split a Spring Batch job depending upon whether a step passes or not. I know that there are other ways of doing this, such as using a JobExecutionDecider or setting some job parameter, but I wanted to know what I was doing wrong here.
@Configuration
@EnableBatchProcessing
public class JobConfig {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public PlatformTransactionManager transactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public JobRepository jobRepository() {
        try {
            return new MapJobRepositoryFactoryBean(transactionManager())
                    .getJobRepository();
        } catch (Exception e) {
            return null;
        }
    }

    @Bean
    public JobLauncher jobLauncher() {
        final SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository());
        return launcher;
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get("job").
                flow(stepA()).on("FAILED").to(stepC()).next(stepD()).
                from(stepA()).on("*").to(stepB()).next(stepC()).end().build();
    }

    @Bean
    public Step stepA() {
        return stepBuilderFactory.get("stepA")
                .tasklet(new RandomFailTasket("stepA")).build();
    }

    @Bean
    public Step stepB() {
        return stepBuilderFactory.get("stepB")
                .tasklet(new PrintTextTasklet("stepB")).build();
    }

    @Bean
    public Step stepC() {
        return stepBuilderFactory.get("stepC")
                .tasklet(new PrintTextTasklet("stepC")).build();
    }

    @Bean
    public Step stepD() {
        return stepBuilderFactory.get("stepD")
                .tasklet(new PrintTextTasklet("stepD")).build();
    }

    @SuppressWarnings("resource")
    public static void main(String[] args) {
        // create spring application context
        final ApplicationContext appContext = new AnnotationConfigApplicationContext(
                JobConfig.class);
        // get the job config bean (i.e this bean)
        final JobConfig jobConfig = appContext.getBean(JobConfig.class);
        // get the job launcher
        JobLauncher launcher = jobConfig.jobLauncher();
        try {
            // launch the job
            JobExecution execution = launcher.run(jobConfig.job(), new JobParameters());
            System.out.println(execution.getJobInstance().toString());
        } catch (JobExecutionAlreadyRunningException e) {
            e.printStackTrace();
        } catch (JobRestartException e) {
            e.printStackTrace();
        } catch (JobInstanceAlreadyCompleteException e) {
            e.printStackTrace();
        } catch (JobParametersInvalidException e) {
            e.printStackTrace();
        }
    }
}
StepA is a dummy tasklet which fails randomly, i.e. it throws an exception about half the time:
public class RandomFailTasket extends PrintTextTasklet {

    public RandomFailTasket(String text) {
        super(text);
    }

    public RepeatStatus execute(StepContribution arg0, ChunkContext arg1)
            throws Exception {
        if (Math.random() < 0.5) {
            throw new Exception("fail");
        }
        return RepeatStatus.FINISHED;
    }
}
StepB, StepC, StepD are also dummy tasklets:
public class PrintTextTasklet implements Tasklet {

    private final String text;

    public PrintTextTasklet(String text) {
        this.text = text;
    }

    public RepeatStatus execute(StepContribution arg0, ChunkContext arg1)
            throws Exception {
        System.out.println(text);
        return RepeatStatus.FINISHED;
    }
}
We would need to have a look at the structure (XML or Java config) that you are actually running.
Try using a StepExecutionListener: in its afterStep method you can check the step status and then implement your logic to decide whether the next step should run.
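A minimal sketch of that suggestion (the listener class name is hypothetical; it maps the step outcome to an explicit exit status that the flow definitions above can branch on):

    public class StepResultListener implements StepExecutionListener {

        @Override
        public void beforeStep(StepExecution stepExecution) {
            // nothing to prepare
        }

        @Override
        public ExitStatus afterStep(StepExecution stepExecution) {
            // Map the outcome to an explicit exit status so the job flow
            // can branch reliably on .on("FAILED") / .on("*").
            if (stepExecution.getStatus() == BatchStatus.FAILED) {
                return ExitStatus.FAILED;
            }
            return ExitStatus.COMPLETED;
        }
    }

It would be attached to the failing step with .listener(new StepResultListener()) on the stepA builder.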
