How to run a job multiple times in parallel with different Excel files as input in Spring Batch

I have a use case where users upload different Excel files; each file is processed in parallel, and every row of the Excel needs to be saved in an H2 database together with the job execution id of the job that processed it.
The issue I am facing: when a user uploads the first file, its rows are saved at the back end with job execution id 1. But if the user uploads another Excel file with different data before the first one has completed, the data belonging to the first Excel also starts getting saved with the latest job execution id, which is 2. How can I resolve this so that each job's data is saved with that particular job's id, while the jobs still run in parallel? (The post included screenshots of the data in the first Excel sheet, the data in the second Excel sheet, and the resulting output saved in the H2 database.)
This is the service class
@Service
public class BatchTestService {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JobLauncher batchJobLauncher;

    @Autowired
    private WriterImpl writer;

    public Job job(byte[] data) {
        return jobBuilderFactory.get("job")
                .incrementer(new RunIdIncrementer())
                .flow(step(data))
                .end()
                .build();
    }

    @SneakyThrows
    public PoiItemReader<TestEntity> reader(byte[] data) {
        ReaderImpl reader = new ReaderImpl();
        reader.setLinesToSkip(1);
        reader.setResource(toResource(data, "TEST"));
        reader.setRowMapper(new MapperClass());
        return reader;
    }

    public Step step(byte[] data) {
        return stepBuilderFactory.get("step").<TestEntity, TestEntity>chunk(2)
                .reader(reader(data))
                .writer(writer)
                .build();
    }

    public ThreadPoolTaskExecutor getThreadPoolTaskExecutor() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(2);
        taskExecutor.setMaxPoolSize(4);
        taskExecutor.setThreadNamePrefix("test");
        taskExecutor.afterPropertiesSet();
        return taskExecutor;
    }

    public void uploadExcel(MultipartFile file) throws Exception {
        String jobId = String.valueOf(System.currentTimeMillis());
        JobParameters parameters = new JobParametersBuilder()
                .addString("jobId", jobId)
                .toJobParameters();
        ((SimpleJobLauncher) batchJobLauncher).setTaskExecutor(getThreadPoolTaskExecutor());
        batchJobLauncher.run(job(file.getBytes()), parameters);
    }

    public static Resource toResource(byte[] bytesFile, String sheetName) throws IOException {
        ByteArrayInputStream bin = new ByteArrayInputStream(bytesFile);
        XSSFWorkbook workbook = new XSSFWorkbook(bin);
        var outputStream = new ByteArrayOutputStream();
        workbook.write(outputStream);
        return new ByteArrayResource(outputStream.toByteArray());
    }
}
This is the config class.
@Configuration
public class BatchDataSourceConfig {

    @Value("${spring.datasource.driver-class-name}")
    private String driverName;

    @Value("${spring.datasource.url}")
    private String url;

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driverName);
        dataSource.setUrl(url);
        dataSource.setUsername("sa");
        dataSource.setPassword("");
        return dataSource;
    }

    @Bean
    public JobLauncher batchJobLauncher(JobRepository jobRepository) {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository);
        return jobLauncher;
    }
}
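As a side note (this is a sketch of an alternative, not something from the original post): instead of casting the injected JobLauncher and assigning a task executor on every upload in uploadExcel, the launcher could be configured once with an asynchronous TaskExecutor so that each launched job runs in the background:

@Bean
public TaskExecutor batchTaskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(2);
    taskExecutor.setMaxPoolSize(4);
    taskExecutor.setThreadNamePrefix("test");
    return taskExecutor;
}

@Bean
public JobLauncher batchJobLauncher(JobRepository jobRepository, TaskExecutor batchTaskExecutor) {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    // every launched job now runs on its own thread, so one upload does not block the next
    jobLauncher.setTaskExecutor(batchTaskExecutor);
    return jobLauncher;
}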
This is the reader class
public class ReaderImpl extends PoiItemReader<TestEntity> {}
This is the writer class
@Component
public class WriterImpl implements ItemWriter<TestEntity> {

    private static Logger logger = LoggerFactory.getLogger(WriterImpl.class);

    @Autowired
    private TestEntityRepository testEntityRepository;

    private StepExecution stepExecution;

    @BeforeStep
    public void beforeStep(final StepExecution stepExecution) {
        this.stepExecution = stepExecution;
    }

    @Override
    @SneakyThrows
    public void write(List<? extends TestEntity> modelObjectList) {
        logger.info("Writer is reached...");
        Thread.sleep(3000);
        for (TestEntity testEntity : modelObjectList) {
            testEntity.setJobExecutionId(stepExecution.getJobExecutionId());
            testEntityRepository.save(testEntity);
        }
    }
}
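A note on a likely cause of the mixed-up ids (my reading of the code above, not something stated in the post): WriterImpl is a singleton @Component, so when a second job starts while the first is still running, its beforeStep callback overwrites the shared stepExecution field and the remaining rows of the first file are then saved with the newer job execution id. One way to keep each execution's id separate is to make the writer step-scoped and late-bind the id, for example (a sketch under that assumption):

@Component
@StepScope // one writer instance per step execution instead of a single shared instance
public class WriterImpl implements ItemWriter<TestEntity> {

    private static Logger logger = LoggerFactory.getLogger(WriterImpl.class);

    @Autowired
    private TestEntityRepository testEntityRepository;

    // late-bound from the current step execution, so a concurrently running job cannot overwrite it
    @Value("#{stepExecution.jobExecutionId}")
    private Long jobExecutionId;

    @Override
    public void write(List<? extends TestEntity> modelObjectList) {
        for (TestEntity testEntity : modelObjectList) {
            testEntity.setJobExecutionId(jobExecutionId);
            testEntityRepository.save(testEntity);
        }
    }
}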
The respective RowMapper class is also defined.
public class MapperClass implements RowMapper<TestEntity> {

    @Override
    public TestEntity mapRow(RowSet rowSet) {
        TestEntity testEntity = new TestEntity();
        testEntity.setStudentName(rowSet.getColumnValue(0));
        testEntity.setRollNo(rowSet.getColumnValue(1));
        testEntity.setSection(rowSet.getColumnValue(2));
        return testEntity;
    }
}
This is the model class
@AllArgsConstructor
@Data
@Entity
@NoArgsConstructor
@Table(name = "TEST_ENTITY")
public class TestEntity {

    @GeneratedValue(strategy = GenerationType.AUTO)
    @Id
    private Integer id;

    private String studentName;
    private String rollNo;
    private String section;
    private Long jobExecutionId;
}

Related

How can we take the result of `MethodInvokingTaskletAdapter` as a reader in the Spring Batch Step?

Reference: https://docs.spring.io/spring-batch/docs/current/reference/html/index-single.html#taskletStep and https://github.com/spring-projects/spring-batch/pull/567
Here is the code that I developed
JobConfiguration.java
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public CustomService service() {
        return new CustomService();
    }

    @StepScope
    @Bean
    public MethodInvokingTaskletAdapter methodInvokingTasklet() {
        MethodInvokingTaskletAdapter methodInvokingTaskletAdapter = new MethodInvokingTaskletAdapter();
        methodInvokingTaskletAdapter.setTargetObject(service());
        methodInvokingTaskletAdapter.setTargetMethod("getEmployees");
        return methodInvokingTaskletAdapter;
    }

    @Bean
    public Job methodInvokingJob() {
        return this.jobBuilderFactory.get("methodInvokingJob")
                .start(methodInvokingStep())
                .build();
    }

    @Bean
    public Step methodInvokingStep() {
        // Looking to configure the chunk-based step here; don't know how to do it using MethodInvokingTaskletAdapter
        return this.stepBuilderFactory.get("methodInvokingStep")
                .tasklet(methodInvokingTasklet())
                .build();
    }
}
CustomService.java
public class CustomService {

    public void serviceMethod(String message) {
        System.out.println(message);
    }

    public void invokeMethod() {
        System.out.println("=============== Your method has executed !");
    }

    public List<Employee> getEmployees() {
        // In the real world, this will be a GET API call to the XYZ system
        List<Employee> employees = new ArrayList<>();
        employees.add(Employee.builder().firstName("Ravi").lastName("Shankar").email("ravi.shankar@gmail.com").age(30).build());
        employees.add(Employee.builder().firstName("Parag").lastName("Rane").email("parag.rane@gmail.com").age(11).build());
        employees.add(Employee.builder().firstName("Priya").lastName("Pande").email("priya.pande@gmail.com").age(40).build());
        employees.add(Employee.builder().firstName("Kiran").lastName("khot").email("kiran.khot@gmail.com").age(50).build());
        return employees;
    }
}
Employee.java
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Employee {
    private String firstName;
    private String lastName;
    private String email;
    private int age;
}
MethodInvokingTaskletApplication.java
@EnableBatchProcessing
@SpringBootApplication
public class MethodInvokingTaskletApplication {

    public static void main(String[] args) {
        SpringApplication.run(MethodInvokingTaskletApplication.class, args);
    }
}
To answer your question, you can't. The MethodInvokingTaskletAdapter is meant to adapt a POJO to a Tasklet. We have an ItemReaderAdapter that you can use to adapt a POJO to an ItemReader. You can read about it in the documentation here: https://docs.spring.io/spring-batch/docs/current/api/org/springframework/batch/item/adapter/ItemReaderAdapter.html
Now you'll have an issue with your service as configured in that each call to the delegating POJO is considered an item. That means that your item as configured will be a List<Employee> instead of just an Employee. Given your configuration states it's not the real service, I'll assume that your real service should return an Employee per call and null once the results are exhausted.
To update your configuration (with your service as it is configured in your question) in your sample:
...
@StepScope
@Bean
public ItemReaderAdapter itemReader() {
    ItemReaderAdapter reader = new ItemReaderAdapter();
    reader.setTargetObject(service());
    reader.setTargetMethod("getEmployees");
    return reader;
}

@Bean
public Job methodInvokingJob() {
    return this.jobBuilderFactory.get("methodInvokingJob")
            .start(methodInvokingStep())
            .build();
}

@Bean
public Step methodInvokingStep() {
    return this.stepBuilderFactory.get("methodInvokingStep")
            .<List<Employee>, List<Employee>>chunk(10) // chunk size is arbitrary here
            .reader(itemReader())
            // You'll need to define a writer...
            .writer(itemWriter())
            .build();
}
...
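For completeness, here is a rough sketch of what such a per-call service might look like (my own illustration with a hypothetical nextEmployee method, not code from the question):

public class CustomService {

    private Iterator<Employee> iterator;

    // returns one Employee per call and null once the results are exhausted,
    // which is the contract the ItemReaderAdapter's target method needs to honour
    public Employee nextEmployee() {
        if (iterator == null) {
            iterator = getEmployees().iterator(); // getEmployees() as defined in the question
        }
        return iterator.hasNext() ? iterator.next() : null;
    }
}

With that, the reader would use setTargetMethod("nextEmployee") and the step would be typed as <Employee, Employee>.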

How to use ClassifierCompositeItemProcessor in Spring Batch and write data into same table for Insert and Upsert?

I went through the link https://github.com/spring-projects/spring-batch/blob/master/spring-batch-infrastructure/src/test/java/org/springframework/batch/item/support/ClassifierCompositeItemProcessorTests.java, but could not make much out of it.
I am trying to replace ETL Informatica mapping logic with this batch job. I want to route records with Status=I and Status=U to separate (individual) processors, perform lookups, and massage the data; records with Status=I should then be written directly into the table, while records with Status=U should go through additional complex logic (lookups, massaging, match-and-merge) and then be upserted into the same table.
I tried to do a POC where I segregate the records in the processor.
CustomerClassifier.java
public class CustomerClassifier implements Classifier<Customer, ItemProcessor<Customer, Customer>> {

    private ItemProcessor<Customer, Customer> insertCustomerProcessor;
    private ItemProcessor<Customer, Customer> updateCustomerProcessor;

    public CustomerClassifier(ItemProcessor<Customer, Customer> insertCustomerProcessor, ItemProcessor<Customer, Customer> updateCustomerProcessor) {
        this.insertCustomerProcessor = insertCustomerProcessor;
        this.updateCustomerProcessor = updateCustomerProcessor;
    }

    @Override
    public ItemProcessor<Customer, Customer> classify(Customer customer) {
        return customer.getStatus().equals("I") ? insertCustomerProcessor : updateCustomerProcessor;
    }
}
OddCustomerProcessor.java
public class OddCustomerProcessor implements ItemProcessor<Customer, Customer> {

    @Override
    public Customer process(Customer item) throws Exception {
        Customer customer = new Customer();
        // Perform some massaging and lookups here
        customer.setId(item.getId());
        customer.setFirstName(item.getFirstName());
        customer.setLastName(item.getLastName());
        customer.setBirthdate(item.getBirthdate());
        customer.setStatus(item.getStatus());
        return customer;
    }
}
EvenCustomerProcessor.java
public class EvenCustomerProcessor implements ItemProcessor<Customer, Customer> {

    @Override
    public Customer process(Customer item) throws Exception {
        Customer customer = new Customer();
        // Perform some massaging and lookups here
        customer.setId(item.getId());
        customer.setFirstName(item.getFirstName());
        customer.setLastName(item.getLastName());
        customer.setBirthdate(item.getBirthdate());
        customer.setStatus(item.getStatus());
        return customer;
    }
}
CustomLineAggregator.java
public class CustomLineAggregator implements LineAggregator<Customer> {

    private ObjectMapper objectMapper = new ObjectMapper();

    @Override
    public String aggregate(Customer item) {
        try {
            return objectMapper.writeValueAsString(item);
        } catch (Exception e) {
            throw new RuntimeException("Unable to serialize Customer", e);
        }
    }
}
Customer.java
@Data
@AllArgsConstructor
@Builder
@NoArgsConstructor
public class Customer {
    private Long id;
    private String firstName;
    private String lastName;
    private String birthdate;
    private String status;
}
Error:
The method setClassifier(Classifier<? super Customer,ItemProcessor<?,? extends Customer>>) in the type ClassifierCompositeItemProcessor<Customer,Customer> is not applicable for the arguments (CustomerClassifier)
Configuration
@Configuration
public class JobConfiguration {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    private DataSource dataSource;

    @Bean
    public JdbcPagingItemReader<Customer> customerPagingItemReader() {
        // reading database records using JDBC in a paging fashion
        JdbcPagingItemReader<Customer> reader = new JdbcPagingItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(1000);
        reader.setRowMapper(new CustomerRowMapper());
        // Sort keys
        Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("id", Order.ASCENDING);
        // MySQL implementation of a PagingQueryProvider using database-specific features
        MySqlPagingQueryProvider queryProvider = new MySqlPagingQueryProvider();
        queryProvider.setSelectClause("id, firstName, lastName, birthdate");
        queryProvider.setFromClause("from customer");
        queryProvider.setSortKeys(sortKeys);
        reader.setQueryProvider(queryProvider);
        return reader;
    }

    @Bean
    public EvenCustomerProcessor evenCustomerProcessor() {
        return new EvenCustomerProcessor();
    }

    @Bean
    public OddCustomerProcessor oddCustomerProcessor() {
        return new OddCustomerProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<Customer> customerItemWriter() {
        JdbcBatchItemWriter<Customer> batchItemWriter = new JdbcBatchItemWriter<>();
        batchItemWriter.setDataSource(dataSource);
        batchItemWriter.setSql(""); // Query goes here
        return batchItemWriter;
    }

    @Bean
    public ClassifierCompositeItemProcessor<Customer, Customer> classifierCustomerCompositeItemProcessor() throws Exception {
        ClassifierCompositeItemProcessor<Customer, Customer> itemProcessor = new ClassifierCompositeItemProcessor<>();
        itemProcessor.setClassifier(new CustomerClassifier(evenCustomerProcessor(), oddCustomerProcessor()));
        return itemProcessor;
    }

    @Bean
    public Step step1() throws Exception {
        return stepBuilderFactory.get("step1")
                .<Customer, Customer>chunk(10)
                .reader(customerPagingItemReader())
                .processor(classifierCustomerCompositeItemProcessor())
                .writer(customerItemWriter())
                .build();
    }

    @Bean
    public Job job() throws Exception {
        return jobBuilderFactory.get("job")
                .start(step1())
                .build();
    }
}
You can remove the CustomerClassifier and define the composite item processor as follows:
@Bean
public ClassifierCompositeItemProcessor<Customer, Customer> classifierCustomerCompositeItemProcessor(
        EvenCustomerProcessor evenCustomerProcessor,
        OddCustomerProcessor oddCustomerProcessor
) {
    ClassifierCompositeItemProcessor<Customer, Customer> itemProcessor = new ClassifierCompositeItemProcessor<>();
    itemProcessor.setClassifier(new Classifier<Customer, ItemProcessor<?, ? extends Customer>>() {
        @Override
        public ItemProcessor<?, ? extends Customer> classify(Customer customer) {
            return customer.getStatus().equals("I") ? evenCustomerProcessor : oddCustomerProcessor;
        }
    });
    return itemProcessor;
}
Then update your step definition as follows:
@Bean
public Step step1() throws Exception {
    return stepBuilderFactory.get("step1")
            .<Customer, Customer>chunk(10)
            .reader(customerPagingItemReader())
            .processor(classifierCustomerCompositeItemProcessor(evenCustomerProcessor(), oddCustomerProcessor()))
            .writer(customerItemWriter())
            .build();
}
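For the insert/upsert part of the question, one option (a sketch under the assumption that two differently-parameterised JdbcBatchItemWriter beans, here hypothetically named insertCustomerWriter and upsertCustomerWriter, are acceptable; this is not part of the answer above) is a ClassifierCompositeItemWriter that routes each item to the appropriate writer based on its status:

@Bean
public ClassifierCompositeItemWriter<Customer> classifierCustomerCompositeItemWriter(
        JdbcBatchItemWriter<Customer> insertCustomerWriter,   // writer configured with the INSERT statement
        JdbcBatchItemWriter<Customer> upsertCustomerWriter) { // writer configured with the upsert/merge statement
    ClassifierCompositeItemWriter<Customer> writer = new ClassifierCompositeItemWriter<>();
    // route Status=I items to the insert writer and everything else to the upsert writer
    writer.setClassifier(customer ->
            "I".equals(customer.getStatus()) ? insertCustomerWriter : upsertCustomerWriter);
    return writer;
}

The step's .writer(...) would then point at this composite writer; both delegates write into the same table, just with different SQL.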

Spring Batch Multiple JobExecutionListener - not working

I have a Spring Boot batch application.
I have two jobs and two different JobExecutionListeners, but only one JobExecutionListener is ever called, for both jobs.
Job One
@Configuration
public class TestDriveJob {

    @Autowired
    JobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    TestDriveTasklet testDriveTasklet;

    @Autowired
    BatchJobMgmt batchJobMgmt;

    @Autowired
    TestDriveJobExecutionListener testDriveJobExecutionListener;

    private static final String JOB_NAME = "test-drive-job";

    @Scheduled(fixedDelay = 100 * 1000, initialDelay = 5000)
    public void schedule() throws Exception {
        if (batchJobMgmt.isJobAllowed(JOB_NAME)) {
            JobParameters param = new JobParametersBuilder().addString("JobID", "TEST" + String.valueOf(System.currentTimeMillis())).toJobParameters();
            jobLauncher.run(job(), param);
        }
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get(JOB_NAME).listener(testDriveJobExecutionListener).incrementer(new RunIdIncrementer()).start(testDriveStep()).build();
    }

    @Bean
    public Step testDriveStep() {
        return stepBuilderFactory.get("test-drive-step").tasklet(testDriveTasklet).listener(testDriveJobExecutionListener).build();
    }
}
Job Two
@Configuration
public class ExtendedWarrantyJob {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExtendedWarrantyJob.class);

    @Autowired
    JobLauncher jobLauncher;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    ExtendedWarrantyTasklet extendedWarrantyTasklet;

    @Autowired
    BatchJobMgmt batchJobMgmt;

    @Autowired
    ExtendedWarrantyJobExecutionListener extendedWarrantyJobExecutionListener;

    private static final String JOB_NAME = "extended-warranty-job";

    @Scheduled(fixedDelay = 900 * 1000, initialDelay = 20000)
    public void schedule() throws Exception {
        if (batchJobMgmt.isJobAllowed(JOB_NAME)) {
            LOGGER.debug("Extended Warranty Job Started at :" + new Date());
            JobParameters param = new JobParametersBuilder().addString("JobID", String.valueOf(System.currentTimeMillis())).toJobParameters();
            JobExecution execution = jobLauncher.run(job(), param);
            LOGGER.debug("Extended Warranty Job finished with status :" + execution.getStatus());
        }
    }

    @Bean
    public Job job() {
        return jobBuilderFactory.get(JOB_NAME).listener(extendedWarrantyJobExecutionListener).incrementer(new RunIdIncrementer()).start(extendedWarrantyStep())
                .build();
    }

    @Bean
    public Step extendedWarrantyStep() {
        return stepBuilderFactory.get("extended-warranty-step").tasklet(extendedWarrantyTasklet).build();
    }
}
Job one Listener
@Service
public class TestDriveJobExecutionListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(TestDriveJobExecutionListener.class);

    @Override
    public void beforeJob(JobExecution jobExecution) {
        LOGGER.debug("{} Started at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        LOGGER.debug("{} Completed at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }
}
Job Two Listener
@Service
public class ExtendedWarrantyJobExecutionListener implements JobExecutionListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExtendedWarrantyJobExecutionListener.class);

    @Override
    public void beforeJob(JobExecution jobExecution) {
        LOGGER.debug("{} Started=== at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        LOGGER.debug("{} Completed at : {}", jobExecution.getJobInstance().getJobName(), new Date());
    }
}
Both jobs execute without any issue, but the job-one listener is always the one called; the job-two listener is never invoked.
I tried adding @Qualifier to the @Autowired annotation, but it did not make any difference.
Any help is appreciated.
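No answer was posted for this question. A likely cause (an assumption, not a confirmed fix): both @Configuration classes declare a @Bean factory method with the same name, job(), so one Job bean definition overrides the other and both schedulers end up launching the same job, with the same listener. Giving the factory methods distinct names avoids the clash, for example:

// In TestDriveJob
@Bean
public Job testDriveJob() {
    return jobBuilderFactory.get(JOB_NAME)
            .listener(testDriveJobExecutionListener)
            .incrementer(new RunIdIncrementer())
            .start(testDriveStep())
            .build();
}

// In ExtendedWarrantyJob
@Bean
public Job extendedWarrantyJob() {
    return jobBuilderFactory.get(JOB_NAME)
            .listener(extendedWarrantyJobExecutionListener)
            .incrementer(new RunIdIncrementer())
            .start(extendedWarrantyStep())
            .build();
}

The schedule() methods would then call testDriveJob() and extendedWarrantyJob() respectively instead of job().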

How to copy a file from an FTP server to a local directory using SFTP with Spring Boot and Java

I have code for inbound and outbound channel adapters over SFTP. I want to call those methods via the Spring Boot scheduler rather than polling. I am looking for an example of how to call the resultFileHandler() method.
public class SftpConfig {

    @Value("${nodephone.directory.sftp.host}")
    private String sftpHost;

    @Value("${nodephone.directory.sftp.port}")
    private int sftpPort;

    @Value("${nodephone.directory.sftp.user}")
    private String sftpUser;

    @Value("${nodephone.directory.sftp.password}")
    private String sftpPasword;

    @Value("${nodephone.directory.sftp.remote.directory.download}")
    private String sftpRemoteDirectoryDownload;

    @Value("${nodephone.directory.sftp.remote.directory.upload}")
    private String sftpRemoteDirectoryUpload;

    @Value("${nodephone.directory.sftp.remote.directory.filter}")
    private String sftpRemoteDirectoryFilter;

    @Value("${nodephone.directory.sftp.remote.directory.localDirectory}")
    private String sftpLocalDirectory;

    // private FtpOrderRequestHandler handler;

    @Bean
    public SessionFactory<LsEntry> sftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
        factory.setHost(sftpHost);
        factory.setPort(sftpPort);
        factory.setUser(sftpUser);
        factory.setPassword(sftpPasword);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<LsEntry>(factory);
    }

    @Bean
    public SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
        SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
        fileSynchronizer.setDeleteRemoteFiles(true);
        fileSynchronizer.setRemoteDirectory(sftpRemoteDirectoryDownload);
        fileSynchronizer.setFilter(new SftpSimplePatternFileListFilter(sftpRemoteDirectoryFilter));
        return fileSynchronizer;
    }

    @Bean
    @InboundChannelAdapter(channel = "fromSftpChannel", poller = @Poller(cron = "0/5 * * * * *"))
    public MessageSource<File> sftpMessageSource() {
        SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(
                sftpInboundFileSynchronizer());
        source.setAutoCreateLocalDirectory(true);
        source.setLocalFilter(new AcceptOnceFileListFilter<File>());
        source.setLocalDirectory(new File("/local"));
        return source;
    }

    @Bean
    @ServiceActivator(inputChannel = "fromSftpChannel")
    public MessageHandler resultFileHandler() {
        return new MessageHandler() {
            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                System.out.println("********************** " + message.getPayload());
            }
        };
    }
}
I have tested this with the @Configuration annotation and it reads the file from the server, but I want to run this from a cron trigger instead of polling. How do I call the resultFileHandler() method?
I've never done this with Spring Integration in any production code, although I have done something like the below to download files from remote servers using SFTP/FTP.
I'm only using the SftpOutboundGateway (there may be better ways) to call the "mget" method and fetch the payload (files).
@Configuration
@ConfigurationProperties(prefix = "sftp")
@Setter
@Getter
@EnableIntegration
public class RemoteFileConfiguration {

    private String clients;
    private String hosts;
    private int ports;
    private String users;
    private String passwords;

    @Bean(name = "clientSessionFactory")
    public SessionFactory<LsEntry> clientSessionFactory() {
        DefaultSftpSessionFactory sf = new DefaultSftpSessionFactory();
        sf.setHost(hosts);
        sf.setPort(ports);
        sf.setUser(users);
        sf.setPassword(passwords);
        sf.setAllowUnknownKeys(true);
        return new CachingSessionFactory<>(sf);
    }

    @Bean
    @ServiceActivator(inputChannel = "sftpChannel")
    public MessageHandler clientMessageHandler() {
        SftpOutboundGateway sftpOutboundGateway = new SftpOutboundGateway(
                clientSessionFactory(), "mget", "payload");
        sftpOutboundGateway.setAutoCreateLocalDirectory(true);
        sftpOutboundGateway.setLocalDirectory(new File("/users/localPath/client/INPUT/"));
        sftpOutboundGateway.setFileExistsMode(FileExistsMode.REPLACE_IF_MODIFIED);
        sftpOutboundGateway.setFilter(new AcceptOnceFileListFilter<>());
        return sftpOutboundGateway;
    }
}
@MessagingGateway
public interface SFTPGateway {

    @Gateway(requestChannel = "sftpChannel")
    List<File> get(String dir);
}
To ensure cron controls when this runs, I have used a Tasklet that is executed by Spring Batch whenever I need the transfer to run on a cron expression.
@Slf4j
@Getter
@Setter
public class RemoteFileInboundTasklet implements Tasklet {

    private RemoteFileTemplate remoteFileTemplate;
    private String remoteClientDir;
    private String clientName;
    private SFTPGateway sftpGateway;

    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext)
            throws Exception {
        List<File> files = sftpGateway.get(remoteClientDir);
        if (CollectionUtils.isEmpty(files)) {
            log.warn("No file was downloaded for client {}.", clientName);
            return RepeatStatus.FINISHED;
        }
        log.info("Total file: {}", files.size());
        return RepeatStatus.FINISHED;
    }
}
NOTE: If you don't want to use Batch's Tasklet, you can use your own @Component class and inject the gateway to call its "get" method.
@Autowired
private SFTPGateway sftpGateway;
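If you go that route, a minimal sketch of such a component could look like the following (my own illustration; the cron expression and remote directory are placeholders, and @EnableScheduling is assumed to be configured):

@Component
public class SftpDownloadScheduler {

    @Autowired
    private SFTPGateway sftpGateway;

    // runs on a cron schedule instead of channel polling; expression and directory are hypothetical
    @Scheduled(cron = "0 0/15 * * * *")
    public void downloadFiles() {
        List<File> files = sftpGateway.get("/remote/dir");
        System.out.println("Downloaded " + files.size() + " file(s)");
    }
}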

How should I use .tasklet() / .chunk() to finish the job successfully?

I use Spring Batch to clone a table from a source to a target database. The job is started manually from the service layer using a jobLauncher, passing in parameters.
Everything works, but with the current configuration (below), which uses .chunk(10) in the step definition, only 10 rows are cloned and I get Caused by: java.sql.SQLException: Result set already closed.
How should I define the step so that it reads and writes the whole table from the source to the target DB?
@Configuration
@EnableBatchProcessing
public class DatasetProcessingContext {

    private static final String OVERRIDEN_BY_JOB_PARAMETER = null;
    private static final String DATASET_PROCESSING_STEP = "datasetProcessingStep";
    private static final String DATASET_PROCESSING_JOB = "datasetProcessingJob";

    public static final String SUBSYSTEM = "subsystem";
    public static final String SQL = "sql";
    public static final String SOURCE_DATASOURCE = "sourceDatasource";
    public static final String INSERT_QUERY = "insertQuery";
    public static final String TARGET_DATASOURCE = "targetDatasource";

    @Autowired
    @Qualifier(DEV_DATA_SOURCE)
    private DataSource devDataSource;

    //set of datasources

    @Autowired
    private PlatformTransactionManager transactionManager;

    @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
    @Autowired
    private Map<String, TableMessageDataRowMapper> tableMessageDataRowMappers;

    @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
    @Autowired
    private Map<String, TableMessageDataPreparedStatementSetter> messageDataPreparedStatementSetters;

    @Autowired
    private JobBuilderFactory jobsFactory;

    @Autowired
    private StepBuilderFactory stepsFactory;

    @Bean
    public JobRepository jobRepository() throws Exception {
        return new MapJobRepositoryFactoryBean(transactionManager).getObject();
    }

    @Bean
    public JobRegistry jobRegistry() {
        return new MapJobRegistry();
    }

    @Bean
    public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor() {
        JobRegistryBeanPostProcessor postProcessor = new JobRegistryBeanPostProcessor();
        postProcessor.setJobRegistry(jobRegistry());
        return postProcessor;
    }

    @Bean
    public JobLauncher jobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository());
        return jobLauncher;
    }

    @Bean
    public static StepScope stepScope() {
        return new StepScope();
    }

    @Bean
    @SuppressWarnings("unchecked")
    @Scope(value = "step", proxyMode = ScopedProxyMode.INTERFACES)
    public ItemStreamReader jdbcReader(@Value("#{jobParameters['" + SUBSYSTEM + "']}") String subsystem,
                                       @Value("#{jobParameters['" + SQL + "']}") String sql,
                                       @Value("#{jobParameters['" + SOURCE_DATASOURCE + "']}") String sourceDatasource) {
        JdbcCursorItemReader jdbcCursorItemReader = new JdbcCursorItemReader();
        jdbcCursorItemReader.setDataSource(getDataSourceFromEnum(TargetDataSource.valueOf(sourceDatasource)));
        jdbcCursorItemReader.setSql(sql);
        jdbcCursorItemReader.setRowMapper((RowMapper) tableMessageDataRowMappers
                .get(subsystem + TABLE_MESSAGE_DATA_ROW_MAPPER));
        return jdbcCursorItemReader;
    }

    @Bean
    @SuppressWarnings("unchecked")
    @Scope(value = "step", proxyMode = ScopedProxyMode.INTERFACES)
    public ItemWriter jdbcWriter(@Value("#{jobParameters['" + SUBSYSTEM + "']}") String subsystem,
                                 @Value("#{jobParameters['" + INSERT_QUERY + "']}") String insertQuery,
                                 @Value("#{jobParameters['" + TARGET_DATASOURCE + "']}") String targetDatasource) {
        JdbcBatchItemWriter jdbcWriter = new JdbcBatchItemWriter();
        jdbcWriter.setDataSource(getDataSourceFromEnum(TargetDataSource.valueOf(targetDatasource)));
        jdbcWriter.setSql(insertQuery);
        jdbcWriter.setItemPreparedStatementSetter(messageDataPreparedStatementSetters
                .get(subsystem + TABLE_MESSAGE_DATA_PREPARED_STATEMENT_SETTER));
        return jdbcWriter;
    }

    @Bean
    @SuppressWarnings("unchecked")
    public Step datasetProcessingStep() {
        return stepsFactory.get(DATASET_PROCESSING_STEP)
                // should I create a Tasklet, or a chunk with some CompletionPolicy?
                .chunk(10)
                .reader(jdbcReader(OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER))
                .writer(jdbcWriter(OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER))
                .allowStartIfComplete(true)
                .build();
    }

    @Bean
    public Job datasetProcessingJob() {
        return jobsFactory.get(DATASET_PROCESSING_JOB).start(datasetProcessingStep()).build();
    }
}
Using .chunk(new DefaultResultCompletionPolicy()) in the step definition is suitable for my case. This policy returns true from isComplete(RepeatContext context, RepeatStatus result) in the case of a null result - that is, once the ResultSet is exhausted.
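For reference, the step definition from the question would then look roughly like this (a sketch reusing the reader/writer beans defined above):

@Bean
@SuppressWarnings("unchecked")
public Step datasetProcessingStep() {
    return stepsFactory.get(DATASET_PROCESSING_STEP)
            // the chunk is considered complete only when the reader returns null,
            // so the whole table is read and written in a single pass
            .chunk(new DefaultResultCompletionPolicy())
            .reader(jdbcReader(OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER))
            .writer(jdbcWriter(OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER, OVERRIDEN_BY_JOB_PARAMETER))
            .allowStartIfComplete(true)
            .build();
}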
