No transaction in the writer of Spring Batch (Spring Boot)

In a Spring Boot app, I use Spring Batch. I have two datasources.
For the reader, I use a JpaPagingItemReader.
My batch config class:
@Configuration
@EnableBatchProcessing
public class ScoreConfig {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    @Qualifier("billingEntityManagerFactory")
    EntityManagerFactory billingEmf;
    @Autowired
    @Qualifier("crEntityManagerFactory")
    EntityManagerFactory crEmf;
    @Bean
    BatchConfigurer configurer(@Qualifier("billingDataSource") DataSource dataSource) {
        return new DefaultBatchConfigurer(dataSource);
    }
    @Bean
    public JpaPagingItemReader<PR> billingJpaPagingItemReader() {
        return new JpaPagingItemReaderBuilder<PR>()
                .name("prItemReader")
                .entityManagerFactory(billingEmf)
                .queryString("...")
                .pageSize(10)
                .build();
    }
    @Bean
    public ItemProcessor<PR, CR> processor() {
        return new ScoreProcessor();
    }
    @Bean
    public JpaItemWriter<CR> writer() {
        JpaItemWriter<CR> writer = new JpaItemWriter<>();
        writer.setEntityManagerFactory(crEmf);
        return writer;
    }
    @Bean
    public Job scoreJob() {
        return jobBuilderFactory
                .get("scoreJob")
                .start(scoreStep())
                .build();
    }
    @Bean
    public Step scoreStep() {
        return stepBuilderFactory
                .get("scoreStep")
                .<PR, CR>chunk(1)
                .reader(billingJpaPagingItemReader())
                .processor(processor())
                .writer(writer())
                .build();
    }
}
My config class for my writer datasource:
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(
        entityManagerFactoryRef = "crEntityManagerFactory",
        transactionManagerRef = "crTransactionManager",
        basePackages = "com.cnn.cs.domain.repository.cr")
public class CrDatasourceConfig {
    @Bean(name = "crDataSourceProperties")
    @ConfigurationProperties("cr.datasource")
    public DataSourceProperties crDataSourceProperties() {
        return new DataSourceProperties();
    }
    @Bean(name = "crDataSource")
    @ConfigurationProperties("cr.datasource.configuration")
    public DataSource crDataSource() {
        return crDataSourceProperties().initializeDataSourceBuilder()
                .type(HikariDataSource.class).build();
    }
    @Bean(name = "crEntityManagerFactory")
    public LocalContainerEntityManagerFactoryBean crEntityManagerFactory(EntityManagerFactoryBuilder builder) {
        return builder
                .dataSource(crDataSource())
                .packages(Cr.class)
                .build();
    }
    @Bean(name = "crTransactionManager")
    public PlatformTransactionManager crTransactionManager(
            @Qualifier("crEntityManagerFactory") LocalContainerEntityManagerFactoryBean crEntityManagerFactory) {
        return new JpaTransactionManager(crEntityManagerFactory.getObject());
    }
}
When I run the application with a dummy writer, the job completes without issue.
With my writer, I get javax.persistence.TransactionRequiredException:
no transaction is in progress
Edit: the solution was to put both transaction managers in a ChainedTransactionManager.
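A minimal sketch of that fix, assuming a billingTransactionManager bean exists alongside crTransactionManager (the billing bean name is an assumption, it is not shown in the question) and that the chained manager is then set on the step. ChainedTransactionManager comes from spring-data-commons (org.springframework.data.transaction) and is deprecated in recent releases; it gives best-effort coordination of the two commits, not two-phase commit:
@Bean
public PlatformTransactionManager chainedTransactionManager(
        @Qualifier("billingTransactionManager") PlatformTransactionManager billingTx,
        @Qualifier("crTransactionManager") PlatformTransactionManager crTx) {
    // Transactions are started in registration order and committed in reverse order; best effort only.
    return new ChainedTransactionManager(billingTx, crTx);
}
@Bean
public Step scoreStep(@Qualifier("chainedTransactionManager") PlatformTransactionManager chainedTx) {
    return stepBuilderFactory
            .get("scoreStep")
            .<PR, CR>chunk(1)
            .reader(billingJpaPagingItemReader())
            .processor(processor())
            .writer(writer())
            .transactionManager(chainedTx)
            .build();
}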

Related

Why are the data sources interfering in Spring Batch when using a RepositoryItemReader?

I am trying to migrate some data between a Postgres database and MongoDB using Spring Batch. I have a very simple ItemReader, ItemProcessor, and ItemWriter configured, and everything works as intended. However, if I switch to a RepositoryItemReader, I'm getting the following error:
java.lang.IllegalStateException: Already value [org.springframework.jdbc.datasource.ConnectionHolder@684430c1] for key [HikariDataSource (HikariPool-1)] bound to thread
If I understand correctly, something is wrong with the EntityManager or TransactionManager, but I cannot figure out what, or why it works with a simple ItemReader but not with a RepositoryItemReader, even though both use the same data source.
I would be very grateful for any help.
Here is my source db configuration:
package com.example.batch.primary;
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(
        entityManagerFactoryRef = "primaryEntityManagerFactory",
        transactionManagerRef = "primaryTransactionManager",
        basePackages = {"com.example.batch.primary"}
)
public class PrimaryDBConfig {
    @Bean(name = "primaryDataSource")
    @Primary
    public DataSource primaryDatasource() {
        DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create()
                .driverClassName("org.postgresql.Driver")
                .url("jdbc:postgresql://localhost:5432/postgres")
                .username("test")
                .password("test");
        return dataSourceBuilder.build();
    }
    @Bean(name = "primaryEntityManagerFactory")
    @Primary
    public LocalContainerEntityManagerFactoryBean primaryEntityManagerFactory(
            EntityManagerFactoryBuilder builder,
            @Qualifier("primaryDataSource") DataSource primaryDataSource) {
        return builder.dataSource(primaryDataSource)
                .packages("com.example.batch.primary")
                .build();
    }
    @Bean(name = "primaryTransactionManager")
    public PlatformTransactionManager primaryTransactionManager(
            @Qualifier("primaryEntityManagerFactory") EntityManagerFactory primaryEntityManagerFactory) {
        return new JpaTransactionManager(primaryEntityManagerFactory);
    }
}
Here is the configuration of MongoDB:
package com.example.batch.secondary;
@EnableMongoRepositories(basePackages = "com.example.batch.secondary")
@Configuration
public class MongoDBConfig {
    @Bean
    public MongoClient mongo() {
        ConnectionString connectionString = new ConnectionString("mongodb+srv://mongoadmin:blablabla.mongodb.net/?retryWrites=true&w=majority");
        MongoClientSettings mongoClientSettings = MongoClientSettings.builder()
                .applyConnectionString(connectionString)
                .build();
        return MongoClients.create(mongoClientSettings);
    }
    @Bean
    public MongoTemplate mongoTemplate() throws Exception {
        return new MongoTemplate(mongo(), "test");
    }
}
Here is the RepositoryItemReader:
package com.example.batch.stepcomponents;
@Component
public class RepositoryReader extends RepositoryItemReader<Partner> {
    public RepositoryReader(@Autowired PartnerRepository partnerRepository) {
        setRepository(partnerRepository);
        setPageSize(1);
        setSort(Map.of("id", Sort.Direction.ASC));
        setMethodName("findAll");
    }
}
Batch Config:
@Configuration
@EnableBatchProcessing
public class BatchConfig {
    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    RepositoryReader repositoryReader;
    @Autowired
    CustomWriter customWriter;
    @Autowired
    CustomProcessor customProcessor;
    @Bean
    public Job createJob() {
        return jobBuilderFactory.get("MyJob")
                .incrementer(new RunIdIncrementer())
                .flow(createStep())
                .end()
                .build();
    }
    @Bean
    public Step createStep() {
        return stepBuilderFactory.get("MyStep")
                .<Partner, Student>chunk(1)
                .reader(repositoryReader)
                .processor(customProcessor)
                .writer(customWriter)
                .build();
    }
}
So I tried taking out the EntityManagerFactory and the TransactionManager, and now it works. I guess they are already initialized automatically when starting up the server.
Yes, by default, if you provide a DataSource bean, Spring Batch will use a DataSourceTransactionManager, not the JPA one as you expect. This is explained in the Javadoc of EnableBatchProcessing:
The transaction manager provided by this annotation will be of type:
* ResourcelessTransactionManager if no DataSource is provided within the context
* DataSourceTransactionManager if a DataSource is provided within the context
In order to use the JPA transaction manager, you need to configure a custom BatchConfigurer and override getTransactionManager, something like:
@Bean
public BatchConfigurer batchConfigurer(DataSource dataSource, EntityManagerFactory entityManagerFactory) {
    return new DefaultBatchConfigurer(dataSource) {
        @Override
        public PlatformTransactionManager getTransactionManager() {
            return new JpaTransactionManager(entityManagerFactory);
        }
    };
}
Note this will no longer be required starting from v5; see:
Revisit the configuration of infrastructure beans with #EnableBatchProcessing
Spring Batch 5.0.0-M6 and 4.3.7 are out!
You can also set the JPA transaction manager on your step:
@Bean
public Step createStep(JpaTransactionManager jpaTransactionManager) {
    return stepBuilderFactory.get("MyStep")
            .<Partner, Student>chunk(1)
            .reader(repositoryReader)
            .processor(customProcessor)
            .writer(customWriter)
            .transactionManager(jpaTransactionManager)
            .build();
}
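For reference, a minimal sketch of the v5-style configuration mentioned above (assuming Spring Batch 5.x; the batchDataSource and jpaTransactionManager bean names are illustrative, not taken from the question). There, the infrastructure references are declared on the annotation and the transaction manager is passed to the chunk directly:
@Configuration
@EnableBatchProcessing(dataSourceRef = "batchDataSource", transactionManagerRef = "jpaTransactionManager")
public class BatchV5Config {
    @Bean
    public Step createStep(JobRepository jobRepository, JpaTransactionManager jpaTransactionManager,
                           RepositoryReader repositoryReader, CustomProcessor customProcessor, CustomWriter customWriter) {
        // In v5 the builder factories are gone; the step takes the JobRepository and the transaction manager explicitly.
        return new StepBuilder("MyStep", jobRepository)
                .<Partner, Student>chunk(1, jpaTransactionManager)
                .reader(repositoryReader)
                .processor(customProcessor)
                .writer(customWriter)
                .build();
    }
}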
Adding 'spring-data-jpa' as a dependency will automatically configure a JpaTransactionManager if no other TransactionManager is defined.

Customized DB Connection won't rollback on error (Spring Batch : Chunk)

In my first Spring Batch chunk job (I'm a beginner), I wrote a DB utility class to use in the chunk step, since I need to override the default connection attributes set by application.properties.
However, it causes a rollback problem (rollback never happens) on errors.
Any advice on how to improve this logic?
DBUtility class:
public class DBUtility {
    // Constructor
    public DBUtility() {
        // ... some business logic to get connection attributes dynamically.
    }
    @Bean
    @BatchDataSource
    @Primary
    public DriverManagerDataSource getConnection() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driverNameGottenAbove);
        dataSource.setUrl(urlGottenAbove);
        dataSource.setUsername(userNameGottenAbove);
        dataSource.setPassword(passwordGottenAbove);
        return dataSource;
    }
    @Bean
    public DataSourceTransactionManager transactionManager() {
        DataSourceTransactionManager dtm = new DataSourceTransactionManager(getConnection());
        dtm.setRollbackOnCommitFailure(true);
        return dtm;
    }
    @Bean
    public JdbcTemplate jdbcTemplate() {
        return new JdbcTemplate(getConnection());
    }
}
BatchConfiguration class (chunk):
@SpringBootApplication
@EnableBatchProcessing
@Transactional
public class BatchConfiguration extends DefaultBatchConfigurer {
    ...
    private DBUtility dbUtil;
    private DriverManagerDataSource ds;
    private PlatformTransactionManager ptm;
    // Constructor
    public BatchConfiguration() {
        dbUtil = new DBUtility();
        ds = dbUtil.getConnection();
        ptm = dbUtil.transactionManager();
    }
    // Override
    @Override
    public JobRepository createJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(ds);
        factory.setTransactionManager(ptm);
        JobRepository repo = factory.getObject();
        return repo;
    }
    @Bean
    public Job BatchJob() {
        return jobBuilderFactory.get("BatchJob")
                .incrementer(new RunIdIncrementer())
                .listener(jobListener())
                .flow(step1())
                .end().build();
    }
    @Bean
    public Step step1() {
        DefaultTransactionAttribute att = new DefaultTransactionAttribute();
        att.setPropagationBehavior(Propagation.REQUIRED.value());
        return stepBuilderFactory.get("step1")
                .transactionManager(ptm)
                .listener(stepListener())
                .<Entity, Entity>chunk(COMMIT_INTERVAL)
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .readerIsTransactionalQueue()
                .transactionAttribute(att)
                .build();
    }
    ...
}
Thank you for checking this post.

How to set up two transaction managers?

I am working on a Spring application that has already set up a transaction manager.
In a configuration class, it already sets up an entity manager factory read from a persistence.xml, and then a JpaTransactionManager.
I am required to create a Spring Batch implementation, and the problem is that, as I have found out from different posts, when using the @EnableBatchProcessing annotation a second transaction manager seems to be registered, and I cannot persist data inside my tasklets.
Is it possible to use two transaction managers or configure my application in a way that I will be able to persist my data?
Can you please provide me with sample code?
Thanks in advance.
EDIT:
This is the application config class, which already exists in the application:
@Configuration
@ComponentScan({
        ...
})
@EnableJpaRepositories("...")
@EnableTransactionManagement
@EnableJpaAuditing
@Import({SecurityConfig.class})
@PropertySource("classpath:application.properties")
public class ApplicationConfig {
    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
        LocalContainerEntityManagerFactoryBean factory = new LocalContainerEntityManagerFactoryBean();
        factory.setPersistenceXmlLocation("classpath:/META-INF/persistence.xml");
        return factory;
    }
    @Bean
    public PlatformTransactionManager transactionManager() {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(entityManagerFactory().getObject());
        return transactionManager;
    }
    @Bean
    public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }
}
This is my batch config:
@Configuration
@EnableBatchProcessing
public class BatchConfig {
    @Autowired
    private JobBuilderFactory jobs;
    @Autowired
    private StepBuilderFactory steps;
    @Autowired
    @Qualifier("entityManagerFactory")
    private LocalEntityManagerFactoryBean batchEntityManagerFactory;
}
from which I am getting:
Caused by: org.springframework.beans.factory.BeanCreationException: Could not autowire field: private org.springframework.orm.jpa.LocalEntityManagerFactoryBean com.xxx.xxx.xxx.configuration.BatchConfig.batchEntityManagerFactory; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type [org.springframework.orm.jpa.LocalEntityManagerFactoryBean] found for dependency: expected at least 1 bean which qualifies as autowire candidate for this dependency. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true), @org.springframework.beans.factory.annotation.Qualifier(value=entityManagerFactory)}
EDIT 2:
This is what I have done:
#EnableJpaRepositories("xxx")
#Configuration
#EnableBatchProcessing
#PropertySource("classpath:application.properties")
public class BatchConfig {
#Autowired
private JobBuilderFactory jobs;
#Autowired
private StepBuilderFactory steps;
#Autowired
private ReportReaderProcessor reportReaderProcessor;
#Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean factory = new LocalContainerEntityManagerFactoryBean();
factory.setPersistenceXmlLocation("classpath:/META-INF/persistence.xml");
return factory;
}
#Bean
public BatchConfigurer batchConfigurer() {
return new DefaultBatchConfigurer() {
#Override
public PlatformTransactionManager getTransactionManager() {
JpaTransactionManager jpaTransactionManager = new JpaTransactionManager();
jpaTransactionManager.setEntityManagerFactory(entityManagerFactory().getObject());
return jpaTransactionManager;
}
};
}
#Bean
public JobRepository jobRepository() throws Exception {
MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean();
factory.setTransactionManager(batchConfigurer().getTransactionManager());
return (JobRepository) factory.getObject();
}
#Bean
public SimpleJobLauncher simpleJobLauncher() throws Exception {
SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
simpleJobLauncher.setJobRepository(jobRepository());
return simpleJobLauncher;
}
#Bean
public Step readReports() {
return steps
.get("readReports")
.tasklet(reportReaderProcessor)
.build();
}
#Bean
public Job reportJob() {
return jobs
.get("submitReportJob")
.start(readReports())
.build();
}
}
but now I am getting another error:
15:47:23,657 ERROR [stderr] (pool-36-thread-1) org.springframework.transaction.InvalidIsolationLevelException: DefaultJpaDialect does not support custom isolation levels due to limitations in standard JPA. Specific arrangements may be implemented in custom JpaDialect variants.
There is an open issue for this case here: https://jira.spring.io/browse/BATCH-2294 which is fixed in version 4.1.0.M3. To use a custom transaction manager, you need to provide a BatchConfigurer in your application context, for example:
@Bean
public BatchConfigurer batchConfigurer() {
    return new DefaultBatchConfigurer() {
        @Override
        public PlatformTransactionManager getTransactionManager() {
            return new MyTransactionManager();
        }
    };
}

Cannot autowire beans when separating configuration classes

I have a JavaConfig-configured Spring Batch job. The main job configuration class is CrawlerJobConfiguration. Until now, I had all the configuration (infrastructure, autowired beans, etc.) in this class and it worked fine. So I decided to separate the job configuration from the autowired beans and infrastructure beans configuration and create two more configuration classes, Beans and MysqlInfrastructureConfiguration.
But now I am having problems running my job. I'm receiving a NullPointerException when the application tries to use the autowired fields, indicating that autowiring is not working.
I put a breakpoint in the methods that create the autowired beans to make sure they are being called, and they really are, so I cannot figure out what the problem could be.
java.lang.NullPointerException: null
at br.com.alexpfx.supermarket.batch.tasklet.StartCrawlerTasklet.execute(StartCrawlerTasklet.java:27) ~[classes/:na]
at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:406) ~[spring-batch-core-3.0.6.RELEASE.jar:3.0.6.RELEASE]
Main job configuration class:
@Configuration
@EnableBatchProcessing
public class CrawlerJobConfiguration {
    @Autowired
    private InfrastructureConfiguration infrastructureConfiguration;
    @Autowired
    private StepBuilderFactory steps;
    @Autowired
    Environment environment;
    @Bean
    public Job job(JobBuilderFactory jobs) {
        Job theJob = jobs.get("job").start(crawlerStep()).next(processProductStep()).build();
        ((AbstractJob) theJob).setRestartable(true);
        return theJob;
    }
    @Bean
    public Step crawlerStep() {
        TaskletStep crawlerStep = steps.get("crawlerStep").tasklet(crawlerTasklet()).build();
        crawlerStep.setAllowStartIfComplete(true);
        return crawlerStep;
    }
    @Bean
    public Step processProductStep() {
        TaskletStep processProductStep = steps.get("processProductStep")
                .<TransferObject, Product>chunk(10)
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
        processProductStep.setAllowStartIfComplete(true);
        return processProductStep;
    }
    private Tasklet crawlerTasklet() {
        return new StartCrawlerTasklet();
    }
    private ItemProcessor<TransferObject, Product> processor() {
        return new ProductProcessor();
    }
    private ItemReader<TransferObject> reader() {
        return new ProductItemReader();
    }
    private ItemWriter<Product> writer() {
        return new HibernateProductsItemWriter();
    }
}
Beans configuration class:
@Configuration
@EnableBatchProcessing
public class Beans {
    @Bean
    public Crawler crawler() {
        return new RibeiraoCrawler(new UserAgentFactory());
    }
    @Bean
    public ProductBo productBo() {
        return new ProductBoImpl();
    }
    @Bean
    public ProductDao productDao() {
        return new ProductDaoImpl();
    }
    @Bean
    public CrawlerListener listener() {
        CrawlerListener listener = new RibeiraoListener();
        return listener;
    }
    @Bean
    public ProductList getProductList() {
        return new ProductList();
    }
}
MysqlInfrastructureConfiguration:
@Configuration
@EnableBatchProcessing
@PropertySource("classpath:database.properties")
@EnableJpaRepositories(basePackages = {"br.com.alexpfx.supermarket.domain"})
public class MysqlInfrastructureConfiguration implements InfrastructureConfiguration {
    @Value("${jdbc.url}")
    String url;
    @Value("${jdbc.driverClassName}")
    String driverClassName;
    @Value("${jdbc.username}")
    String username;
    @Value("${jdbc.password}")
    String password;
    @Bean
    @Override
    public DataSource getDataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driverClassName);
        dataSource.setUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        return dataSource;
    }
    @Bean
    @Override
    public PlatformTransactionManager transactionManager() {
        JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(entityManagerFactory());
        transactionManager.setDataSource(getDataSource());
        return transactionManager;
    }
    @Bean
    @Override
    public EntityManagerFactory entityManagerFactory() {
        LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
        em.setDataSource(getDataSource());
        em.setPackagesToScan(new String[]{"br.com.alexpfx.supermarket.domain"});
        JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
        em.setJpaVendorAdapter(vendorAdapter);
        em.setJpaProperties(additionalJpaProperties());
        em.afterPropertiesSet();
        return em.getObject();
    }
    private Properties additionalJpaProperties() {
        Properties properties = new Properties();
        properties.setProperty("hibernate.hbm2ddl.auto", "create");
        properties.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
        properties.setProperty("hibernate.show_sql", "true");
        properties.setProperty("current_session_context_class", "thread");
        return properties;
    }
}
tasklet:
public class StartCrawlerTasklet implements Tasklet {
    @Autowired
    private Crawler crawler;
    @Autowired
    private CrawlerListener listener;
    @Override
    public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext) throws Exception {
        crawler.setListener(listener);
        crawler.setStopCondition(new TimeLimitStopCondition(1, TimeUnit.MINUTES));
        crawler.crawl();
        return RepeatStatus.FINISHED;
    }
}
StartCrawlerTasklet uses the @Autowired annotation, so it should be a bean as well. So change your code:
private Tasklet crawlerTasklet() {
    return new StartCrawlerTasklet();
}
to a bean definition:
@Bean
public Tasklet crawlerTasklet() {
    return new StartCrawlerTasklet();
}
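With that change, the step can reference the Tasklet bean so that the container injects its @Autowired fields before the step runs. A minimal sketch of that wiring (injecting the bean as a method parameter is one possible approach, not taken from the answer):
@Bean
public Step crawlerStep(Tasklet crawlerTasklet) {
    // crawlerTasklet is now a Spring-managed bean, so its Crawler and CrawlerListener fields are injected.
    TaskletStep crawlerStep = steps.get("crawlerStep").tasklet(crawlerTasklet).build();
    crawlerStep.setAllowStartIfComplete(true);
    return crawlerStep;
}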

spring batch causing spring data not to commit transaction

I have a Spring MVC application with a batch process powered by Spring Batch. If I remove the batch configuration, all transactions commit. If a batch job is run, the job completes successfully but no data is committed to the database.
My configurations are as follows:
@Configuration
@EnableWebMvc
@EnableAsync
@EnableScheduling
@EnableBatchProcessing(modular = false)
@EnableTransactionManagement
@EnableRabbit
@EnableJpaRepositories(basePackages = "zw.co.econet.workstation.repositories")
@ComponentScan(basePackages = {"zw.co.workstation"})
@PropertySource(value = {"classpath:application.properties"})
public class WebConfiguration extends WebMvcConfigurerAdapter {
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        registry.addResourceHandler("/resources/**").addResourceLocations("/resources/");
    }
    @Bean
    public InternalResourceViewResolver jspViewResolver() {
        InternalResourceViewResolver bean = new InternalResourceViewResolver();
        bean.setPrefix("/WEB-INF/pages/");
        bean.setSuffix(".jsp");
        return bean;
    }
}
Spring Batch configuration:
@Configuration
public class BatchConfiguration {
    @Autowired
    private JobBuilderFactory jobs;
    @Autowired
    private StepBuilderFactory steps;
    @Autowired
    DataSource dataSource;
    @Qualifier("creditQueueItemWriter")
    @Autowired
    private ItemWriter queueItemWriter;
    @Qualifier("creditQueueProcessor")
    @Autowired
    private CreditQueueProcessor creditQueueProcessor;
    @Qualifier("creditQueueReader")
    @Autowired
    private CreditQueueReader creditQueueReader;
    @Qualifier("transactionManager")
    @Autowired
    private PlatformTransactionManager transactionManager;
    @Bean
    public AsyncTaskExecutor taskExecutor() {
        return new SimpleAsyncTaskExecutor();
    }
    @Bean
    @Autowired
    protected Step creditSubscriberStep() throws Exception {
        return steps.get("creditSubscriberStep")
                .allowStartIfComplete(true)
                .startLimit(3)
                .chunk(10)
                .reader(creditQueueReader)
                .processor(creditQueueProcessor)
                .writer(queueItemWriter)
                .faultTolerant()
                .build();
    }
    @Bean
    public Job creditSubscribersJob() throws Exception {
        JobBuilder builder = jobs.get("creditSubscriberJob");
        return builder
                .start(creditSubscriberStep())
                .build();
    }
    @Bean
    public JobLauncher jobLauncher() throws Exception {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setJobRepository(jobRepository());
        jobLauncher.setTaskExecutor(taskExecutor());
        jobLauncher.afterPropertiesSet();
        return jobLauncher;
    }
    @Bean
    public JobRepository jobRepository() {
        try {
            JobRepositoryFactoryBean factoryBean = new JobRepositoryFactoryBean();
            factoryBean.setDataSource(dataSource);
            factoryBean.setTransactionManager(transactionManager);
            factoryBean.setIsolationLevelForCreate("DEFAULT");
            return factoryBean.getObject();
        } catch (Exception e) {
            return null;
        }
    }
    @Bean
    public DataSourceInitializer databasePopulator() {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.addScript(new ClassPathResource("org/springframework/batch/core/schema-mysql.sql"));
        populator.setContinueOnError(true);
        populator.setIgnoreFailedDrops(true);
        DataSourceInitializer initializer = new DataSourceInitializer();
        initializer.setDatabasePopulator(populator);
        initializer.setDataSource(dataSource);
        return initializer;
    }
}
Credit writer:
@Service
public class CreditQueueItemWriter implements ItemWriter<CreditQueue> {
    private Logger logger = LoggerFactory.getLogger(getClass());
    @Qualifier("creditQueueService")
    @Autowired
    private CreditQueueService creditQueueService;
    @Override
    public void write(List<? extends CreditQueue> list) throws Exception {
        logger.info("Processing credit list with size {}", list.size());
        for (CreditQueue creditQueue : list) {
            logger.info("Updating >>>> {} ", creditQueue);
            creditQueue.setProcessingState("PROCESSED");
            creditQueueService.save(creditQueue);
        }
        logger.info("chunk processed");
    }
}
