Spring Batch Multiple DataSource Session/EntityManager Closed

I am trying to pull records from a SQL Server database and persist them into MySQL using Spring Boot and Spring Batch (JpaPagingItemReader and JpaItemWriter).
I have configured multiple DataSources.
However, I am facing the error below.
org.springframework.batch.item.ItemStreamException: Error while closing item reader
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.close(AbstractItemCountingItemStreamItemReader.java:138)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.beans.factory.support.DisposableBeanAdapter.invokeCustomDestroyMethod(DisposableBeanAdapter.java:337)
at org.springframework.beans.factory.support.DisposableBeanAdapter.destroy(DisposableBeanAdapter.java:271)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.destroyBean(DefaultSingletonBeanRegistry.java:571)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.destroySingleton(DefaultSingletonBeanRegistry.java:543)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.destroySingleton(DefaultListableBeanFactory.java:957)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.destroySingletons(DefaultSingletonBeanRegistry.java:504)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.destroySingletons(DefaultListableBeanFactory.java:964)
at org.springframework.context.support.AbstractApplicationContext.destroyBeans(AbstractApplicationContext.java:1041)
at org.springframework.context.support.AbstractApplicationContext.doClose(AbstractApplicationContext.java:1017)
at org.springframework.context.support.AbstractApplicationContext.close(AbstractApplicationContext.java:967)
at org.springframework.batch.core.launch.support.CommandLineJobRunner.start(CommandLineJobRunner.java:377)
at org.springframework.batch.core.launch.support.CommandLineJobRunner.main(CommandLineJobRunner.java:597)
at net.com.org.batch.MyApplication.main(MyApplication.java:15)
Caused by: java.lang.IllegalStateException: Session/EntityManager is closed
at org.hibernate.internal.AbstractSharedSessionContract.checkOpen(AbstractSharedSessionContract.java:344)
at org.hibernate.engine.spi.SharedSessionContractImplementor.checkOpen(SharedSessionContractImplementor.java:137)
at org.hibernate.internal.AbstractSharedSessionContract.checkOpenOrWaitingForAutoClose(AbstractSharedSessionContract.java:350)
at org.hibernate.internal.SessionImpl.close(SessionImpl.java:413)
at org.springframework.batch.item.database.JpaPagingItemReader.doClose(JpaPagingItemReader.java:232)
at org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader.close(AbstractItemCountingItemStreamItemReader.java:135)
... 17 common frames omitted
20:10:55.875 [main] DEBUG org.springframework.beans.factory.support.DefaultListableBeanFactory - Retrieved dependent beans for bean 'jpaMappingContext': [mySqljobRepository, sqlServerLogsRepository]
Below is my batch and step configuration:
@Autowired
private StepBuilderFactory stepBuilderFactory;

@Autowired
@Qualifier("mysqlEntityManager")
private LocalContainerEntityManagerFactoryBean mysqlLocalContainerEntityManagerFactoryBean;

@Autowired
@Qualifier("secondarySqlEntityManager")
private LocalContainerEntityManagerFactoryBean localContainerEntityManagerFactoryBean;

@Autowired
@Qualifier("mysqlTransactionManager")
private PlatformTransactionManager mySqlplatformTransactionManager;

@Autowired
@Qualifier("secondaryTransactionManager")
private PlatformTransactionManager secondaryTransactionManager;

@Autowired
private JobBuilderFactory jobBuilderFactory;

@Bean
public JpaPagingItemReader itemReader(PlatformTransactionManager secondaryTransactionManager) {
    JpaPagingItemReader<SqlServerJobLogs> serverJobLogsJpaPagingItemReader = new JpaPagingItemReader<>();
    serverJobLogsJpaPagingItemReader.setMaxItemCount(1000);
    serverJobLogsJpaPagingItemReader.setPageSize(100);
    serverJobLogsJpaPagingItemReader.setEntityManagerFactory(localContainerEntityManagerFactoryBean.getNativeEntityManagerFactory());
    serverJobLogsJpaPagingItemReader.setQueryString("select p from SqlServerJobLogs p");
    return serverJobLogsJpaPagingItemReader;
}

@Bean
public ItemProcessor itemProcessor() {
    return new DataItemProcessor();
}

@Bean
public ItemWriter itemWriter(PlatformTransactionManager mySqlplatformTransactionManager) {
    DataWriter dataWriter = new DataWriter();
    return dataWriter;
}

@Bean
public Step step() {
    return stepBuilderFactory.get("myJob").chunk(100)
            .reader(itemReader(secondaryTransactionManager))
            .processor(itemProcessor())
            .writer(itemWriter(mySqlplatformTransactionManager))
            .build();
}

@Bean(name = "myJob")
public Job myJob() throws Exception {
    return jobBuilderFactory.get("myJob").start(step()).build();
}

@Bean
public ResourcelessTransactionManager resourcelessTransactionManager() {
    return new ResourcelessTransactionManager();
}

@Bean
public JobRepository jobRepository() throws Exception {
    MapJobRepositoryFactoryBean mapJobRepositoryFactoryBean = new MapJobRepositoryFactoryBean(resourcelessTransactionManager());
    return mapJobRepositoryFactoryBean.getObject();
}

@Bean
public SimpleJobLauncher jobLauncher() throws Exception {
    SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
    simpleJobLauncher.setJobRepository(jobRepository());
    return simpleJobLauncher;
}
I have tried to configure a BatchConfigurer, but no luck.
Please let me know if I need to configure anything else apart from the details mentioned above.
Thanks in advance.

I would like to answer the question. A big thanks to @MahmoudBenHassine.
Providing a custom transaction manager by overriding DefaultBatchConfigurer#getTransactionManager works only with Spring Batch v4.1+, as said in the comments. We need to use Spring Boot v2.1 to get Spring Batch 4.1.
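For reference, a minimal sketch of that override, assuming Spring Batch 4.1+ and reusing the secondaryTransactionManager qualifier from the configuration above:

@Configuration
public class MultiDataSourceBatchConfigurer extends DefaultBatchConfigurer {

    // Qualifier name taken from the question's configuration above
    @Autowired
    @Qualifier("secondaryTransactionManager")
    private PlatformTransactionManager secondaryTransactionManager;

    // Overridable since Spring Batch 4.1: makes steps run under this transaction manager
    @Override
    public PlatformTransactionManager getTransactionManager() {
        return secondaryTransactionManager;
    }
}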

Related

When I add Spring Batch configuration I get an error

In my project I use multiple schemas (multiple DataSources).
When I add the Spring Batch configuration I get this error: No qualifying bean of type 'org.springframework.transaction.PlatformTransactionManager' available: expected single matching bean but found 5
But when I remove the Spring Batch configuration, the error goes away.
@Configuration
@EnableBatchProcessing
@Import(MyDataSourceClassConfig.class)
public class TestBatchJobConfiguration extends DefaultBatchConfigurer {

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    ....
}
If you are also facing the same problem, you need to verify two points.
First, do not create a transaction manager bean named transactionManager (that is the default one used by Spring Batch).
Second, override getTransactionManager to specify which transaction manager you want to use, and setDataSource to specify which DataSource you want to use:
@Autowired
@Qualifier("myPersonalTransactionManager")
private PlatformTransactionManager transactionManager;

@Override
public PlatformTransactionManager getTransactionManager() {
    return transactionManager;
}

@Override
@Autowired
public void setDataSource(@Qualifier("thirdDataSource") DataSource batchDataSource) {
    super.setDataSource(batchDataSource);
}

Define an in-memory JobRepository

I'm testing Spring Batch using Spring Boot. My need is to define jobs working on an Oracle database, but I don't want to save job and step states inside this DB.
I've read in the documentation that I can use an in-memory repository with the MapJobRepositoryFactoryBean.
Then, I've implemented this bean:
@Bean
public JobRepository jobRepository() {
    MapJobRepositoryFactoryBean factoryBean = new MapJobRepositoryFactoryBean(new ResourcelessTransactionManager());
    try {
        JobRepository jobRepository = factoryBean.getObject();
        return jobRepository;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
But when my job starts, the first thing Spring Batch does is create its tables in the Oracle DB, and it continues to use the Oracle DataSource. It's as if my JobRepository definition isn't taken into account.
What did I miss?
EDIT: I'm using Spring Boot 1.5.3 and Spring Batch 3.0.7
With Spring Boot 2.x, the solution is simpler.
You have to extend the DefaultBatchConfigurer class like this:
@Component
public class NoPersistenceBatchConfigurer extends DefaultBatchConfigurer {
    @Override
    public void setDataSource(DataSource dataSource) {
    }
}
Without a DataSource, the framework automatically switches to using the MapJobRepository.
A few things here:
If you have a DataSource configured in your ApplicationContext, by default Spring Batch will try to use it.
In order not to use a DataSource when one is available within the ApplicationContext, you'll need to create your own BatchConfigurer. You can do that by extending the DefaultBatchConfigurer.
Don't use the MapJobRepository except for testing purposes. It has a number of issues (thread safety, etc.) and is not recommended for production use. Use an in-memory database like HSQLDB instead (you'll still need to create your own BatchConfigurer to do so); see the sketch below.
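A minimal sketch of that last point, assuming spring-jdbc's EmbeddedDatabaseBuilder is on the classpath (the batchMetadataDataSource bean name is illustrative, not from the original answer):

@Configuration
public class HsqldbBatchConfigurer extends DefaultBatchConfigurer {

    // Embedded HSQLDB used only for Batch metadata; illustrative bean name
    @Bean
    public DataSource batchMetadataDataSource() {
        return new EmbeddedDatabaseBuilder()
                .setType(EmbeddedDatabaseType.HSQL)
                // Metadata schema script shipped inside spring-batch-core
                .addScript("classpath:org/springframework/batch/core/schema-hsqldb.sql")
                .build();
    }

    @Override
    @Autowired
    public void setDataSource(@Qualifier("batchMetadataDataSource") DataSource dataSource) {
        super.setDataSource(dataSource);
    }
}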
Thanks to the comment of pvpkiran, I've found my problem: it's necessary to define a JobLauncher bean.
Below is an example:
@Bean
public JobRepository jobRepository() {
    MapJobRepositoryFactoryBean factoryBean = new MapJobRepositoryFactoryBean(new ResourcelessTransactionManager());
    try {
        JobRepository jobRepository = factoryBean.getObject();
        return jobRepository;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}

@Bean
public JobLauncher jobLauncher(JobRepository jobRepository) {
    SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
    jobLauncher.setJobRepository(jobRepository);
    return jobLauncher;
}
If using Spring Boot and @EnableBatchProcessing, you would extend the DefaultBatchConfigurer and override the createJobRepository method. Create a ResourcelessTransactionManager and a JobRepository using MapJobRepositoryFactoryBean; the rest of the beans will be auto-created by Spring Boot.
@Configuration
public class InMemoryBatchContextConfigurer extends DefaultBatchConfigurer {

    @Bean
    public ResourcelessTransactionManager resourcelessTransactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Override
    protected JobRepository createJobRepository() throws Exception {
        MapJobRepositoryFactoryBean factoryBean = new MapJobRepositoryFactoryBean();
        factoryBean.setTransactionManager(resourcelessTransactionManager());
        return factoryBean.getObject();
    }
}
Extend the DefaultBatchConfigurer class and override the createJobRepository method as shown below.
@Configuration
public class InMemoryBatchConfigurer extends DefaultBatchConfigurer {
    @Override
    protected JobRepository createJobRepository() throws Exception {
        return new MapJobRepositoryFactoryBean().getObject();
    }
}

Quartz JDBCJobStore with RoutingDataSource

For my application, we are using Spring's
org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource.
The target DataSources are configured and chosen based on the request's domain URL.
E.g.:
qa.example.com ==> target datasource = DB1
qa-test.example.com ==> target datasource = DB2
Following is the configuration for the same:
@Bean(name = "dataSource")
public DataSource dataSource() throws PropertyVetoException, ConfigurationException {
    EERoutingDatabase routingDB = new EERoutingDatabase();
    Map<Object, Object> targetDataSources = datasourceList();
    routingDB.setTargetDataSources(targetDataSources);
    return routingDB;
}

public class EERoutingDatabase extends AbstractRoutingDataSource {
    @Override
    protected Object determineCurrentLookupKey() {
        // This is derived from the request's URL/domain
        return SessionUtil.getDataSourceHolder();
    }
}
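SessionUtil here is the asker's own class; a hypothetical implementation of such a holder (not shown in the question) would bind the lookup key to the current thread:

// Hypothetical lookup-key holder; the question does not show SessionUtil's code
public final class SessionUtil {

    private static final ThreadLocal<String> DATA_SOURCE_KEY = new ThreadLocal<>();

    // Called by a filter/interceptor once the request's domain is known
    public static void setDataSourceHolder(String key) {
        DATA_SOURCE_KEY.set(key);
    }

    public static String getDataSourceHolder() {
        return DATA_SOURCE_KEY.get();
    }

    // Clear after the request so pooled threads don't leak keys
    public static void clear() {
        DATA_SOURCE_KEY.remove();
    }
}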
The task now is to use Quartz JDBCJobStore to store the Quartz jobs/triggers.
The preferred option is JobStoreCMT.
We used the following config:
@Configuration
public class QuartzConfig {

    private static final Logger LOG = LoggerFactory.getLogger(QuartzConfig.class);
    private static final String QUARTZ_CONFIG_FILE = "ee-quartz.properties";

    @Autowired
    private DataSource dataSource;

    @Autowired
    private PlatformTransactionManager transactionManager;

    @Autowired
    private ApplicationContext applicationContext;

    /**
     * Spring wrapper over the Quartz Scheduler bean
     */
    @Bean(name = "quartzRealTimeScheduler")
    SchedulerFactoryBean schedulerFactoryBean() {
        LOG.info("Creating QUARTZ Scheduler for real time Job invocation");
        SchedulerFactoryBean factory = new SchedulerFactoryBean();
        factory.setConfigLocation(new ClassPathResource(QUARTZ_CONFIG_FILE));
        factory.setDataSource(dataSource);
        factory.setTransactionManager(transactionManager);
        factory.setJobFactory(springBeanJobFactory());
        factory.setWaitForJobsToCompleteOnShutdown(true);
        factory.setApplicationContextSchedulerContextKey("applicationContext");
        return factory;
    }

    @Bean
    public SpringBeanJobFactory springBeanJobFactory() {
        AutoWiringSpringBeanJobFactory jobFactory = new AutoWiringSpringBeanJobFactory();
        jobFactory.setApplicationContext(applicationContext);
        jobFactory.setIgnoredUnknownProperties("applicationContext");
        return jobFactory;
    }
}
and the following is the config in the Quartz properties file (ee-quartz.properties):
org.quartz.scheduler.instanceId=AUTO
org.quartz.jobStore.useProperties=false
org.quartz.jobStore.misfireThreshold: 60000
org.quartz.jobStore.driverDelegateClass = org.quartz.impl.jdbcjobstore.PostgreSQLDelegate
On starting the application, the following exception occurs:
Caused by: java.lang.IllegalStateException: Cannot determine target DataSource for lookup key [null]
at org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource.determineTargetDataSource(AbstractRoutingDataSource.java:202) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at com.expertly.config.EERoutingDatabase.determineTargetDataSource(EERoutingDatabase.java:60) ~[EERoutingDatabase.class:na]
at org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource.getConnection(AbstractRoutingDataSource.java:164) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at org.springframework.jdbc.datasource.DataSourceUtils.doGetConnection(DataSourceUtils.java:111) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at org.springframework.jdbc.datasource.DataSourceUtils.getConnection(DataSourceUtils.java:77) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at org.springframework.jdbc.support.JdbcUtils.extractDatabaseMetaData(JdbcUtils.java:289) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at org.springframework.jdbc.support.JdbcUtils.extractDatabaseMetaData(JdbcUtils.java:329) ~[spring-jdbc-4.1.6.RELEASE.jar:4.1.6.RELEASE]
at org.springframework.scheduling.quartz.LocalDataSourceJobStore.initialize(LocalDataSourceJobStore.java:149) ~[spring-context-support-4.0.1.RELEASE.jar:4.0.1.RELEASE]
at org.quartz.impl.StdSchedulerFactory.instantiate(StdSchedulerFactory.java:1321) ~[quartz-2.2.2.jar:na]
at org.quartz.impl.StdSchedulerFactory.getScheduler(StdSchedulerFactory.java:1525) ~[quartz-2.2.2.jar:na]
at org.springframework.scheduling.quartz.SchedulerFactoryBean.createScheduler(SchedulerFactoryBean.java:599) ~[spring-context-support-4.0.1.RELEASE.jar:4.0.1.RELEASE]
at org.springframework.scheduling.quartz.SchedulerFactoryBean.afterPropertiesSet(SchedulerFactoryBean.java:482) ~[spring-context-support-4.0.1.RELEASE.jar:4.0.1.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1612) ~[spring-beans-4.0.1.RELEASE.jar:4.0.1.RELEASE]
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1549) ~[spring-beans-4.0.1.RELEASE.jar:4.0.1.RELEASE]
It seems that:
Quartz is trying to create connections with my DataSource upfront.
Since my DataSource isn't a concrete one (it's a routing DataSource) and, in addition, doesn't know at config time which target DB to connect to, it fails.
Do we have any provision where Quartz can be used with a RoutingDataSource? If not, what would be the next best thing?
Ideally, you could try making the SchedulerFactoryBean @Lazy.
But it seems lazy initialization will not work (there is a bug report for this); a workaround is listed in its comments:
Create the SchedulerFactoryBean dynamically after a ContextRefreshedEvent is received on the root context.
Let us know if this works.
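A rough sketch of that workaround (my construction, not from the bug report): build the factory by hand once the root context has refreshed, reusing the DataSource and transaction manager beans from the question.

@Component
public class QuartzStartupListener implements ApplicationListener<ContextRefreshedEvent> {

    @Autowired
    private DataSource dataSource;

    @Autowired
    private PlatformTransactionManager transactionManager;

    private Scheduler scheduler;

    @Override
    public void onApplicationEvent(ContextRefreshedEvent event) {
        // React only to the root context, and only once
        if (event.getApplicationContext().getParent() != null || scheduler != null) {
            return;
        }
        try {
            // Built manually instead of as a @Bean so Quartz initializes after
            // the routing lookup key can be resolved
            SchedulerFactoryBean factory = new SchedulerFactoryBean();
            factory.setDataSource(dataSource);
            factory.setTransactionManager(transactionManager);
            factory.afterPropertiesSet();
            scheduler = factory.getScheduler();
            scheduler.start();
        } catch (Exception e) {
            throw new IllegalStateException("Quartz startup failed", e);
        }
    }
}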

Spring Batch Jpa Repository Save not committing the data

I am using Spring Batch and JPA to process a batch job and perform updates. I am using the default repository implementations,
and I am using repository.save to save the modified object in the processor.
Also, I don't have any @Transactional annotation specified on the processor or writer.
I don't see any exceptions anywhere, and the selects work fine.
Is there a setting like "setAutoCommit(true)" that I should be using for JPA to save the data in the DB?
Here is my step, reader, and writer config.
My config class is annotated with @EnableBatchProcessing:
@EnableBatchProcessing
public class UpgradeBatchConfiguration extends DefaultBatchConfigurer {

    @Autowired
    private PlatformTransactionManager transactionManager;

    @Override
    protected JobRepository createJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(getdataSource());
        factory.setTransactionManager(transactionManager);
        factory.setTablePrefix("CFTES_OWNER.BATCH_");
        factory.afterPropertiesSet();
        return factory.getObject();
    }

    @Bean(name = "updateFilenetJobStep")
    public Step jobStep(StepBuilderFactory stepBuilderFactory,
            @Qualifier("updateFileNetReader") RepositoryItemReader reader,
            @Qualifier("updateFileNetWriter") ItemWriter writer,
            @Qualifier("updateFileNetProcessor") ItemProcessor processor) {
        return stepBuilderFactory.get("jobStep").allowStartIfComplete(true).chunk(1)
                .reader(reader).processor(processor).writer(writer)
                .transactionManager(transactionManager).build();
    }

    @Bean(name = "updateFileNetWriter")
    public ItemWriter getItemWriter() {
        return new BatchItemWriter();
    }

    @Bean(name = "updateFileNetReader")
    public RepositoryItemReader<Page<TermsAndConditionsErrorEntity>> getItemReader(
            TermsAndConditionsErrorRepository repository) {
        RepositoryItemReader<Page<TermsAndConditionsErrorEntity>> reader = new RepositoryItemReader<Page<TermsAndConditionsErrorEntity>>();
        reader.setRepository(repository);
        reader.setMethodName("findAll");
        HashMap<String, Direction> map = new HashMap<String, Direction>();
        map.put("transactionId", Direction.ASC);
        reader.setSort(map);
        return reader;
    }
}
And in the writer, this is how I save, using the repository:
repository.save(entity);
I was able to solve this by injecting a JpaTransactionManager throughout (the job repository, job, step, etc.), instead of the autowired PlatformTransactionManager.
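A minimal sketch of what that change might look like, assuming an EntityManagerFactory bean is available (the jpaTransactionManager bean name is illustrative):

// Illustrative: a JPA-aware transaction manager built from the application's EntityManagerFactory
@Bean
public PlatformTransactionManager jpaTransactionManager(EntityManagerFactory entityManagerFactory) {
    return new JpaTransactionManager(entityManagerFactory);
}

// The step then uses this manager so repository.save() commits with each chunk
@Bean(name = "updateFilenetJobStep")
public Step jobStep(StepBuilderFactory stepBuilderFactory,
        RepositoryItemReader reader, ItemProcessor processor, ItemWriter writer,
        PlatformTransactionManager jpaTransactionManager) {
    return stepBuilderFactory.get("jobStep").allowStartIfComplete(true).chunk(1)
            .reader(reader).processor(processor).writer(writer)
            .transactionManager(jpaTransactionManager).build();
}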

spring-boot-starter-jta-atomikos and spring-boot-starter-batch

Is it possible to use both these starters in a single application?
I want to load records from a CSV file into a database table. The Spring Batch tables are stored in a different database, so I assume I need to use JTA to handle the transaction.
Whenever I add @EnableBatchProcessing to my @Configuration class it configures a PlatformTransactionManager, which stops this being auto-configured by Atomikos.
Are there any spring boot + batch + jta samples out there that show how to do this?
Many Thanks,
James
I just went through this and I found something that seems to work. As you note, @EnableBatchProcessing causes a DataSourceTransactionManager to be created, which messes up everything. I'm using modular=true in @EnableBatchProcessing, so the ModularBatchConfiguration class is activated.
What I did was stop using @EnableBatchProcessing and instead copy the entire ModularBatchConfiguration class into my project. Then I commented out the transactionManager() method, since the Atomikos configuration creates the JtaTransactionManager. I also had to override the jobRepository() method, because that was hardcoded to use the DataSourceTransactionManager created inside DefaultBatchConfiguration.
I also had to explicitly import the JtaAutoConfiguration class. This wires everything up correctly (according to the Actuator's "beans" endpoint; thank god for that). But when you run it, the transaction manager throws an exception because something somewhere sets an explicit transaction isolation level. So I also wrote a BeanPostProcessor to find the transaction manager and call txnMgr.setAllowCustomIsolationLevels(true); a sketch follows.
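A minimal sketch of such a post-processor (my reconstruction; the answer does not show its code). Spring's JtaTransactionManager does expose setAllowCustomIsolationLevels:

// Sketch: flips allowCustomIsolationLevels on the JTA transaction manager once it is created
@Component
public class IsolationLevelPostProcessor implements BeanPostProcessor {

    @Override
    public Object postProcessAfterInitialization(Object bean, String beanName) {
        if (bean instanceof JtaTransactionManager) {
            // Spring's JtaTransactionManager rejects custom isolation levels by default
            ((JtaTransactionManager) bean).setAllowCustomIsolationLevels(true);
        }
        return bean;
    }
}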
Now everything works, but while the job is running I cannot fetch the current data from the batch_step_execution table using JdbcTemplate, even though I can see the data in SQLyog. This must have something to do with transaction isolation, but I haven't been able to understand it yet.
Here is what I have for my configuration class, copied from Spring and modified as noted above. PS: I have my DataSource that points to the database with the batch tables annotated as @Primary. Also, I changed my DataSource beans to be instances of org.apache.tomcat.jdbc.pool.XADataSource; I'm not sure if that's necessary.
@Configuration
@Import(ScopeConfiguration.class)
public class ModularJtaBatchConfiguration implements ImportAware {

    @Autowired(required = false)
    private Collection<DataSource> dataSources;

    private BatchConfigurer configurer;

    @Autowired
    private ApplicationContext context;

    @Autowired(required = false)
    private Collection<BatchConfigurer> configurers;

    private AutomaticJobRegistrar registrar = new AutomaticJobRegistrar();

    @Bean
    public JobRepository jobRepository(DataSource batchDataSource, JtaTransactionManager jtaTransactionManager) throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(batchDataSource);
        factory.setTransactionManager(jtaTransactionManager);
        factory.afterPropertiesSet();
        return factory.getObject();
    }

    @Bean
    public JobLauncher jobLauncher() throws Exception {
        return getConfigurer(configurers).getJobLauncher();
    }

    // @Bean
    // public PlatformTransactionManager transactionManager() throws Exception {
    //     return getConfigurer(configurers).getTransactionManager();
    // }

    @Bean
    public JobExplorer jobExplorer() throws Exception {
        return getConfigurer(configurers).getJobExplorer();
    }

    @Bean
    public AutomaticJobRegistrar jobRegistrar() throws Exception {
        registrar.setJobLoader(new DefaultJobLoader(jobRegistry()));
        for (ApplicationContextFactory factory : context.getBeansOfType(ApplicationContextFactory.class).values()) {
            registrar.addApplicationContextFactory(factory);
        }
        return registrar;
    }

    @Bean
    public JobBuilderFactory jobBuilders(JobRepository jobRepository) throws Exception {
        return new JobBuilderFactory(jobRepository);
    }

    @Bean
    // hopefully this will autowire the Atomikos JTA txn manager
    public StepBuilderFactory stepBuilders(JobRepository jobRepository, JtaTransactionManager ptm) throws Exception {
        return new StepBuilderFactory(jobRepository, ptm);
    }

    @Bean
    public JobRegistry jobRegistry() throws Exception {
        return new MapJobRegistry();
    }

    @Override
    public void setImportMetadata(AnnotationMetadata importMetadata) {
        AnnotationAttributes enabled = AnnotationAttributes.fromMap(importMetadata.getAnnotationAttributes(
                EnableBatchProcessing.class.getName(), false));
        Assert.notNull(enabled,
                "@EnableBatchProcessing is not present on importing class " + importMetadata.getClassName());
    }

    protected BatchConfigurer getConfigurer(Collection<BatchConfigurer> configurers) throws Exception {
        if (this.configurer != null) {
            return this.configurer;
        }
        if (configurers == null || configurers.isEmpty()) {
            if (dataSources == null || dataSources.isEmpty()) {
                throw new UnsupportedOperationException("You are screwed");
            } else if (dataSources != null && dataSources.size() == 1) {
                DataSource dataSource = dataSources.iterator().next();
                DefaultBatchConfigurer configurer = new DefaultBatchConfigurer(dataSource);
                configurer.initialize();
                this.configurer = configurer;
                return configurer;
            } else {
                throw new IllegalStateException("To use the default BatchConfigurer the context must contain no more than " +
                        "one DataSource, found " + dataSources.size());
            }
        }
        if (configurers.size() > 1) {
            throw new IllegalStateException(
                    "To use a custom BatchConfigurer the context must contain precisely one, found "
                            + configurers.size());
        }
        this.configurer = configurers.iterator().next();
        return this.configurer;
    }
}

@Configuration
class ScopeConfiguration {

    private StepScope stepScope = new StepScope();

    private JobScope jobScope = new JobScope();

    @Bean
    public StepScope stepScope() {
        stepScope.setAutoProxy(false);
        return stepScope;
    }

    @Bean
    public JobScope jobScope() {
        jobScope.setAutoProxy(false);
        return jobScope;
    }
}
I found a solution where I was able to keep @EnableBatchProcessing but had to implement BatchConfigurer and the Atomikos beans; see my full answer in this SO answer.
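For illustration, a minimal sketch of a BatchConfigurer along those lines, keeping @EnableBatchProcessing and handing the batch beans a JTA transaction manager (the batchDataSource qualifier is an assumption; the linked answer has the complete Atomikos setup):

@Configuration
@EnableBatchProcessing
public class JtaBatchConfigurer implements BatchConfigurer {

    // Assumed beans: the batch-metadata DataSource and the Atomikos-backed JtaTransactionManager
    @Autowired
    @Qualifier("batchDataSource")
    private DataSource batchDataSource;

    @Autowired
    private JtaTransactionManager jtaTransactionManager;

    @Override
    public JobRepository getJobRepository() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(batchDataSource);
        factory.setTransactionManager(jtaTransactionManager);
        factory.afterPropertiesSet();
        return factory.getObject();
    }

    @Override
    public PlatformTransactionManager getTransactionManager() {
        return jtaTransactionManager;
    }

    @Override
    public JobLauncher getJobLauncher() throws Exception {
        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(getJobRepository());
        launcher.afterPropertiesSet();
        return launcher;
    }

    @Override
    public JobExplorer getJobExplorer() throws Exception {
        JobExplorerFactoryBean factory = new JobExplorerFactoryBean();
        factory.setDataSource(batchDataSource);
        factory.afterPropertiesSet();
        return factory.getObject();
    }
}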
