Spring Batch: typeMismatch.java.sql.Date when importing a CSV file

I'm trying to load this CSV file into the database with Spring Batch.
users.csv:
2021-06-22,test1@gmail.com, testFullname1, testMatricule1, 1234, testUsername1
2021-06-22,test2@gmail.com, testFullname2, testMatricule2, 0000, testUsername2
And I get this error:
Failed to convert property value of type 'java.lang.String' to required type 'java.sql.Date' for property
Here is my batch configuration:
package sofrecom.collaborateur.config;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import sofrecom.collaborateur.model.DAOUser;
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public DataSource dataSource;

    @Bean
    public FlatFileItemReader<DAOUser> reader() {
        FlatFileItemReader<DAOUser> reader = new FlatFileItemReader<DAOUser>();
        reader.setResource(new ClassPathResource("users.csv"));
        reader.setLineMapper(new DefaultLineMapper<DAOUser>() {{
            setLineTokenizer(new DelimitedLineTokenizer() {{
                setNames(new String[] { "dateIntegration", "email", "fullname", "matricule", "password", "username" });
            }});
            setFieldSetMapper(new BeanWrapperFieldSetMapper<DAOUser>() {{
                setTargetType(DAOUser.class);
            }});
        }});
        return reader;
    }

    @Bean
    public UserItemProcessor processor() {
        return new UserItemProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<DAOUser> writer() {
        JdbcBatchItemWriter<DAOUser> writer = new JdbcBatchItemWriter<DAOUser>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<DAOUser>());
        writer.setSql("INSERT INTO user (date_integration, email, fullname, matricule, password, username) VALUES (:dateIntegration, :email, :fullname, :matricule, :password, :username)");
        writer.setDataSource(dataSource);
        return writer;
    }

    @Bean
    public Job importUserJob(JobCompletionNotificationListener listener) {
        return jobBuilderFactory.get("importUserJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener)
                .flow(step1())
                .end()
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .<DAOUser, DAOUser> chunk(10)
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
    }
}
And this is the user item processor:
package sofrecom.collaborateur.config;
import java.sql.Date;
import java.text.SimpleDateFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder;
import sofrecom.collaborateur.model.DAOUser;
public class UserItemProcessor implements ItemProcessor<DAOUser, DAOUser> {

    private static final Logger log = LoggerFactory.getLogger(UserItemProcessor.class);

    @Autowired
    private PasswordEncoder bcryptEncoder;

    @Override
    public DAOUser process(final DAOUser person) throws Exception {
        final String password = bcryptEncoder.encode(person.getUsername());
        final DAOUser transformedPerson = new DAOUser(person.getDateIntegration(), person.getEmail(), person.getFullname(), person.getMatricule(), password, person.getUsername());
        log.info("Converting (" + person + ") into (" + transformedPerson + ")");
        return transformedPerson;
    }
}
Any solution, please?

The problem is that the BeanWrapperFieldSetMapper does not know, by default, how to convert a String like 2021-06-22 to an object of type java.sql.Date (the type of the dateIntegration field in your domain object DAOUser). The way to tell this mapper how to handle custom conversions is to register a ConversionService that has a converter from String to java.sql.Date. Here is a quick example:
@Bean
public FlatFileItemReader<DAOUser> reader() {
    DefaultConversionService conversionService = new DefaultConversionService();
    conversionService.addConverter(new Converter<String, Date>() { // java.sql.Date
        @Override
        public Date convert(String s) {
            return Date.valueOf(s);
        }
    });
    BeanWrapperFieldSetMapper<DAOUser> fieldSetMapper = new BeanWrapperFieldSetMapper<>();
    fieldSetMapper.setConversionService(conversionService);
    fieldSetMapper.setTargetType(DAOUser.class);
    FlatFileItemReader<DAOUser> reader = new FlatFileItemReader<>();
    reader.setResource(new ClassPathResource("users.csv"));
    reader.setLineMapper(new DefaultLineMapper<DAOUser>() {{
        setLineTokenizer(new DelimitedLineTokenizer() {{
            setNames(new String[] { "dateIntegration", "email", "fullname", "matricule", "password", "username" });
        }});
        setFieldSetMapper(fieldSetMapper);
    }});
    return reader;
}
You can find a complete example based on what you shared in this repository.
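As an alternative, if you would rather not configure a ConversionService, you can implement the FieldSetMapper yourself and parse the date explicitly. Here is a minimal sketch (the class name is illustrative), assuming DAOUser has the six-argument constructor used in your item processor:
import java.sql.Date;

import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.FieldSet;

public class DAOUserFieldSetMapper implements FieldSetMapper<DAOUser> {

    @Override
    public DAOUser mapFieldSet(FieldSet fieldSet) {
        // Parse the ISO date column explicitly instead of relying on bean property conversion
        Date dateIntegration = Date.valueOf(fieldSet.readString("dateIntegration"));
        return new DAOUser(dateIntegration,
                fieldSet.readString("email"),
                fieldSet.readString("fullname"),
                fieldSet.readString("matricule"),
                fieldSet.readString("password"),
                fieldSet.readString("username"));
    }
}
You would then pass an instance of this mapper to setFieldSetMapper instead of the BeanWrapperFieldSetMapper; the trade-off is that the mapping is explicit and type-safe, at the cost of writing it by hand.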

Related

How to correctly add an AsyncItemWriter to a Step in Spring Batch?

In a Spring Batch job like the following, I'm trying to use an AsyncItemWriter:
@Bean
public Step readWriteStep() throws Exception {
    return stepBuilderFactory.get("readWriteStep")
            .listener(listener)
            .<Data, Data>chunk(10)
            .reader(dataItemReader())
            .writer(dataAsyncWriter())
            .build();
}

@Bean
public AsyncItemWriter<Data> dataAsyncWriter() throws Exception {
    AsyncItemWriter<Data> asyncItemWriter = new AsyncItemWriter<>();
    asyncItemWriter.setDelegate(dataItemWriter);
    asyncItemWriter.afterPropertiesSet();
    return asyncItemWriter;
}
If I try it like this, IntelliJ complains:
Required type: ItemWriter<? super Data>
Provided: AsyncItemWriter<Data>
When I change .<Data, Data>chunk(10) to .<Data, Future<Data>>chunk(10), IntelliJ shows no warning, but when I run the job I get the following exception:
java.lang.ClassCastException: Data cannot be cast to class java.util.concurrent.Future (Data is in unnamed module of loader 'app'; java.util.concurrent.Future is in module java.base of loader 'bootstrap')
What are the first and second type parameters in .<Data, Data>chunk(10) for? Is the first what the processor takes and the second what it gives back?
How do I solve this problem?
Your example should compile if you change the step definition to use the following:
.<Data, Future<Data>>chunk(10)
To answer your question about the type parameters: in .<I, O>chunk(10), I is the input type (what the reader returns and the processor takes) and O is the output type (what the processor returns and the writer receives).
That said, this will not work correctly at runtime (as your ClassCastException shows), because the AsyncItemWriter is expected to unwrap items from their enclosing Futures, and those Futures are created by an AsyncItemProcessor.
In other words, AsyncItemWriter and AsyncItemProcessor should be used in conjunction for this pattern to work. Here is a quick example with both of them:
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Future;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.integration.async.AsyncItemProcessor;
import org.springframework.batch.integration.async.AsyncItemWriter;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
@Configuration
@EnableBatchProcessing
public class SO72477556 {

    @Bean
    public ItemReader<Data> dataItemReader() {
        return new ListItemReader<Data>(Arrays.asList());
    }

    @Bean
    public ItemProcessor<Data, Data> dataItemProcessor() {
        return new ItemProcessor<Data, Data>() {
            @Override
            public Data process(Data item) throws Exception {
                return item;
            }
        };
    }

    @Bean
    public AsyncItemProcessor<Data, Data> asyncDataItemProcessor() {
        AsyncItemProcessor<Data, Data> asyncItemProcessor = new AsyncItemProcessor<>();
        asyncItemProcessor.setDelegate(dataItemProcessor());
        asyncItemProcessor.setTaskExecutor(new SimpleAsyncTaskExecutor());
        return asyncItemProcessor;
    }

    @Bean
    public ItemWriter<Data> dataItemWriter() {
        return new ItemWriter<Data>() {
            @Override
            public void write(List<? extends Data> items) throws Exception {
            }
        };
    }

    @Bean
    public AsyncItemWriter<Data> dataAsyncWriter() throws Exception {
        AsyncItemWriter<Data> asyncItemWriter = new AsyncItemWriter<>();
        asyncItemWriter.setDelegate(dataItemWriter());
        asyncItemWriter.afterPropertiesSet();
        return asyncItemWriter;
    }

    @Bean
    public Step readWriteStep(StepBuilderFactory stepBuilderFactory) throws Exception {
        return stepBuilderFactory.get("readWriteStep")
                .<Data, Future<Data>>chunk(10)
                .reader(dataItemReader())
                .processor(asyncDataItemProcessor())
                .writer(dataAsyncWriter())
                .build();
    }

    @Bean
    public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) throws Exception {
        return jobs.get("job")
                .start(readWriteStep(steps))
                .build();
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(SO72477556.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }

    static class Data {}
}

Spring Batch: How to chain multiple ItemProcessors with different types?

I have to compose 2 processors as follows:
Processor 1 implements the ItemProcessor interface as ItemProcessor<A, B> (transforms the data).
Processor 2 implements the ItemProcessor interface as ItemProcessor<B, B> (processes the transformed data).
The CompositeItemProcessor<I, O> requires the delegates to be of the same type; moreover, when passing it to the step, the step is already configured with the fixed types <A, B>.
How could I chain these processors with different types and assign the result to the step's processor?
You need to declare your step as well as your composite processor with <A, B>. Here is a quick example:
import java.util.Arrays;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.CompositeItemProcessor;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableBatchProcessing
public class MyJobConfiguration {

    @Bean
    public ItemReader<A> itemReader() {
        return new ListItemReader<>(Arrays.asList(new A("a1"), new A("a2")));
    }

    @Bean
    public ItemProcessor<A, B> itemProcessor1() {
        return item -> new B(item.name);
    }

    @Bean
    public ItemProcessor<B, B> itemProcessor2() {
        return item -> item; // TODO process item as needed
    }

    @Bean
    public ItemProcessor<A, B> compositeItemProcessor() {
        CompositeItemProcessor<A, B> compositeItemProcessor = new CompositeItemProcessor<>();
        compositeItemProcessor.setDelegates(Arrays.asList(itemProcessor1(), itemProcessor2()));
        return compositeItemProcessor;
    }

    @Bean
    public ItemWriter<B> itemWriter() {
        return items -> {
            for (B item : items) {
                System.out.println("item = " + item.name);
            }
        };
    }

    @Bean
    public Job job(JobBuilderFactory jobs, StepBuilderFactory steps) {
        return jobs.get("job")
                .start(steps.get("step")
                        .<A, B>chunk(2)
                        .reader(itemReader())
                        .processor(compositeItemProcessor())
                        .writer(itemWriter())
                        .build())
                .build();
    }

    public static void main(String[] args) throws Exception {
        ApplicationContext context = new AnnotationConfigApplicationContext(MyJobConfiguration.class);
        JobLauncher jobLauncher = context.getBean(JobLauncher.class);
        Job job = context.getBean(Job.class);
        jobLauncher.run(job, new JobParameters());
    }

    class A {
        String name;
        public A(String name) { this.name = name; }
    }

    class B {
        String name;
        public B(String name) { this.name = name; }
    }
}

Implementing JUnit test cases for Spring Batch jobs called from a controller

I am new to JUnit and struggling to write JUnit test cases for my code. I am working with Spring Boot, Spring Batch and JPA. I have configured the jobs, and reading the file and writing into the DB all work fine. But when it comes to JUnit, I don't even have an idea where to start. Can anyone help me? Below is my code.
FileProcessController.java
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParameter;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import com.sc.batchservice.model.StatusResponse;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@RestController
@RequestMapping("/fileProcess")
public class FileProcessController {

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    @Qualifier("euronetFileParseJob")
    private Job job;

    @GetMapping(path = "/process")
    public @ResponseBody StatusResponse process() throws Exception {
        try {
            Map<String, JobParameter> parameters = new HashMap<>();
            parameters.put("date", new JobParameter(new Date()));
            jobLauncher.run(job, new JobParameters(parameters));
            return new StatusResponse(true);
        } catch (Exception e) {
            log.error("Exception", e);
            Throwable rootException = ExceptionUtils.getRootCause(e);
            String errMessage = rootException.getMessage();
            log.info("Root cause is instance of JobInstanceAlreadyCompleteException --> " + (rootException instanceof JobInstanceAlreadyCompleteException));
            if (rootException instanceof JobInstanceAlreadyCompleteException) {
                log.info(errMessage);
                return new StatusResponse(false, "This job has been completed already!");
            } else {
                throw e;
            }
        }
    }
}
BatchConfig.java
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import com.sc.batchservice.model.DetailsDTO;
@Configuration
@EnableBatchProcessing
public class BatchConfig {

    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    public void setJobBuilderFactory(JobBuilderFactory jobBuilderFactory) {
        this.jobBuilderFactory = jobBuilderFactory;
    }

    @Autowired
    StepBuilderFactory stepBuilderFactory;

    @Value("file:${input.files.location}${input.file.pattern}")
    private Resource[] fileInputs;

    @Value("${euronet.file.column.names}")
    private String filecolumnNames;

    @Value("${euronet.file.column.lengths}")
    private String fileColumnLengths;

    @Value("${input.files.location}")
    private String inputFileLocation;

    @Value("${input.file.pattern}")
    private String inputFilePattern;

    @Autowired
    FlatFileWriter flatFileWriter;

    @Bean
    public Job euronetFileParseJob() {
        return jobBuilderFactory.get("euronetFileParseJob")
                .incrementer(new RunIdIncrementer())
                .start(fileStep())
                .build();
    }

    public Step fileStep() {
        return stepBuilderFactory.get("fileStep")
                .<DetailsDTO, DetailsDTO>chunk(10)
                .reader(new FlatFileReader(fileInputs, filecolumnNames, fileColumnLengths))
                .writer(flatFileWriter)
                .build();
    }
}
FlatFileReader.java
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.MultiResourceItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.mapping.FieldSetMapper;
import org.springframework.batch.item.file.transform.FixedLengthTokenizer;
import org.springframework.core.io.Resource;
import com.sc.batchservice.model.DetailsDTO;
import com.sc.batchservice.util.CommonUtil;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class FlatFileReader extends MultiResourceItemReader<DetailsDTO> {

    public FlatFileReader(Resource[] fileInputs, String filecolumnNames, String fileColumnLengths) {
        setResources(fileInputs);
        setDelegate(reader(filecolumnNames, fileColumnLengths));
        setStrict(true);
    }

    private FlatFileItemReader<DetailsDTO> reader(String filecolumnNames, String fileColumnLengths) {
        FlatFileItemReader<DetailsDTO> flatFileItemReader = new FlatFileItemReader<>();
        FixedLengthTokenizer tokenizer = CommonUtil.fixedLengthTokenizer(filecolumnNames, fileColumnLengths);
        FieldSetMapper<DetailsDTO> mapper = createMapper();
        DefaultLineMapper<DetailsDTO> lineMapper = new DefaultLineMapper<>();
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(mapper);
        flatFileItemReader.setLineMapper(lineMapper);
        return flatFileItemReader;
    }

    /*
     * Mapping column data to DTO
     */
    private FieldSetMapper<DetailsDTO> createMapper() {
        BeanWrapperFieldSetMapper<DetailsDTO> mapper = new BeanWrapperFieldSetMapper<>();
        try {
            mapper.setTargetType(DetailsDTO.class);
        } catch (Exception e) {
            log.error("Exception in mapping column data to dto ", e);
        }
        return mapper;
    }
}
I also have writer, entity and model classes, but with an example or an idea for the code up to this point, I can proceed with those classes.
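A common starting point for this is Spring Batch's test support. Here is a minimal JUnit 4 sketch, assuming the spring-batch-test dependency is on the classpath (Spring Batch 4.1+ for @SpringBatchTest), that euronetFileParseJob is the only Job bean in the test context (otherwise inject it and call jobLauncherTestUtils.setJob(...)), and that the input file properties resolve in the test environment:
import static org.junit.Assert.assertEquals;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.batch.test.context.SpringBatchTest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBatchTest // registers a JobLauncherTestUtils bean in the test context
@SpringBootTest
public class EuronetFileParseJobTest {

    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils;

    @Test
    public void euronetFileParseJobShouldComplete() throws Exception {
        // Launches the whole job and waits for it to finish
        JobExecution jobExecution = jobLauncherTestUtils.launchJob();
        assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
    }

    @Test
    public void fileStepShouldComplete() throws Exception {
        // Launches the "fileStep" step in isolation
        JobExecution jobExecution = jobLauncherTestUtils.launchStep("fileStep");
        assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());
    }
}
JobLauncherTestUtils generates unique job parameters for each launch, so repeated test runs do not fail with JobInstanceAlreadyCompleteException.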

Spring Batch ResourcelessTransactionManager messes with persistence.xml?

I am working on an application and have been asked to implement a scheduled Spring Batch job. I have set up a configuration class where I declare a ResourcelessTransactionManager @Bean, but it seems to mess with the persistence.xml.
There is already a persistence.xml in another module, and there is no compilation error. I get a NoUniqueBeanDefinitionException when I request a page that returns a view item.
This is the error:
Caused by: org.springframework.beans.factory.NoUniqueBeanDefinitionException: No qualifying bean of type [org.springframework.transaction.PlatformTransactionManager] is defined: expected single matching bean but found 2: txManager,transactionManager
at org.springframework.beans.factory.support.DefaultListableBeanFactory.getBean(DefaultListableBeanFactory.java:365)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.getBean(DefaultListableBeanFactory.java:331)
at org.springframework.transaction.interceptor.TransactionAspectSupport.determineTransactionManager(TransactionAspectSupport.java:366)
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:271)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:96)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179)
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:653)
at com.mypackage.services.MyClassService$$EnhancerBySpringCGLIB$$9e8bf16f.registryEvents(<generated>)
at com.mypackage.controllers.MyClassSearchView.init(MyClassSearchView.java:75)
... 168 more
Is there a way to tell Spring Batch to use the data source defined in the persistence.xml of the other module, or is this maybe caused by something else?
I created a separate BatchScheduler class, shown below, and imported it into the BatchConfiguration class. I am sharing both classes.
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean;
import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;

@Configuration
@EnableScheduling
public class BatchScheduler {

    @Bean
    public ResourcelessTransactionManager resourcelessTransactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
            ResourcelessTransactionManager resourcelessTransactionManager) throws Exception {
        MapJobRepositoryFactoryBean factory = new MapJobRepositoryFactoryBean(resourcelessTransactionManager);
        factory.afterPropertiesSet();
        return factory;
    }

    @Bean
    public JobRepository jobRepository(MapJobRepositoryFactoryBean factory) throws Exception {
        return factory.getObject();
    }

    @Bean
    public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(jobRepository);
        return launcher;
    }
}
And here is BatchConfiguration, which contains the jpaTransactionManager:
import java.io.IOException;
import java.util.Date;
import java.util.Properties;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.JpaItemWriter;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.env.Environment;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.transaction.PlatformTransactionManager;
import trade.api.common.constants.Constants;
import trade.api.entity.SecurityEntity;
import trade.api.trade.batch.item.processor.SecurityItemProcessor;
import trade.api.trade.batch.item.reader.NseSecurityReader;
import trade.api.trade.batch.notification.listener.SecurityJobCompletionNotificationListener;
import trade.api.trade.batch.tasklet.SecurityReaderTasklet;
import trade.api.vo.SecurityVO;
@Configuration
@EnableBatchProcessing
@EnableScheduling
@Import({OhlcMonthBatchConfiguration.class, OhlcWeekBatchConfiguration.class, OhlcDayBatchConfiguration.class, OhlcMinuteBatchConfiguration.class})
public class BatchConfiguration {

    private static final String OVERRIDDEN_BY_EXPRESSION = null;

    /*
     * Load the properties
     */
    @Value("${database.driver}")
    private String databaseDriver;

    @Value("${database.url}")
    private String databaseUrl;

    @Value("${database.username}")
    private String databaseUsername;

    @Value("${database.password}")
    private String databasePassword;

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    private JobLauncher jobLauncher;

    @Bean
    public TaskScheduler taskScheduler() {
        return new ConcurrentTaskScheduler();
    }

    // second, minute, hour, day of month, month, day(s) of week
    // @Scheduled(cron = "0 0 21 * * 1-5") on week days
    @Scheduled(cron = "${schedule.insert.security}")
    public void importSecuritySchedule() throws Exception {
        System.out.println("Job Started at :" + new Date());
        JobParameters param = new JobParametersBuilder().addString("JobID",
                String.valueOf(System.currentTimeMillis())).toJobParameters();
        JobExecution execution = jobLauncher.run(importSecuritesJob(), param);
        System.out.println("Job finished with status :" + execution.getStatus());
    }

    @Bean
    SecurityJobCompletionNotificationListener securityJobCompletionNotificationListener() {
        return new SecurityJobCompletionNotificationListener();
    }

    // Import Equity OHLC End
    // Import Equity Start
    // tag::readerwriterprocessor[]
    @Bean
    public SecurityReaderTasklet securityReaderTasklet() {
        return new SecurityReaderTasklet();
    }

    @Bean
    @StepScope
    public NseSecurityReader<SecurityVO> nseSecurityReader(@Value("#{jobExecutionContext['" + Constants.SECURITY_DOWNLOAD_FILE + "']}") String pathToFile) throws IOException {
        NseSecurityReader<SecurityVO> reader = new NseSecurityReader<SecurityVO>();
        reader.setLinesToSkip(1);
        reader.setResource(new FileSystemResource(pathToFile));
        reader.setLineMapper(new DefaultLineMapper<SecurityVO>() {{
            setLineTokenizer(new DelimitedLineTokenizer() {{
                setNames(new String[] { "symbol", "nameOfCompany", "series", "dateOfListing", "paidUpValue", "marketLot", "isinNumber", "faceValue" });
            }});
            setFieldSetMapper(new BeanWrapperFieldSetMapper<SecurityVO>() {{
                setTargetType(SecurityVO.class);
            }});
        }});
        return reader;
    }

    @Bean
    public SecurityItemProcessor processor() {
        return new SecurityItemProcessor();
    }

    @Bean
    public JpaItemWriter<SecurityEntity> writer() {
        JpaItemWriter<SecurityEntity> writer = new JpaItemWriter<SecurityEntity>();
        writer.setEntityManagerFactory(entityManagerFactory().getObject());
        return writer;
    }
    // end::readerwriterprocessor[]

    // tag::jobstep[]
    @Bean
    public Job importSecuritesJob() throws IOException {
        return jobBuilderFactory.get("importSecuritesJob")
                .incrementer(new RunIdIncrementer())
                .listener(securityJobCompletionNotificationListener())
                .start(downloadSecurityStep())
                .next(insertSecurityStep())
                .build();
    }

    @Bean
    public Step downloadSecurityStep() throws IOException {
        return stepBuilderFactory.get("downloadSecurityStep")
                .tasklet(securityReaderTasklet())
                .build();
    }

    @Bean
    public Step insertSecurityStep() throws IOException {
        return stepBuilderFactory.get("insertSecurityStep")
                .transactionManager(jpaTransactionManager())
                .<SecurityVO, SecurityEntity> chunk(100)
                .reader(nseSecurityReader(OVERRIDDEN_BY_EXPRESSION))
                .processor(processor())
                .writer(writer())
                .build();
    }
    // end::jobstep[]
    // Import Equity End

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(databaseDriver);
        dataSource.setUrl(databaseUrl);
        dataSource.setUsername(databaseUsername);
        dataSource.setPassword(databasePassword);
        return dataSource;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
        LocalContainerEntityManagerFactoryBean lef = new LocalContainerEntityManagerFactoryBean();
        lef.setPackagesToScan("trade.api.entity");
        lef.setDataSource(dataSource());
        lef.setJpaVendorAdapter(jpaVendorAdapter());
        lef.setJpaProperties(new Properties());
        return lef;
    }

    @Bean
    public JpaVendorAdapter jpaVendorAdapter() {
        HibernateJpaVendorAdapter jpaVendorAdapter = new HibernateJpaVendorAdapter();
        jpaVendorAdapter.setDatabase(Database.MYSQL);
        jpaVendorAdapter.setGenerateDdl(true);
        jpaVendorAdapter.setShowSql(false);
        jpaVendorAdapter.setDatabasePlatform("org.hibernate.dialect.MySQLDialect");
        return jpaVendorAdapter;
    }

    @Bean
    @Qualifier("jpaTransactionManager")
    public PlatformTransactionManager jpaTransactionManager() {
        return new JpaTransactionManager(entityManagerFactory().getObject());
    }

    @Bean
    public static PropertySourcesPlaceholderConfigurer dataProperties(Environment environment) throws IOException {
        String[] activeProfiles = environment.getActiveProfiles();
        final PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer();
        ppc.setLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:application-" + activeProfiles[0] + ".properties"));
        return ppc;
    }
    // Import Security End
}
Problem solved. There was a PlatformTransactionManager bean located in another configuration file. I set it as @Primary and now the problem is fixed. Thanks everyone for the help.
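For reference, a minimal sketch of that kind of fix (the bean name and injection point here are illustrative), marking the JPA transaction manager as the one to prefer when a PlatformTransactionManager is autowired by type:
@Bean
@Primary // resolves the NoUniqueBeanDefinitionException: this bean wins when autowiring by type
public PlatformTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
    return new JpaTransactionManager(entityManagerFactory);
}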

Spring Batch Admin UI not showing jobs that are configured

I have integrated Spring Boot with spring-batch-admin successfully. I used the sample guide from Spring Batch to set up jobs and added a couple more tasklets to them. My UI shows only one job; I expect to see three of them. I also expect the three jobs to have start/stop functionality and to take job parameters from the UI.
I have pushed the entire code here; please feel free to open pull requests if you have solutions or improvements.
Here is my job.xml, located in src/main/resources/batch/jobs/:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:batch="http://www.springframework.org/schema/batch"
       xsi:schemaLocation="http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
       http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd">

    <!-- This is the XML way to define jobs, but it is very handy if you already have jobs like this -->
    <batch:job id="FirstJob" restartable="true">
        <batch:step id="firstStep">
            <batch:tasklet ref="firstTasklet" start-limit="1" />
        </batch:step>
    </batch:job>

    <bean id="firstTasklet" class="hello.FirstTasklet">
        <property name="property" value="${custom-property}" />
    </bean>
</beans>
Here is my BatchConfiguration
package hello;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.core.JdbcTemplate;
@Configuration
//@EnableBatchProcessing
public class BatchConfiguration {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public DataSource dataSource;

    // @Value("#{jobParameters['file']:sample-data.csv}")
    // String filename;

    // tag::readerwriterprocessor[]
    @Bean
    //@StepScope
    public FlatFileItemReader<Person> reader() {
        FlatFileItemReader<Person> reader = new FlatFileItemReader<Person>();
        reader.setResource(new ClassPathResource("sample-data.csv"));
        reader.setLineMapper(new DefaultLineMapper<Person>() {{
            setLineTokenizer(new DelimitedLineTokenizer() {{
                setNames(new String[] { "firstName", "lastName" });
            }});
            setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }});
        }});
        return reader;
    }

    @Bean
    public PersonItemProcessor processor() {
        return new PersonItemProcessor();
    }

    @Bean
    public JdbcBatchItemWriter<Person> writer() {
        JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
        writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
        writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)");
        writer.setDataSource(dataSource);
        return writer;
    }
    // end::readerwriterprocessor[]

    // tag::jobstep[]
    @Bean
    public Job importUserJob(JobCompletionNotificationListener listener) {
        return jobBuilderFactory.get("importUserJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener)
                .flow(step1())
                .end()
                .build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1")
                .<Person, Person> chunk(10)
                .reader(reader())
                .processor(processor())
                .writer(writer())
                .build();
    }
    // end::jobstep[]

    @Bean
    @StepScope
    public FailableTasklet tasklet(@Value("#{jobParameters[fail]}") Boolean failable) {
        if (failable != null) {
            return new FailableTasklet(failable);
        } else {
            return new FailableTasklet(false);
        }
    }

    public static class FailableTasklet implements Tasklet {

        private final boolean fail;

        public FailableTasklet(boolean fail) {
            this.fail = fail;
        }

        @Override
        public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
            System.out.println("Tasklet was executed");
            if (fail) {
                throw new RuntimeException("This exception was expected");
            } else {
                return RepeatStatus.FINISHED;
            }
        }
    }
}
(screenshot of the UI)
The Spring Batch Admin UI displays only the jobs. You won't see steps/tasklets in the UI, as they cannot be run individually. But after you run your job, you can see the stats of each step performed in that job. Hope this helps.
