How to make multiple writers execute in parallel in Spring Batch

The code below is my existing implementation; it processes and writes data into multiple collections one after another.
My requirement is to write into multiple collections at the same time, not one after another. In short, I want a parallel writing process.
Below is my existing code:
public Job importSingleETLData(SingleETLJobListener listener, Step singleETLStep, HttpServletRequest request) {
    return jobBuilderFactory.get("importSingleETLData")
            .incrementer(new RunIdIncrementer())
            .listener(listener)
            .flow(singleETLStep)
            .end()
            .build();
}

@Bean
public Step singleETLStep(MongoItemWriter<CompositeWriterData> writer, HttpServletRequest request) {
    return stepBuilderFactory.get("singleETLStep")
            // TODO: P3 chunk size configurable
            .<UserInfo, CompositeWriterData>chunk(etlConfiguration.getBatchChunkSize())
            .reader(reader("#{jobParameters[profileId]}")).faultTolerant().skipPolicy(readerSkipper())
            .processor(processor(request, "#{jobParameters[executeProcessing]}"))
            .listener(processorListener()).faultTolerant().skipPolicy(writerSkipper())
            .writer(writer)
            .build();
}
@Bean
@StepScope
public MongoItemReader<UserInfo> reader(@Value("#{jobParameters[profileId]}") String profileId) {
    String query = "{'results._id' : '" + profileId + "'}";
    Map<String, Direction> sorts = new HashMap<>();
    sorts.put("_id", Direction.ASC);
    MongoItemReader<UserInfo> reader = new MongoItemReader<>();
    reader.setCollection(CommonConstants.USER_INFO_VIEW);
    reader.setTemplate(secondaryMongoTemplate);
    reader.setTargetType(UserInfo.class);
    // TODO: P2 take latest phi only
    // TODO: P2 use a different query in on-demand mode to fetch the last processed record if nothing was updated since the last run
    reader.setQuery(query);
    reader.setSort(sorts);
    return reader;
}

@Bean
@StepScope
public ETLDataProcessor processor(HttpServletRequest request, @Value("#{jobParameters[executeProcessing]}") String executeProcessing) {
    return new ETLDataProcessor(request, executeProcessing);
}
@Bean
public MongoItemWriter<ProfileRecommendationInfo> recommendationsDataWriter() {
    MongoItemWriter<ProfileRecommendationInfo> writer = new MongoItemWriter<>();
    writer.setTemplate(secondaryMongoTemplate);
    return writer;
}

@Bean
public MongoItemWriter<ProfileLifebandInfo> lifeBandDataWriter() {
    MongoItemWriter<ProfileLifebandInfo> writer = new MongoItemWriter<>();
    writer.setTemplate(secondaryMongoTemplate);
    return writer;
}

@Bean
public MongoItemWriter<Profile> profileWriter() {
    MongoItemWriter<Profile> writer = new MongoItemWriter<>();
    writer.setTemplate(secondaryMongoTemplate);
    return writer;
}

@Bean
public MongoItemWriter<CompositeWriterData> compositeMongoWriter() {
    CompositeMongoItemWriter compositeWriter = new CompositeMongoItemWriter();
    compositeWriter.setTemplate(secondaryMongoTemplate);
    return compositeWriter;
}

@Bean
public SingleETLProcessorListener processorListener() {
    return new SingleETLProcessorListener();
}

@Bean
public SkipPolicy readerSkipper() {
    return new ReaderSkipper();
}

@Bean
public SkipPolicy writerSkipper() {
    return new WriterSkipper();
}
public class CompositeMongoItemWriter extends MongoItemWriter<CompositeWriterData> {

    @Autowired
    MongoItemWriter<ProfileRecommendationInfo> recommendationsDataWriter;

    @Autowired
    MongoItemWriter<ProfileLifebandInfo> lifeBandWriter;

    @Autowired
    private MongoTemplate secondaryMongoTemplate;

    @Autowired
    MongoItemWriter<Profile> profileWriter;

    @Override
    public void write(List<? extends CompositeWriterData> items) throws Exception {
        if (items != null && !items.isEmpty()) {
            for (CompositeWriterData compositeWriterData : items) {
                for (Entry<String, Object> collection : compositeWriterData.getCollectionsPOJODataMap().entrySet()) {
                    MongoItemWriter mongoItemWriter = fetchMongoItemWriterObject(collection.getKey());
                    if (mongoItemWriter != null) {
                        mongoItemWriter.write(Arrays.asList(collection.getValue()));
                    }
                    // The code below updates Profile with the profile_recommendation_id primary key
                    if (CommonConstants.PROFILE_RECOMMENDATION_INFO.equals(collection.getKey())) {
                        ProfileRecommendationInfo profileRecommendationInfo = (ProfileRecommendationInfo) collection.getValue();
                        updateProfileWithSavedCollectionDataId(profileRecommendationInfo.getProfileId(), profileRecommendationInfo.getDataId());
                    }
                }
            }
        }
    }
    /**
     * Returns a MongoItemWriter instance based on the collectionName
     * passed to it on invocation.
     *
     * Note: this method needs to be modified whenever a new collection is added to
     * ${etl.processor.collection.pojo} in utility-service-application.properties.
     * @return the matching writer, or null if none is registered
     */
    private MongoItemWriter fetchMongoItemWriterObject(String collectionName) {
        if (CommonConstants.PROFILE_RECOMMENDATION_INFO.equalsIgnoreCase(collectionName)) {
            return recommendationsDataWriter;
        } else if (CommonConstants.PROFILE_LIFEBAND_INFO.equalsIgnoreCase(collectionName)) {
            return lifeBandWriter;
        }
        return null;
    }

    /**
     * Updates the profile with the collection data's primary key value.
     * This is useful in ETL processing when we fetch the last data saved for a particular user.
     * @throws Exception
     */
    private void updateProfileWithSavedCollectionDataId(String profileId, String profileRecommendationInfoId) throws Exception {
        Profile profile = secondaryMongoTemplate.findById(profileId, Profile.class);
        profile.setProfileRecommendationInfoId(profileRecommendationInfoId);
        profileWriter.write(Arrays.asList(profile));
    }
}
How can I write into multiple collections at the same time, not one after another? In short, I want a parallel writing process.
We are trying to achieve what is described as parallel processing in the link below:
https://docs.spring.io/spring-batch/docs/current/reference/html/scalability.html

The CompositeItemWriter calls its delegate writers in sequence. If you want to call the delegate writers in parallel, you need a custom CompositeItemWriter that, for instance, submits the different write operations to a TaskExecutor (see the example here). However, you need to think about error handling and how to recover from failures (i.e. the retry/skip features).
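To make that concrete, here is a minimal sketch of such a writer (an illustration under assumptions, not the answerer's actual example): it fans each delegate's write out to a TaskExecutor and waits for all of them before the chunk completes. The delegate list, generic type, and executor wiring are placeholders, and retry/skip handling is deliberately reduced to rethrowing the first failure.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.springframework.batch.item.ItemWriter;
import org.springframework.core.task.TaskExecutor;

public class ParallelCompositeItemWriter<T> implements ItemWriter<T> {

    private final List<ItemWriter<T>> delegates;
    private final TaskExecutor taskExecutor;

    public ParallelCompositeItemWriter(List<ItemWriter<T>> delegates, TaskExecutor taskExecutor) {
        this.delegates = delegates;
        this.taskExecutor = taskExecutor;
    }

    @Override
    public void write(List<? extends T> items) throws Exception {
        CountDownLatch latch = new CountDownLatch(delegates.size());
        List<Exception> failures = Collections.synchronizedList(new ArrayList<>());
        for (ItemWriter<T> delegate : delegates) {
            // each delegate writes the same chunk on its own executor thread
            taskExecutor.execute(() -> {
                try {
                    delegate.write(items);
                } catch (Exception e) {
                    failures.add(e);
                } finally {
                    latch.countDown();
                }
            });
        }
        latch.await(); // block until every delegate has finished this chunk
        if (!failures.isEmpty()) {
            throw failures.get(0); // surface the first failure so the step's skip/retry policy can react
        }
    }
}

Note that the parallel writes happen off the chunk's transaction thread, so a failure in one delegate cannot roll back what another delegate has already written to Mongo; that trade-off is exactly the error-handling concern mentioned above.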

Related

Adding custom logic in Odata Create Entity call in java code

In the project I am using the olingo 2.0.12 jar in the Java code.
During the create-entity service call, is there a way to check which entity a data insert was requested for, and to alter column values / append new column values before the data is persisted?
Is there a way to add the above?
Code snippet given below:
public class A extends ODataJPADefaultProcessor {
    @Override
    public ODataResponse createEntity(final PostUriInfo uriParserResultView, final InputStream content,
            final String requestContentType, final String contentType) throws ODataJPAModelException,
            ODataJPARuntimeException, ODataNotFoundException, EdmException, EntityProviderException {
        // Need to check the entity name and need to alter/add column values
    }
}
Yes, one possible way would be to create your own CustomODataJPAProcessor, which extends ODataJPADefaultProcessor.
You will have to register this in the JPAServiceFactory by overriding the method:
@Override
public ODataSingleProcessor createCustomODataProcessor(ODataJPAContext oDataJPAContext) {
    return new CustomODataJPAProcessor(this.oDataJPAContext);
}
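For orientation, the surrounding factory might look like the sketch below; the class name CustomJPAServiceFactory is an assumption, since the original answer only shows the overridden method, and the usual context initialization is elided.

public class CustomJPAServiceFactory extends JPAServiceFactory {

    // context initialization (entity manager factory, persistence unit, ...) elided;
    // it stays the same as in a standard JPAServiceFactory subclass

    @Override
    public ODataSingleProcessor createCustomODataProcessor(ODataJPAContext oDataJPAContext) {
        return new CustomODataJPAProcessor(oDataJPAContext);
    }
}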
Now Olingo will use CustomODataJPAProcessor, which can implement the following code to check the entities and transform them if needed.
Sample code for CustomODataJPAProcessor:
public class CustomODataJPAProcessor extends ODataJPADefaultProcessor {

    Logger LOG = LoggerFactory.getLogger(this.getClass());

    public CustomODataJPAProcessor(ODataJPAContext oDataJPAContext) {
        super(oDataJPAContext);
    }

    @Override
    public ODataResponse createEntity(final PostUriInfo uriParserResultView, final InputStream content,
            final String requestContentType, final String contentType) throws ODataException {
        ODataResponse oDataResponse = null;
        oDataJPAContext.setODataContext(getContext());
        InputStream forwardedInputStream = content;
        try {
            if (uriParserResultView.getTargetEntitySet().getName().equals("Students")) {
                LOG.info("Students Entity Set Executed");
                if (requestContentType.equalsIgnoreCase(ContentType.APPLICATION_JSON.toContentTypeString())) {
                    @SuppressWarnings("deprecation")
                    JsonElement elem = new JsonParser().parse(new InputStreamReader(content));
                    Gson gson = new GsonBuilder().setFieldNamingPolicy(FieldNamingPolicy.UPPER_CAMEL_CASE).create();
                    Student s = gson.fromJson(elem, Student.class);
                    // Change some values
                    s.setStudentID("Test" + s.getStudentID());
                    forwardedInputStream = new ByteArrayInputStream(gson.toJson(s).getBytes());
                }
            }
            Object createdJpaEntity = jpaProcessor.process(uriParserResultView, forwardedInputStream,
                    requestContentType);
            oDataResponse = responseBuilder.build(uriParserResultView, createdJpaEntity, contentType);
        } catch (JsonIOException | JsonSyntaxException e) {
            throw new RuntimeException(e);
        } finally {
            close();
        }
        return oDataResponse;
    }
}
In summary:
Register your custom org.apache.olingo.odata2.service.factory (Code Link)
Create your own CustomODataJPAProcessor (Code Link)
Override createCustomODataProcessor in JPAServiceFactory to use the custom processor (Code Link)

more than one 'primary' service instance suppliers found during load balancing (spring boot/cloud)

I'm currently updating from Spring Boot 2.2.x to 2.6.x on legacy code; it's a big jump, so there were multiple changes. I'm now running into a problem with load balancing through an API gateway. I'll apologize in advance for the wall of code to come; the point of failure is at the bottom.
When I send in an API request, I get the following error:
more than one 'primary' bean found among candidates: [zookeeperDiscoveryClientServiceInstanceListSupplier, serviceInstanceListSupplier, retryAwareDiscoveryClientServiceInstanceListSupplier]
It seems that the zookeeperDiscovery and retryAware suppliers are loaded alongside the default serviceInstanceListSupplier, which has @Primary on it and which I thought would take precedence over the other ones. I assume I must be doing something wrong due to changes in the newer version. Here is the relevant code in question:
@Configuration
@LoadBalancerClients(defaultConfiguration = ClientConfiguration.class)
public class WebClientConfiguration {

    @Bean
    @Qualifier("microserviceWebClient")
    @ConditionalOnMissingBean(name = "microserviceWebClient")
    public WebClient microserviceWebClient(@Qualifier("microserviceWebClientBuilder") WebClient.Builder builder) {
        return builder.build();
    }

    @Bean
    @Qualifier("microserviceWebClientBuilder")
    @ConditionalOnMissingBean(name = "microserviceWebClientBuilder")
    @LoadBalanced
    public WebClient.Builder microserviceWebClientBuilder() {
        return WebClient.builder();
    }

    @Bean
    @Primary
    public ReactorLoadBalancerExchangeFilterFunction reactorLoadBalancerExchangeFilterFunction(
            ReactiveLoadBalancer.Factory<ServiceInstance> loadBalancerFactory) {
        // the transformer is currently null; there wasn't a transformer before the upgrade
        return new CustomExchangeFilterFunction(loadBalancerFactory, transformer);
    }
}
There are also some Feign-client-related configs here, which I will omit since they're not (or shouldn't be) playing a role in this problem:
public class ClientConfiguration {

    /**
     * The property key within the feign clients configuration context for the feign client name.
     */
    public static final String FEIGN_CLIENT_NAME_PROPERTY = "feign.client.name";

    public ClientConfiguration() {
    }

    // Creates a new BiPredicate for shouldClose. This will be used to determine if HTTP connections should be automatically closed or not.
    @Bean
    @ConditionalOnMissingBean
    public BiPredicate<Response, Type> shouldClose() {
        return (Response response, Type type) -> {
            if (type instanceof Class) {
                Class<?> currentClass = (Class<?>) type;
                return (null == AnnotationUtils.getAnnotation(currentClass, EnableResponseStream.class));
            }
            return true;
        };
    }

    // Creates a custom decoder
    @Bean
    public Decoder createCustomDecoder(
            ObjectFactory<HttpMessageConverters> converters, BiPredicate<Response, Type> shouldClose
    ) {
        return new CustomDecoder(converters, shouldClose);
    }

    @Bean
    @Qualifier("loadBalancerName")
    public String loadBalancerName(PropertyResolver propertyResolver) {
        String name = propertyResolver.getProperty(FEIGN_CLIENT_NAME_PROPERTY);
        if (StringUtils.hasText(name)) {
            // we are in a feign context
            return name;
        }
        // we are in a LoadBalancerClientFactory context
        name = propertyResolver.getProperty(LoadBalancerClientFactory.PROPERTY_NAME);
        Assert.notNull(name, "Could not find a load balancer name within the configuration context!");
        return name;
    }

    @Bean
    public ReactorServiceInstanceLoadBalancer reactorServiceInstanceLoadBalancer(
            BeanFactory beanFactory, @Qualifier("loadBalancerName") String loadBalancerName
    ) {
        return new CustomRoundRobinLoadBalancer(
                beanFactory.getBeanProvider(ServiceInstanceListSupplier.class),
                loadBalancerName
        );
    }

    @Bean
    @Primary
    public ServiceInstanceListSupplier serviceInstanceListSupplier(
            @Qualifier("filter") Predicate<ServiceInstance> filter,
            DiscoveryClient discoveryClient,
            Environment environment,
            @Qualifier("loadBalancerName") String loadBalancerName
    ) {
        // add service name to environment if necessary
        if (environment.getProperty(LoadBalancerClientFactory.PROPERTY_NAME) == null) {
            StandardEnvironment wrapped = new StandardEnvironment();
            if (environment instanceof ConfigurableEnvironment) {
                ((ConfigurableEnvironment) environment).getPropertySources()
                        .forEach(s -> wrapped.getPropertySources().addLast(s));
            }
            Map<String, Object> additionalProperties = new HashMap<>();
            additionalProperties.put(LoadBalancerClientFactory.PROPERTY_NAME, loadBalancerName);
            wrapped.getPropertySources().addLast(new MapPropertySource(loadBalancerName, additionalProperties));
            environment = wrapped;
        }
        return new FilteringInstanceListSupplier(filter, discoveryClient, environment);
    }
}
There was a change in the ExchangeFilterFunction constructor, but as far as I can tell it accepts that empty transformer; I don't know if it's supposed to:
public class CustomExchangeFilterFunction extends ReactorLoadBalancerExchangeFilterFunction {

    private static final ThreadLocal<ClientRequest> REQUEST_HOLDER = new ThreadLocal<>();

    // I think it's wrong but I don't know what to do here
    private static List<LoadBalancerClientRequestTransformer> transformersList;

    private final Factory<ServiceInstance> loadBalancerFactory;

    public CustomExchangeFilterFunction(Factory<ServiceInstance> loadBalancerFactory) {
        this(loadBalancerFactory);

    // according to docs, but I don't know where and if I need to use this
    @Bean
    public LoadBalancerClientRequestTransformer transformer() {
        return new LoadBalancerClientRequestTransformer() {
            @Override
            public ClientRequest transformRequest(ClientRequest request, ServiceInstance instance) {
                return ClientRequest.from(request)
                        .header(instance.getInstanceId())
                        .build();
            }
        };
    }

    public CustomExchangeFilterFunction(Factory<ServiceInstance> loadBalancerFactory, List<LoadBalancerClientRequestTransformer> transformersList) {
        super(loadBalancerFactory, transformersList); // the changed constructor
        this.loadBalancerFactory = loadBalancerFactory;
    }

    @Override
    public Mono<ClientResponse> filter(ClientRequest request, ExchangeFunction next) {
        // put the current request into the thread context - ugly, but couldn't find a better way to access the request within
        // the choose method without reimplementing nearly everything
        REQUEST_HOLDER.set(request);
        try {
            return super.filter(request, next);
        } finally {
            REQUEST_HOLDER.remove();
        }
    }

    // used to be an override, but the function has changed
    // code execution doesn't even get this far yet
    protected Mono<Response<ServiceInstance>> choose(String serviceId) {
        ReactiveLoadBalancer<ServiceInstance> loadBalancer = loadBalancerFactory.getInstance(serviceId);
        if (loadBalancer == null) {
            return Mono.just(new EmptyResponse());
        }
        ClientRequest request = REQUEST_HOLDER.get();
        // this might be null, if the underlying implementation changed and this method is no longer executed
        // in the same thread as the filter method
        Assert.notNull(request, "request must not be null, underlying implementation seems to have changed");
        return choose(loadBalancer, filter);
    }

    protected Mono<Response<ServiceInstance>> choose(
            ReactiveLoadBalancer<ServiceInstance> loadBalancer,
            Predicate<ServiceInstance> filter
    ) {
        return Mono.from(loadBalancer.choose(new DefaultRequest<>(filter)));
    }
}
There were pretty big changes in the CustomExchangeFilterFunction, but the current execution doesn't even get there. It fails here, in .getIfAvailable(...):
public class CustomRoundRobinLoadBalancer implements ReactorServiceInstanceLoadBalancer {

    private static final int DEFAULT_SEED_POSITION = 1000;

    private final ObjectProvider<ServiceInstanceListSupplier> serviceInstanceListSupplierProvider;
    private final String serviceId;
    private final int seedPosition;
    private final AtomicInteger position;
    private final Map<String, AtomicInteger> positionsForVersions = new HashMap<>();

    public CustomRoundRobinLoadBalancer(
            ObjectProvider<ServiceInstanceListSupplier> serviceInstanceListSupplierProvider,
            String serviceId
    ) {
        this(serviceInstanceListSupplierProvider, serviceId, new Random().nextInt(DEFAULT_SEED_POSITION));
    }

    public CustomRoundRobinLoadBalancer(
            ObjectProvider<ServiceInstanceListSupplier> serviceInstanceListSupplierProvider,
            String serviceId,
            int seedPosition
    ) {
        Assert.notNull(serviceInstanceListSupplierProvider, "serviceInstanceListSupplierProvider must not be null");
        Assert.notNull(serviceId, "serviceId must not be null");
        this.serviceInstanceListSupplierProvider = serviceInstanceListSupplierProvider;
        this.serviceId = serviceId;
        this.seedPosition = seedPosition;
        this.position = new AtomicInteger(seedPosition);
    }

    @Override
    // we have no choice but to use the raw type Request here, because this method overrides another one with this signature
    public Mono<Response<ServiceInstance>> choose(@SuppressWarnings("rawtypes") Request request) {
        // fails here!
        ServiceInstanceListSupplier supplier = serviceInstanceListSupplierProvider
                .getIfAvailable(NoopServiceInstanceListSupplier::new);
        return supplier.get().next().map((List<ServiceInstance> instances) -> getInstanceResponse(instances, request));
    }
}
Edit: after some deeper stack tracing, it seems that it does go into the CustomExchangeFilterFunction and invokes the constructor with super(loadBalancerFactory, transformer).
I found the problem, or at least a workaround. I was using @LoadBalancerClients because I thought it would set the same config for all clients that way (even if I technically only have one at the moment). I changed it to @LoadBalancerClient and it suddenly worked. I don't quite understand why this made a difference, but it did!
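For reference, the change that made it work amounts to something like the following sketch (the service name "my-service" is a placeholder, not from the original post):

@Configuration
// before: @LoadBalancerClients(defaultConfiguration = ClientConfiguration.class)
@LoadBalancerClient(name = "my-service", configuration = ClientConfiguration.class)
public class WebClientConfiguration {
    // ... beans as before
}

One plausible reading, not confirmed in the post: scoping the configuration to a named client keeps the custom @Primary supplier inside that client's child context instead of competing with the suppliers Spring Cloud auto-configures as defaults.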

Spring Boot (v2.3.1): Spring Batch Pass Data From One Step to Another

I'm fairly new to Spring Batch and I would like to know if the following is possible. I am creating a batch job that requires some initial data queried from a database. Would something like the following be possible? Below is an item reader for the first step in the job execution; this initial step will query all FOOs:
@Bean
public JdbcCursorItemReader fooItemReader() {
    return
        new JdbcCursorItemReaderBuilder()
            .dataSource(dataSource)
            .sql(QueryConstants.ALL_FOOS)
            .build();
}

@Bean(name = "step-one")
public Step stepOne() {
    return stepBuilderFactory
            .get("step-one")
            .reader(fooItemReader())
            ...
            .build();
}
The above cursor reader and step would ideally pull all of the FOOs from a database. Given the above, I would like to know if it is possible to take the data from step-one and pass it to a step-two that selects all BARs for each FOO in the list from step-one. Something like below:
@Bean
public JdbcCursorItemReader barsForFooItemReader() {
    // somehow, for each foo in the list of FOOs from step-one, retrieve its BARs
    return
        new JdbcCursorItemReaderBuilder()
            .dataSource(dataSource)
            .sql(QueryConstants.BARS_FOR_FOO)
            .build();
}

@Bean(name = "step-two")
public Step stepTwo() {
    return stepBuilderFactory
            .get("step-two")
            .reader(barsForFooItemReader())
            ...
            .build();
}
Is something like this possible with Spring Batch? Is Spring Batch "right" for something like this? Any help would be most appreciated!
UPDATE:
I looked into the suggestions posted in the comments, and they are not exactly what I'm looking for and leave some questions unanswered. So I'm going to attempt to clarify what I am trying to accomplish with Spring Batch by adding an example (below) of how I would implement the above without Spring Batch, using JdbcTemplate.
@Repository
public class FooBarRepository {

    private final NamedParameterJdbcTemplate namedParameterJdbcTemplate;

    @Autowired
    public FooBarRepository(NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
        this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
    }

    /**
     * Get all FOOs from the database
     * @return
     */
    public List<String> getAllFoos() {
        String query = "SELECT * FROM FOO;";
        try {
            return namedParameterJdbcTemplate.query(query, (resultSet, i) -> resultSet.getString("FOO_NAME"));
        }
        catch (DataAccessException ex) {
            //... log exception
            return Collections.emptyList();
        }
    }

    /**
     * Get all BARs for a given foo name
     * @param fooName
     * @return
     */
    public List<String> getBarsForFoo(String fooName) {
        String query = "SELECT * FROM BAR CROSS JOIN FOO WHERE FOO.NAME = :fooName";
        try {
            return namedParameterJdbcTemplate.query(query, new MapSqlParameterSource().addValue("fooName", fooName), (resultSet, i) -> resultSet.getString("BarFoo"));
        }
        catch (DataAccessException ex) {
            //... log exception
            return Collections.emptyList();
        }
    }
}
Above is a simple DAO repo. There's a query for retrieving all FOOs and another for retrieving all BARs for a given fooName. Below is how I would use both methods.
@Component
public class FooBarProcessor {

    private final FooBarRepository fooBarRepository;

    @Autowired
    public FooBarProcessor(FooBarRepository fooBarRepository) {
        this.fooBarRepository = fooBarRepository;
    }

    public void processFooBars() {
        List<String> foos = fooBarRepository.getAllFoos();
        Map<String, List<String>> foobars = new HashMap<>();
        foos.forEach(foo -> {
            List<String> bars = fooBarRepository.getBarsForFoo(foo);
            foobars.put(foo, bars);
        });
    }
}
In the above, I get a list of all the FOOs and then, for each FOO, I retrieve its BARs. (I hope the above makes sense logically.)
I want to accomplish something similar with Spring Batch, as I expect the data set to be quite large.
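One common Spring Batch shape for this kind of "for each FOO, fetch its BARs" requirement is the driving-query pattern: the reader streams FOOs and an ItemProcessor enriches each one with its BARs, so nothing has to be passed between steps at all. The sketch below is an illustration of that pattern, not an authoritative answer; FooBar is an assumed holder type and fooBarWriter is an assumed writer bean.

@Bean
public ItemProcessor<String, FooBar> fooEnrichmentProcessor(FooBarRepository fooBarRepository) {
    // fooName comes from the cursor reader; the processor runs the per-FOO query
    return fooName -> new FooBar(fooName, fooBarRepository.getBarsForFoo(fooName));
}

@Bean(name = "foo-bar-step")
public Step fooBarStep(ItemReader<String> fooItemReader,
                       ItemProcessor<String, FooBar> fooEnrichmentProcessor,
                       ItemWriter<FooBar> fooBarWriter) {
    return stepBuilderFactory
            .get("foo-bar-step")
            .<String, FooBar>chunk(100)
            .reader(fooItemReader)
            .processor(fooEnrichmentProcessor)
            .writer(fooBarWriter)
            .build();
}

Because the FOOs are streamed chunk by chunk, this scales to large data sets without holding the whole FOO list in memory, which is the usual objection to promoting a big list between steps via the ExecutionContext.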

Spring Boot Background Job

I am using Spring Boot with Thymeleaf.
When the user clicks the related button, a POST request is sent, and the related controller method calls a function which takes 20 minutes. This function does not return a value.
I just want to process this function in the background. When the application reaches this function's line, it should pass the parameters to the function and keep processing without waiting for a return.
What is the best practice for this case?
Many thanks in advance.
UPDATE
My config class
@Configuration
@EnableAsync
public class SpringAsyncConfig implements AsyncConfigurer {

    @Bean(name = "ocrThread-")
    public Executor threadPoolTaskExecutor() {
        return new ThreadPoolTaskExecutor();
    }

    @Override
    public Executor getAsyncExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setCorePoolSize(2);
        executor.setMaxPoolSize(2);
        executor.setQueueCapacity(10);
        executor.initialize();
        return executor;
    }

    @Override
    public AsyncUncaughtExceptionHandler getAsyncUncaughtExceptionHandler() {
        // TODO Auto-generated method stub
        return null;
    }
}
Service class

@Service
public class OcrService {

    @Async
    public String treadliOcr(List<String> liste, String kok) throws
            InterruptedException, IOException, TesseractException {
        .....
    }
}
Controller

@RequestMapping(value = "/aktar", method = RequestMethod.POST)
public String aktar(@RequestParam("belgeAdi") String belgeAdi,
        @RequestParam("evrakTurId") String evrakTurId,
        @RequestParam("kategoriId") String kategoriId,
        @RequestParam("belgeTurId") String belgeTurId,
        @RequestParam("firmaId") String firmaId,
        @RequestParam("projeId") String projeId,
        @RequestParam("aciklama") String aciklama) throws InterruptedException, IOException, TesseractException {
    Integer b = null;
    Integer p = null;
    String klasor = getInitYol();
    String belgeOnAd = belgeAdi.substring(0, 14);
    BelgeIsimleri belgeIsimleri = new BelgeIsimleri();
    List<String> seciliListe = belgeIsimleri.seciliBelgeleriFiltrele(klasor, belgeOnAd);
    for (String s : seciliListe) {
        File file = new File(getInitYol() + s);
        if (file.renameTo(new File("D:\\Done\\" + s))) {
            file.delete();
            System.out.println(s + "yi sildi");
        }
    }
    OcrService ocr = new OcrService();
    String result = ocr.treadliOcr(seciliListe, getInitYol());
    System.out.println("Ocr dan döndü");
    Integer et = Integer.valueOf(evrakTurId);
    Integer k = Integer.valueOf(kategoriId);
    if (null == belgeTurId || "".equals(belgeTurId)) {
    } else {
        b = Integer.valueOf(belgeTurId);
    }
    Integer f = Integer.valueOf(firmaId);
    if (null == projeId || "".equals(projeId)) {
    } else {
        p = Integer.valueOf(projeId);
    }
    belgeRepo.save(new BelgeEntity(et, k, b, f, p, aciklama, result, belgeOnAd));
    return "redirect:/verigiris";
}
Spring provides annotation support for asynchronous method execution via @Async and @EnableAsync: https://spring.io/guides/gs/async-method/
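One caveat worth adding (an editorial note, not part of the original answer): @Async only takes effect on beans managed by Spring, so the new OcrService() call in the controller above bypasses the async proxy entirely, and the controller will still block for the full 20 minutes. A minimal sketch of the corrected wiring, with placeholder inputs standing in for the original request handling:

@Controller
public class AktarController {

    private final OcrService ocrService;

    public AktarController(OcrService ocrService) {
        // Spring injects the @Async-capable proxy; new OcrService() would skip it
        this.ocrService = ocrService;
    }

    @PostMapping("/aktar")
    public String aktar(@RequestParam("belgeAdi") String belgeAdi) throws Exception {
        List<String> seciliListe = Collections.singletonList(belgeAdi); // placeholder input
        ocrService.treadliOcr(seciliListe, "D:\\Done\\"); // returns immediately; the OCR work continues on the executor
        return "redirect:/verigiris";
    }
}

Also note that calling an @Async method declared to return a bare String yields null to the caller; for fire-and-forget it should return void, or Future<String> / CompletableFuture<String> if the result is needed later.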

RowMapper returns the list, but the values returned by execute() give a list size of 1?

Please find my sample code below. The RowMapper returns a list. When printed, it gives me the size in the DB, but when I check (List) employeeDaomap.get("allEmployees") I get the list size as 1, with the entire set of rows as one item. Why? What is wrong in the implementation?
Also, the Spring docs say not to use rs.next(), so how do we get the list of values from the DB?
public class MyTestDAO extends StoredProcedure {

    /** The log. */
    static Logger log = Logger.getLogger(MyTestDAO.class);

    private static final String SPROC_NAME = "TestSchema.PKG_Test.prc_get_employee_list";

    TestRowMapper mapper = new TestRowMapper();

    public MyTestDAO(DataSource dataSource) {
        super(dataSource, SPROC_NAME);
        declareParameter(new SqlOutParameter("allEmployees", OracleTypes.CURSOR, mapper));
        compile();
    }

    /**
     * Gets the myemplist data from the DB
     */
    public List<EmployeeDAO> getEmployeeList() throws Exception {
        Map<String, Object> employeeDaomap = new HashMap<String, Object>();
        employeeDaomap = execute();
        log.info("employeeDaomap after execute =" + employeeDaomap);
        log.info("employeeDaomap after execute size =" + employeeDaomap.size()); // expected 1
        List<EmployeeDAO> list = (List<EmployeeDAO>) employeeDaomap.get("allEmployees");
        log.info("size of the list =" + list.size()); // need to get the size of the list
        return list;
    }

    private Map<String, Object> execute() {
        return super.execute(new HashMap<String, Object>());
    }
}
public class TestRowMapper implements RowMapper<List<EmployeeDAO>> {

    static Logger log = Logger.getLogger(TestRowMapper.class);

    @Override
    public List<EmployeeDAO> mapRow(ResultSet rs, int rowNum) throws SQLException {
        // TODO Auto-generated method stub
        rs.setFetchSize(3000);
        List<EmployeeDAO> responseItems = new ArrayList<EmployeeDAO>();
        EmployeeDAO responseItem = null;
        log.info("row num " + rowNum);
        while (rs.next()) {
            responseItem = new EmployeeDAO();
            responseItem.setID(rs.getString("id"));
            responseItem.setName(rs.getString("name"));
            responseItem.setDesc(rs.getString("desc"));
            responseItems.add(responseItem);
        }
        log.info("TestRowMapper items =" + responseItems);
        return responseItems;
    }
}
The solution is to implement ResultSetExtractor instead of RowMapper and to provide an implementation for extractData.
public class TestRowMapper implements ResultSetExtractor<List<EmployeeDAO>> {

    static Logger log = Logger.getLogger(TestRowMapper.class);

    @Override
    public List<EmployeeDAO> extractData(ResultSet rs) throws SQLException, DataAccessException {
        rs.setFetchSize(3000);
        List<EmployeeDAO> responseItems = new ArrayList<EmployeeDAO>();
        EmployeeDAO responseItem = null;
        while (rs.next()) {
            responseItem = new EmployeeDAO();
            responseItem.setID(rs.getString("id"));
            responseItem.setName(rs.getString("name"));
            responseItem.setDesc(rs.getString("desc"));
            responseItems.add(responseItem);
        }
        log.info("TestRowMapper items =" + responseItems);
        return responseItems;
    }
}
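To wire this in, the DAO's out-parameter registration barely changes; Spring's SqlOutParameter also has a constructor overload that accepts a ResultSetExtractor (alongside the RowMapper and RowCallbackHandler variants), so the sketch below should be all that changes in MyTestDAO:

public MyTestDAO(DataSource dataSource) {
    super(dataSource, SPROC_NAME);
    // register the extractor for the cursor out parameter instead of the row mapper
    declareParameter(new SqlOutParameter("allEmployees", OracleTypes.CURSOR, new TestRowMapper()));
    compile();
}

With the extractor in place, the whole cursor is mapped once into a single List<EmployeeDAO>, so employeeDaomap.get("allEmployees") now yields the full list rather than a one-element list wrapping it.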
