How to configure LettuceConnectionFactory in Spring Boot 1.5.15.RELEASE

I am trying to configure a LettuceConnectionFactory in a Spring Boot project on version 1.5.15.RELEASE.
This is my configuration file:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.data.redis.connection.RedisSentinelConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
@SpringBootApplication
public class RedisApplication {

    private static final Logger LOGGER = LoggerFactory.getLogger(RedisApplication.class);

    @Bean
    public LettuceConnectionFactory lettuceConnectionFactory() {
        LOGGER.info("Setup lettuce config");
        final LettuceConnectionFactory factory = new LettuceConnectionFactory();
        return factory;
    }

    @Bean
    RedisTemplate<String, User> redisTemplate() {
        RedisTemplate<String, User> redisTemplate = new RedisTemplate<>();
        redisTemplate.setConnectionFactory(lettuceConnectionFactory());
        return redisTemplate;
    }

    public static void main(String[] args) {
        SpringApplication.run(RedisApplication.class, args);
    }
}
I added the Lettuce dependency in pom.xml:
<dependency>
<groupId>biz.paluch.redis</groupId>
<artifactId>lettuce</artifactId>
<version>3.5.0.Final</version>
</dependency>
While starting the project, it throws the following error:
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'lettuceConnectionFactory' defined in com.example.redis.RedisApplication: Post-processing of merged bean definition failed; nested exception is java.lang.NoClassDefFoundError: com/lambdaworks/redis/api/StatefulRedisConnection

Your dependency is wrong. Try:
<dependency>
<groupId>io.lettuce</groupId>
<artifactId>lettuce-core</artifactId>
<version>${lettuce.version}</version>
</dependency>
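For reference, here is a minimal sketch (my own illustration, not part of the accepted fix) of an explicit standalone configuration against the Spring Data Redis 1.8.x API that Boot 1.5.x ships with, assuming Redis listens on localhost:6379. It would replace the two @Bean methods in the question's RedisApplication class:

@Bean
public LettuceConnectionFactory lettuceConnectionFactory() {
    // Assumption: a local standalone Redis instance on the default port.
    LettuceConnectionFactory factory = new LettuceConnectionFactory();
    factory.setHostName("localhost");
    factory.setPort(6379);
    return factory;
}

@Bean
public RedisTemplate<String, User> redisTemplate() {
    RedisTemplate<String, User> template = new RedisTemplate<>();
    template.setConnectionFactory(lettuceConnectionFactory());
    // Store keys as plain strings so they are readable in redis-cli.
    template.setKeySerializer(new StringRedisSerializer());
    return template;
}

StringRedisSerializer comes from org.springframework.data.redis.serializer. The no-argument constructor already defaults to localhost:6379, so the explicit setters only matter when Redis runs elsewhere.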

Related

Issue in Kafka Spring boot test cases for AdminClient

I am writing unit test cases for the class below. I'm trying to mock AdminClient so that I can call the createTopic method shown below, but I am getting a NullPointerException.
@Service
public class TopicService {

    private static final Logger LOG = LoggerFactory.getLogger(TopicService.class);

    @Autowired
    private AdminClient adminClient;

    public void createTopic(Topic topic) throws ExecutionException, InterruptedException {
        adminClient
            .createTopics(Collections.singletonList(ServiceHelper.fromTopic(topic)))
            .values()
            .get(topic.getName())
            .get();
    }
}
The unit test case is as follows:
package org.kafka.service;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.internals.KafkaFutureImpl;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kafka.model.Topic;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.*;
import java.util.concurrent.ExecutionException;
import static org.apache.kafka.clients.admin.AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.clients.admin.AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG;
import static org.apache.kafka.common.internals.Topic.GROUP_METADATA_TOPIC_NAME;
@Slf4j
@RunWith(SpringRunner.class)
@SpringBootTest(classes = {TopicService.class})
public class TopicServiceTest {

    @Autowired
    TopicService topicService;

    @MockBean
    AdminClient adminClient;

    ListTopicsResult listTopicsResult;
    KafkaFuture<Set<String>> future;
    NewTopic newTopic;
    Topic topic;
    Collection<NewTopic> topicList;
    CreateTopicsResult createTopicsResult;
    Void t;
    Map<String, KafkaFuture<Void>> futureMap;

    private static final String TARGET_CONSUMER_GROUP_ID = "target-group-id";
    private static final Map<String, Object> CONF = new HashMap<>();

    @BeforeClass
    public static void createAdminClient() {
        try {
            CONF.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            CONF.put(REQUEST_TIMEOUT_MS_CONFIG, 120000);
            CONF.put("zookeeper.connect", "localhost:21891");
            AdminClient adminClient = AdminClient.create(CONF);
        } catch (Exception e) {
            throw new RuntimeException("create kafka admin client error", e);
        }
    }

    @Before
    public void setUp() {
        topicList = new ArrayList<>();
        newTopic = new NewTopic("topic-7", 1, (short) 1);
        topicList.add(newTopic);
        futureMap = new HashMap<>();
        topic = new Topic();
        topic.setName("topic-1");
    }

    @Test
    public void createTopic() throws ExecutionException, InterruptedException {
        Properties consumerProperties = new Properties();
        Mockito.when(adminClient.createTopics(topicList))
                .thenReturn(Mockito.mock(CreateTopicsResult.class));
        Mockito.when(adminClient.createTopics(topicList).values())
                .thenReturn(Mockito.mock(Map.class));
        Mockito.when(adminClient.createTopics(topicList)
                .values()
                .get(GROUP_METADATA_TOPIC_NAME)).thenReturn(Mockito.mock(KafkaFutureImpl.class));
        Mockito.when(adminClient.createTopics(topicList)
                .values()
                .get(GROUP_METADATA_TOPIC_NAME)
                .get()).thenReturn(t);
        topicService.createTopic(topic);
    }
}
package org.kafka.config;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.kafka.reader.Kafka;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.stereotype.Component;
import java.util.HashMap;
import java.util.Map;
@Component
public class AdminConfigurer {

    @Autowired
    private Kafka kafkaConfig;

    @Bean
    public Map<String, Object> kafkaAdminProperties() {
        final Map<String, Object> configs = new HashMap<>();
        configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getBootstrapServers());
        if (kafkaConfig.getProperties().getSasl().getEnabled() && kafkaConfig.getSsl().getEnabled()) {
            configs.put("sasl.mechanism", kafkaConfig.getProperties().getSasl().getMechanism());
            configs.put("security.protocol", kafkaConfig.getProperties().getSasl().getSecurity().getProtocol());
            configs.put("ssl.keystore.location", kafkaConfig.getSsl().getKeystoreLocation());
            configs.put("ssl.keystore.password", kafkaConfig.getSsl().getKeystorePassword());
            configs.put("ssl.truststore.location", kafkaConfig.getSsl().getTruststoreLocation());
            configs.put("ssl.truststore.password", kafkaConfig.getSsl().getTruststorePassword());
            configs.put("sasl.jaas.config", String.format(kafkaConfig.getJaasTemplate(),
                    kafkaConfig.getProperties().getSasl().getJaas().getConfig().getUsername(),
                    kafkaConfig.getProperties().getSasl().getJaas().getConfig().getPassword()));
            configs.put("ssl.endpoint.identification.algorithm", "");
        }
        return configs;
    }

    @Bean
    public AdminClient getClient() {
        return AdminClient.create(kafkaAdminProperties());
    }
}
I expected the test case to run successfully, but I'm getting the error below.
java.lang.NullPointerException
at org.kafka.service.TopicService.createTopic(TopicService.java:57)
at org.kafka.service.TopicServiceTest.createTopic(TopicServiceTest.java:100)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
I'm using Spring version 2.7.1 with the following client dependencies.
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.1.1</version>
<scope>test</scope>
<classifier>test</classifier>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
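For what it's worth, the chained stubbing above can be written with argument matchers so that the NewTopic collection built inside TopicService.createTopic() does not have to equal the list created in setUp(). This is only a sketch of mine, reusing the test's existing fields, not an answer from the thread:

// Inside the @Test method, replacing the chained Mockito.when(...) calls:
CreateTopicsResult createResult = Mockito.mock(CreateTopicsResult.class);
// Match any collection, since the service builds its own NewTopic list internally.
Mockito.when(adminClient.createTopics(Mockito.anyCollection())).thenReturn(createResult);
// Return an already-completed future under the topic name the service looks up ("topic-1").
Mockito.when(createResult.values())
        .thenReturn(Collections.singletonMap("topic-1", KafkaFuture.<Void>completedFuture(null)));

topicService.createTopic(topic);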

Spring Boot/Hibernate: Not Found error when I test my code

I am trying to create a project in Spring Boot, and when I test my app via Postman I get the following error:
{
"status": 404,
"error": "Not Found",
"message": "",
"path": "/ api / customers-list"
}
Let me explain in detail what the problems were.
I initially encountered the following error:
APPLICATION FAILED TO START
Description:
Cannot determine embedded database driver class for database type NONE
Action:
If you want an embedded database please put a supported one on the classpath. If you have database settings to be loaded from a particular profile you may need to activate it (no profiles are currently active).
which I solved by simply inserting the following line in application.properties (I don't know if that's a good approach, but it seems to have fixed it anyway):
spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration
Subsequently I ran into the error above (i.e. the Not Found one). I checked carefully for typos in the URL and verified that I was running the right application; everything seems correct, yet I cannot interact with the app through Postman because of the Not Found response.
How can I solve this problem? I hope someone will help me.
Code:
pom.xml
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>runtime</scope>
</dependency>
</dependencies>
Config.java
package pack_cap_Config;
import java.util.Properties;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.ComponentScans;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.hibernate5.HibernateTransactionManager;
import org.springframework.orm.hibernate5.LocalSessionFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
@Configuration
@EnableTransactionManagement
@EnableAutoConfiguration(exclude = { HibernateJpaAutoConfiguration.class })
@ComponentScans(value = { @ComponentScan("boot.entry"),
        @ComponentScan("Model"),
        @ComponentScan("Controller"),
        @ComponentScan("DAO"),
        @ComponentScan("Miscallaneous"),
        @ComponentScan("Service") })
public class Config {

    @Value("${db.driver}")
    private String DB_DRIVER;

    @Value("${db.password}")
    private String DB_PASSWORD;

    @Value("${db.url}")
    private String DB_URL;

    @Value("${db.username}")
    private String DB_USERNAME;

    @Value("${hibernate.dialect}")
    private String HIBERNATE_DIALECT;

    @Value("${hibernate.show_sql}")
    private String HIBERNATE_SHOW_SQL;

    @Value("${hibernate.hbm2ddl.auto}")
    private String HIBERNATE_HBM2DDL_AUTO;

    @Value("${entitymanager.packagesToScan}")
    private String ENTITYMANAGER_PACKAGES_TO_SCAN;

    @Bean
    public LocalSessionFactoryBean sessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource());
        sessionFactory.setPackagesToScan(ENTITYMANAGER_PACKAGES_TO_SCAN);
        Properties hibernateProperties = new Properties();
        hibernateProperties.put("hibernate.dialect", HIBERNATE_DIALECT);
        hibernateProperties.put("hibernate.show_sql", HIBERNATE_SHOW_SQL);
        hibernateProperties.put("hibernate.hbm2ddl.auto", HIBERNATE_HBM2DDL_AUTO);
        sessionFactory.setHibernateProperties(hibernateProperties);
        return sessionFactory;
    }

    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(DB_DRIVER);
        dataSource.setUrl(DB_URL);
        dataSource.setUsername(DB_USERNAME);
        dataSource.setPassword(DB_PASSWORD);
        return dataSource;
    }

    @Bean
    public HibernateTransactionManager transactionManager() {
        HibernateTransactionManager txManager = new HibernateTransactionManager();
        txManager.setSessionFactory(sessionFactory().getObject());
        return txManager;
    }

    @Bean
    public InternalResourceViewResolver jspViewResolver() {
        InternalResourceViewResolver resolver = new InternalResourceViewResolver();
        resolver.setPrefix("/views/");
        resolver.setSuffix(".jsp");
        return resolver;
    }
}
package pack_cap_Controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import pack_cap_Model.Customer;
import pack_cap_Service.CaP_Service;
@RestController
@CrossOrigin(origins = "http://localhost:4200")
@RequestMapping(value = "/api")
public class Controller {

    @Autowired
    private CaP_Service capservice;

    @GetMapping("customers-list")
    public List<Customer> allcustomers() {
        return capservice.getCustomers();
    }
}
application.properties
# Database
db.driver= com.mysql.cj.jdbc.Driver
db.url= jdbc:mysql://localhost:3306/acq
db.username=root
db.password=123456
# Hibernate
hibernate.dialect=org.hibernate.dialect.MySQL5Dialect
hibernate.show_sql=true
hibernate.hbm2ddl.auto=update
entitymanager.packagesToScan=pack_cap_Model
spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration
I solved it: the problem was the Spring Boot directory structure; I had the project's folders organised incorrectly.
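For context, the conventional layout (a generic sketch, not the poster's exact project) keeps the @SpringBootApplication class in a root package and all controllers, services and entities in sub-packages beneath it, so the default component scan starting at the main class actually finds the @RestController:

// Hypothetical root package; the real project would use its own names.
package com.example.acq;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Scans com.example.acq and every sub-package (controller, service, model, dao, ...).
@SpringBootApplication
public class AcqApplication {
    public static void main(String[] args) {
        SpringApplication.run(AcqApplication.class, args);
    }
}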

Spring Boot with Spring Batch and JPA configuration

I have a simple batch application that reads a CSV file into a Postgres database.
I have uploaded the code to the repo below:
https://github.com/soasathish/spring-batch-with-jpa.git
I have problems configuring the database writer using Spring Data JPA.
I am getting a 'managed bean not found' issue.
The same Spring Data JPA configuration works in a different project; when I try to integrate it with Spring Batch it fails with 'managed bean not found'.
The batch config has a Spring Batch job with only one step:
1) The reader reads from the CSV files.
2) The processor applies some rules to the records (Drools).
3) The writer uses Spring Data JPA to write to the database.
Please run schema-postgresql.sql to set up the database.
Could someone help?
I know it's a minor issue, but any direction or help would be appreciated.
Code for creating the repo:
=======================
package uk.gov.iebr.batch.config;
import static uk.gov.iebr.batch.config.AppProperties.DRIVER_CLASS_NAME;
import static uk.gov.iebr.batch.config.AppProperties.IEBR_DB_PASSWORD_KEY;
import static uk.gov.iebr.batch.config.AppProperties.IEBR_DB_URL_KEY;
import static uk.gov.iebr.batch.config.AppProperties.IEBR_DB_USER_KEY;
import java.util.Properties;
import javax.sql.DataSource;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@Configuration
@PropertySource({"classpath:application.properties"})
@EnableJpaRepositories({"uk.gov.iebr.batch.repository"})
@EnableTransactionManagement
@ComponentScan(basePackages = "uk.gov.iebr.batch.repository")
public class DataSourceConfiguration {

    @Autowired
    Environment env;

    @Bean(name = "allsparkEntityMF")
    public LocalContainerEntityManagerFactoryBean allsparkEntityMF() {
        final LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean();
        em.setDataSource(allsparkDS());
        em.setPersistenceUnitName("allsparkEntityMF");
        em.setPackagesToScan(new String[] { "uk.gov.iebr.batch" });
        em.setPackagesToScan(new String[] { "uk.gov.iebr.batch.repository" });
        em.setPersistenceProvider(new HibernatePersistenceProvider());
        HibernateJpaVendorAdapter a = new HibernateJpaVendorAdapter();
        em.setJpaVendorAdapter(a);
        Properties p = hibernateSpecificProperties();
        p.setProperty("hibernate.ejb.entitymanager_factory_name", "allsparkEntityMF");
        em.setJpaProperties(p);
        return em;
    }

    @Bean(name = "allsparkDS")
    public DataSource allsparkDS() {
        final DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(env.getProperty(DRIVER_CLASS_NAME));
        dataSource.setUrl(env.getProperty(IEBR_DB_URL_KEY));
        dataSource.setUsername(env.getProperty(IEBR_DB_USER_KEY));
        dataSource.setPassword(env.getProperty(IEBR_DB_PASSWORD_KEY));
        return dataSource;
    }

    @Bean
    public Properties hibernateSpecificProperties() {
        final Properties p = new Properties();
        p.setProperty("hibernate.hbm2ddl.auto", env.getProperty("spring.jpa.hibernate.ddl-auto"));
        p.setProperty("hibernate.dialect", env.getProperty("spring.jpa.hibernate.dialect"));
        p.setProperty("hibernate.show-sql", env.getProperty("spring.jpa.show-sql"));
        p.setProperty("hibernate.cache.use_second_level_cache", env.getProperty("spring.jpa.hibernate.cache.use_second_level_cache"));
        p.setProperty("hibernate.cache.use_query_cache", env.getProperty("spring.jpa.hibernate.cache.use_query_cache"));
        return p;
    }

    @Bean(name = "defaultTm")
    public PlatformTransactionManager transactionManager() {
        JpaTransactionManager txManager = new JpaTransactionManager();
        txManager.setEntityManagerFactory(allsparkEntityMF().getObject());
        return txManager;
    }
}
Batch config file:
package uk.gov.iebr.batch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.PropertySource;
import uk.gov.iebr.batch.config.AllSparkDataSourceConfiguration;
import uk.gov.iebr.batch.config.DataSourceConfiguration;
import uk.gov.iebr.batch.dao.PersonDao;
import uk.gov.iebr.batch.model.Person;
import uk.gov.iebr.batch.step.Listener;
import uk.gov.iebr.batch.step.Processor;
import uk.gov.iebr.batch.step.Reader;
import uk.gov.iebr.batch.step.Writer;
@Configuration
@EnableBatchProcessing
// spring boot configuration
@EnableAutoConfiguration
// file that contains the properties
@PropertySource("classpath:application.properties")
@Import({DataSourceConfiguration.class, AllSparkDataSourceConfiguration.class})
public class BatchConfig {

    private static final Logger log = LoggerFactory.getLogger(BatchConfig.class);

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public PersonDao PersonDao;

    @Autowired
    public DataSourceConfiguration dataSourceConfiguration;

    @Bean
    public Job job() {
        long startTime = System.currentTimeMillis();
        log.info("START OF BATCH ========================================================================" + startTime);
        return jobBuilderFactory.get("job").incrementer(new RunIdIncrementer())
                //.listener(new Listener(PersonDao))
                .flow(step1()).end().build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1").<Person, Person>chunk(10)
                .reader(Reader.reader("tram-data.csv"))
                .processor(new Processor()).writer(new Writer(PersonDao)).build();
    }
}
Writer calls this PersonDaoImpl:
public class PersonDaoImpl implements PersonDao {

    @Autowired
    DataSourceConfiguration dataSource;

    @Autowired
    PersonRepository personrepo;

    @Override
    public void insert(List<? extends Person> Persons) {
        personrepo.save(Persons);
    }
}
Based on the code you provided and the stack trace in your comment, it's complaining that it can't find a @Bean named entityManagerFactory.
This is happening because you are using @EnableJpaRepositories, and its entityManagerFactoryRef property defaults to entityManagerFactory. This property defines the name of the @Bean for the EntityManagerFactory.
I think your application configuration is preventing the normal Spring Boot auto-configuration from being processed.
I would recommend removing the IEBRFileProcessApplication class and following this example for configuring your Spring Boot application (you could use ServletInitializer if you want).
@SpringBootApplication
public class Application extends SpringBootServletInitializer {

    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(Application.class);
    }

    public static void main(String[] args) throws Exception {
        SpringApplication.run(Application.class, args);
    }
}
I also can't really see a need for DataSourceConfiguration and AllSparkDataSourceConfiguration, so I would recommend removing them. If you really need to specify your own DataSource, let me know and I can provide an additional example.
Between the @SpringBootApplication and @EnableBatchProcessing annotations, everything that is necessary will be bootstrapped for you.
All you need on BatchConfig is @Configuration and @EnableBatchProcessing.
If you make these changes to simplify your code base, then your problems should disappear.
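For reference, a sketch of what the trimmed-down BatchConfig could look like (class names and step wiring reused from the question; this is an illustration, not code taken from the pull request):

@Configuration
@EnableBatchProcessing
public class BatchConfig {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;

    @Autowired
    public StepBuilderFactory stepBuilderFactory;

    @Autowired
    public PersonDao personDao;

    @Bean
    public Job job() {
        return jobBuilderFactory.get("job")
                .incrementer(new RunIdIncrementer())
                .flow(step1()).end().build();
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1").<Person, Person>chunk(10)
                .reader(Reader.reader("tram-data.csv"))
                .processor(new Processor())
                .writer(new Writer(personDao))
                .build();
    }
}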
UPDATE:
I created a pull request located here https://github.com/soasathish/spring-batch-with-jpa/pull/1
Please take a look at the javadoc here for an explanation of how @EnableBatchProcessing works: http://docs.spring.io/spring-batch/apidocs/org/springframework/batch/core/configuration/annotation/EnableBatchProcessing.html
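If the custom entity manager factory bean does have to stay, another option (an assumption on my part, not something the answer states) is to point the repositories at it explicitly through the entityManagerFactoryRef and transactionManagerRef attributes, using the bean names already defined in the question:

@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(
        basePackages = "uk.gov.iebr.batch.repository",
        entityManagerFactoryRef = "allsparkEntityMF",   // @Bean(name = "allsparkEntityMF") above
        transactionManagerRef = "defaultTm")            // @Bean(name = "defaultTm") above
public class DataSourceConfiguration {
    // ... same allsparkEntityMF(), allsparkDS() and transactionManager() beans as in the question
}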

java.lang.NoClassDefFoundError: org/springframework/orm/hibernate4/SpringSessionContext

I am trying to implement Spring MVC 4 and Hibernate 4 integration with annotations in my project, but I am getting this error:
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'sessionFactory' defined in com.config.ApplicationContextConfig: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.hibernate.SessionFactory]: Factory method 'getSessionFactory' threw exception; nested exception is java.lang.NoClassDefFoundError: org/springframework/orm/hibernate4/SpringSessionContext
My config file is:
import javax.sql.DataSource;
import java.util.Properties;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.hibernate4.HibernateTransactionManager;
import org.springframework.orm.hibernate4.LocalSessionFactoryBean;
import org.springframework.orm.hibernate4.LocalSessionFactoryBuilder;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.orm.hibernate4.SpringSessionContext;
import com.dao.UserDao;
import com.dao.UserDaoImpl;
import com.pojo.User;
@Configuration
@ComponentScan("com.config")
@EnableTransactionManagement
public class ApplicationContextConfig {

    @Bean(name = "viewResolver")
    public InternalResourceViewResolver getViewResolver() {
        InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
        viewResolver.setPrefix("/WEB-INF/views/");
        viewResolver.setSuffix(".jsp");
        return viewResolver;
    }

    @Bean(name = "dataSource")
    public DataSource dataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
        dataSource.setUrl("jdbc:mysql://localhost:3306/usersdb");
        dataSource.setUsername("root");
        dataSource.setPassword("");
        return dataSource;
    }

    @Autowired
    @Bean(name = "sessionFactory")
    public SessionFactory getSessionFactory(DataSource dataSource) {
        LocalSessionFactoryBuilder sessionBuilder = new LocalSessionFactoryBuilder(dataSource);
        sessionBuilder.addAnnotatedClasses(User.class);
        return sessionBuilder.buildSessionFactory();
    }

    private Properties getHibernateProperties() {
        Properties properties = new Properties();
        properties.put("hibernate.show_sql", "true");
        properties.put("hibernate.dialect", "org.hibernate.dialect.MySQLDialect");
        return properties;
    }

    @Autowired
    @Bean(name = "transactionManager")
    public HibernateTransactionManager getTransactionManager(SessionFactory sessionFactory) {
        HibernateTransactionManager transactionManager = new HibernateTransactionManager(sessionFactory);
        return transactionManager;
    }

    @Autowired
    @Bean(name = "userDao")
    public UserDao getUserDao(SessionFactory sessionFactory) {
        return new UserDaoImpl(sessionFactory);
    }
}
It appears that you need, but do not have, spring-orm-4.3.0.RELEASE.jar on your classpath. (I have identified the 4.3.0 version; you may be using a different 4.x version.)
If you are using Maven, add the appropriate dependency to your pom.xml.
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
<version>4.3.0.RELEASE</version>
<scope>runtime</scope>
</dependency>
If you are not using Maven, you can download the jar file here.
For information on setting the classpath see this section of the Java Tutorial.

Overriding Bean Configuration with annotations

Use case: I have my container configured via classpath scanning (@ComponentScan). For my test configuration I need the ability to mock specific beans.
Due to the order of loading, beans loaded via classpath scan are not overridden properly when using @Configuration. The following code samples demonstrate the problem. BaseExample.java shows how it is possible to override beans via configuration. ScanExample.java shows that overriding a bean that was loaded via @ComponentScan is skipped (see the final note).
A demo project is available on bitbucket.
// BaseExample.java
package com.glassworks.mock;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.google.common.base.Joiner;
public class BaseExample {

    private static final Logger log = LoggerFactory.getLogger(BaseExample.class);
    private static AnnotationConfigApplicationContext ctx;

    public static void main(String args[]) {
        ctx = new AnnotationConfigApplicationContext(Config.class, OverrideConfig.class);
        String beans[] = ctx.getBeanDefinitionNames();
        log.info("{} beans found: {}", beans.length, Joiner.on(",").join(beans));
        for (String bean : beans) {
            log.info("{}: {}", bean, ctx.getBean(bean));
        }
    }

    @Configuration
    public static class Config {
        @Bean
        public AccountDao accountDao() {
            log.debug("Creating accountDao [Config]");
            return new AccountDao();
        }
    }

    @Configuration
    public static class OverrideConfig {
        @Bean
        public Object accountDao() {
            log.debug("Creating accountDao [OverrideConfig]");
            return Mockito.mock(AccountDao.class);
        }
    }
}
Output:
21:05 INFO | com.glassworks.mock.BaseExample | accountDao: Mock for AccountDao, hashCode: 666537607
// ScanExample.java
package com.glassworks.mock;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import com.google.common.base.Joiner;
public class ScanExample {

    private static final Logger log = LoggerFactory.getLogger(ScanExample.class);
    private static AnnotationConfigApplicationContext ctx;

    public static void main(String args[]) {
        ctx = new AnnotationConfigApplicationContext(Config.class, OverrideConfig.class);
        String beans[] = ctx.getBeanDefinitionNames();
        log.info("{} beans found: {}", beans.length, Joiner.on(",").join(beans));
        for (String bean : beans) {
            log.info("{}: {}", bean, ctx.getBean(bean));
        }
    }

    @Configuration
    @ComponentScan("com.glassworks.services")
    public static class Config {
    }

    @Configuration
    public static class OverrideConfig {
        @Bean
        public AccountDao accountDao() {
            log.debug("Creating accountDao [OverrideConfig]");
            return Mockito.mock(AccountDao.class);
        }
    }
}
Output:
21:08 INFO | com.glassworks.mock.ScanExample | accountDao: com.glassworks.services.AccountDao@48805ebb
// AccountDao.java
package com.glassworks.services;
import org.springframework.stereotype.Repository;
@Repository
public class AccountDao {
}
Note
It's worth noting that with logging set to debug, Spring indicates that it is skipping over the definition. This appears to be a bug.
21:09 DEBUG | o.s.c.a.ConfigurationClassBeanDefinitionReader | Skipping loading bean definition for [BeanMethod:name=accountDao,declaringClass=com.glassworks.mock.ScanExample$OverrideConfig]: a definition for bean 'accountDao' already exists. This is likely due to an override in XML.
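One workaround I am aware of (not claimed anywhere in this post) is to register the mock under a different bean name and mark it @Primary, so by-type injection prefers it even though the component-scanned accountDao definition is still registered:

// Requires org.springframework.context.annotation.Primary in addition to the imports above.
@Configuration
public static class OverrideConfig {

    // Distinct bean name, so it does not collide with the scanned 'accountDao' definition;
    // @Primary makes it win wherever an AccountDao is injected by type.
    @Bean
    @Primary
    public AccountDao mockAccountDao() {
        return Mockito.mock(AccountDao.class);
    }
}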
