Singleton property on springboot - spring-boot

Can I create one singleton property in spring boot?
When I use this:
public class MessengerPlatformCallbackHandler {
#Scope(value = "singleton")
private Map<String, Object> conversationID = new HashMap<>();
I got the error: @Scope is not applicable to a field.
Thanks.

You need to create it this way.
#Configuration
public class ConversationIDConfig {
#Bean
#Scope(value = "singleton")
public Map<String, Object> conversationId(){
private Map<String, Object> conversationID = new HashMap<>();
}
}
And later you can inject it wherever you want, as below.
public class MessengerPlatformCallbackHandler {
#Autowired
private Map<String, Object> conversationID;
}

You need to create it this way.
#Configuration
public class ConversationIDConfig {
#Bean
public Map<String, Object> conversationId(){
return new HashMap<>();
}
}
And later you can inject it wherever you want, as below.
public class MessengerPlatformCallbackHandler {
#Autowired
private Map<String, Object> conversationId;
}

Related

Simple embedded Kafka test example with spring boot

Edit FYI: working gitHub example
I was searching the internet and couldn't find a working and simple example of an embedded Kafka test.
My setup is:
Spring boot
Multiple @KafkaListener methods with different topics in one class
Embedded Kafka for test which is starting fine
A test with KafkaTemplate which is sending to the topic, but the
@KafkaListener methods are not receiving anything, even after a huge sleep time
No warnings or errors are shown, only info spam from Kafka in logs
Please help me. There are mostly over configured or overengineered examples. I am sure it can be done simple.
Thanks, guys!
#Controller
public class KafkaController {
private static final Logger LOG = getLogger(KafkaController.class);
#KafkaListener(topics = "test.kafka.topic")
public void receiveDunningHead(final String payload) {
LOG.debug("Receiving event with payload [{}]", payload);
//I will do database stuff here which i could check in db for testing
}
}
private static String SENDER_TOPIC = "test.kafka.topic";
#ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, SENDER_TOPIC);
#Test
public void testSend() throws InterruptedException, ExecutionException {
Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
KafkaProducer<Integer, String> producer = new KafkaProducer<>(senderProps);
producer.send(new ProducerRecord<>(SENDER_TOPIC, 0, 0, "message00")).get();
producer.send(new ProducerRecord<>(SENDER_TOPIC, 0, 1, "message01")).get();
producer.send(new ProducerRecord<>(SENDER_TOPIC, 1, 0, "message10")).get();
Thread.sleep(10000);
}
Embedded Kafka tests work for me with below configs,
Annotation on test class
#EnableKafka
#SpringBootTest(classes = {KafkaController.class}) // Specify #KafkaListener class if its not the same class, or not loaded with test config
#EmbeddedKafka(
partitions = 1,
controlledShutdown = false,
brokerProperties = {
"listeners=PLAINTEXT://localhost:3333",
"port=3333"
})
public class KafkaConsumerTest {
#Autowired
KafkaEmbedded kafkaEmbeded;
#Autowired
KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
Before annotation for setup method
#Before
public void setUp() throws Exception {
for (MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry.getListenerContainers()) {
ContainerTestUtils.waitForAssignment(messageListenerContainer,
kafkaEmbeded.getPartitionsPerTopic());
}
}
Note: I am not using @ClassRule for creating embedded Kafka; instead I am auto-wiring it with @Autowired embeddedKafka.
#Test
public void testReceive() throws Exception {
kafkaTemplate.send(topic, data);
}
Hope this helps!
Edit: Test configuration class marked with #TestConfiguration
#TestConfiguration
public class TestConfig {
#Bean
public ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<>(KafkaTestUtils.producerProps(kafkaEmbedded));
}
#Bean
public KafkaTemplate<String, String> kafkaTemplate() {
KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(producerFactory());
kafkaTemplate.setDefaultTopic(topic);
return kafkaTemplate;
}
Now the @Test method will autowire KafkaTemplate and use it to send a message.
kafkaTemplate.send(topic, data);
Updated answer code block with above line
Since the accepted answer doesn't compile or work for me, I found another solution, based on https://blog.mimacom.com/testing-apache-kafka-with-spring-boot/, which I would like to share with you.
The dependency is 'spring-kafka-test' version: '2.2.7.RELEASE'
#RunWith(SpringRunner.class)
#EmbeddedKafka(partitions = 1, topics = { "testTopic" })
#SpringBootTest
public class SimpleKafkaTest {
private static final String TEST_TOPIC = "testTopic";
#Autowired
EmbeddedKafkaBroker embeddedKafkaBroker;
#Test
public void testReceivingKafkaEvents() {
Consumer<Integer, String> consumer = configureConsumer();
Producer<Integer, String> producer = configureProducer();
producer.send(new ProducerRecord<>(TEST_TOPIC, 123, "my-test-value"));
ConsumerRecord<Integer, String> singleRecord = KafkaTestUtils.getSingleRecord(consumer, TEST_TOPIC);
assertThat(singleRecord).isNotNull();
assertThat(singleRecord.key()).isEqualTo(123);
assertThat(singleRecord.value()).isEqualTo("my-test-value");
consumer.close();
producer.close();
}
private Consumer<Integer, String> configureConsumer() {
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("testGroup", "true", embeddedKafkaBroker);
consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
Consumer<Integer, String> consumer = new DefaultKafkaConsumerFactory<Integer, String>(consumerProps)
.createConsumer();
consumer.subscribe(Collections.singleton(TEST_TOPIC));
return consumer;
}
private Producer<Integer, String> configureProducer() {
Map<String, Object> producerProps = new HashMap<>(KafkaTestUtils.producerProps(embeddedKafkaBroker));
return new DefaultKafkaProducerFactory<Integer, String>(producerProps).createProducer();
}
}
I solved the issue now
#BeforeClass
public static void setUpBeforeClass() {
System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
System.setProperty("spring.cloud.stream.kafka.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
}
While I was debugging, I saw that the embedded Kafka server is taking a random port.
I couldn't find the configuration for it, so I am setting the kafka config same as the server. Looks still a bit ugly for me.
I would love to have just the line @Mayur mentioned:
@EmbeddedKafka(partitions = 1, controlledShutdown = false, brokerProperties = {"listeners=PLAINTEXT://localhost:9092", "port=9092"})
but can't find the right dependency in the internet.
In integration testing, having fixed ports like 9092 is not recommended because multiple tests should have the flexibility to open their own ports from embedded instances. So, following implementation is something like that,
NB: this implementation is based on junit5(Jupiter:5.7.0) and spring-boot 2.3.4.RELEASE
TestClass:
#EnableKafka
#SpringBootTest(classes = {ConsumerTest.Config.class, Consumer.class})
#EmbeddedKafka(
partitions = 1,
controlledShutdown = false)
#TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class ConsumerTest {
#Autowired
private EmbeddedKafkaBroker kafkaEmbedded;
#Autowired
private KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;
#BeforeAll
public void setUp() throws Exception {
for (final MessageListenerContainer messageListenerContainer : kafkaListenerEndpointRegistry.getListenerContainers()) {
ContainerTestUtils.waitForAssignment(messageListenerContainer,
kafkaEmbedded.getPartitionsPerTopic());
}
}
#Value("${topic.name}")
private String topicName;
#Autowired
private KafkaTemplate<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> requestKafkaTemplate;
#Test
public void consume_success() {
requestKafkaTemplate.send(topicName, load);
}
#Configuration
#Import({
KafkaListenerConfig.class,
TopicConfig.class
})
public static class Config {
#Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
#Bean
public ProducerFactory<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> requestProducerFactory() {
final Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
return new DefaultKafkaProducerFactory<>(configProps);
}
#Bean
public KafkaTemplate<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> requestKafkaTemplate() {
return new KafkaTemplate<>(requestProducerFactory());
}
}
}
Listener Class:
#Component
public class Consumer {
#KafkaListener(
topics = "${topic.name}",
containerFactory = "listenerContainerFactory"
)
#Override
public void listener(
final ConsumerRecord<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> consumerRecord,
final #Payload Optional<Map<String, List<ImmutablePair<String, String>>>> payload
) {
}
}
Listener Config:
#Configuration
public class KafkaListenerConfig {
#Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
#Value(value = "${topic.name}")
private String resolvedTreeQueueName;
#Bean
public ConsumerFactory<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> resolvedTreeConsumerFactory() {
final Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
props.put(ConsumerConfig.GROUP_ID_CONFIG, resolvedTreeQueueName);
return new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(), new CustomDeserializer());
}
#Bean
public ConcurrentKafkaListenerContainerFactory<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> resolvedTreeListenerContainerFactory() {
final ConcurrentKafkaListenerContainerFactory<String, Optional<Map<String, List<ImmutablePair<String, String>>>>> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(resolvedTreeConsumerFactory());
return factory;
}
}
TopicConfig:
#Configuration
public class TopicConfig {
#Value(value = "${spring.kafka.bootstrap-servers}")
private String bootstrapAddress;
#Value(value = "${topic.name}")
private String requestQueue;
#Bean
public KafkaAdmin kafkaAdmin() {
Map<String, Object> configs = new HashMap<>();
configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
return new KafkaAdmin(configs);
}
#Bean
public NewTopic requestTopic() {
return new NewTopic(requestQueue, 1, (short) 1);
}
}
application.properties:
spring.kafka.bootstrap-servers=${spring.embedded.kafka.brokers}
This assignment is the most important assignment that would bind the embedded instance port to the KafkaTemplate and, KafkaListners.
Following the above implementation, you could open dynamic ports per test class and, it would be more convenient.

Spring boot register an instance as a bean

I am trying to register Datasource instance as a bean in java code(spring-boot project)
Here is what I wrote. (This code is not working.)
#Configuration
public class DatabaseConfig {
private Logger logger = Logger.getLogger(DatabaseConfig.class);
#Autowired
ApplicationContext context;
private Map<String, Map<String, String>> dsMap;
private Map<String, String> getTestDataSourceInfo () {
Map<String, String> ds = new HashMap<String, String> ();
ds.put("driverClassName", "com.mysql.jdbc.Driver");
ds.put("url", "jdbc:mysql://123.456.78.912:3306/test");
ds.put("username", "testuser");
ds.put("password", "testuser");
return ds;
}
public DatabaseConfig () {
this.dsMap = new HashMap<String, Map<String, String>>();
dsMap.put("sampleDs", getTestDataSourceInfo());
}
#PostConstruct
public void loadDataSource () {
logger.info("DS ================================ :: " + String.valueOf(this.dsMap));
this.dsMap.forEach((k,v) -> {
logger.info("value ========================== :: " + String.valueOf(v));
DataSource aSource = DataSourceBuilder.create()
.driverClassName(v.get("driverClassName"))
.url(v.get("url"))
.username(v.get("username"))
.password(v.get("password"))
.build();
// PROBLEM STARTS ..............
// Add datasource instance with name to context
context.getAutowireCapableBeanFactory().autowireBean(aSource);
});
}
}
Is there any proper way to register bean with an instance?
I could not find any fine samples for this.
FYI, What I have expected in above code is...
Spring boot application will read above class as a Configure
It will make an Java instance in its constructor
And it will add the instance as a bean to application context in loadDatasource method
However, it is not working. So I am curious about how to add an java instance as a bean to current Spring boot application context.
Actually, it could be more easier if I did not do this with DataSource.
Since, Spring-boot automatically do configure dataSource, I have to disable this settings first.
Here is what I did to achieve the goal
In #Configuration Class...
#Configuration
public class DatabaseConfig {
private Logger logger = Logger.getLogger(DatabaseConfig.class);
#Autowired
ApplicationContext context;
private Map<String, Map<String, String>> dsMap;
private Map<String, String> getTestDataSourceInfo () {
Map<String, String> ds = new HashMap<String, String> ();
ds.put("driverClassName", "${ your driverClassName }");
ds.put("url", "${ your url }");
ds.put("username", "${ your user }");
ds.put("password", "${ your password }");
return ds;
}
public DatabaseConfig () {
this.dsMap = new HashMap<String, Map<String, String>>();
dsMap.put("sampleDs1", getTestDataSourceInfo());
dsMap.put("sampleDs2", getTestDataSourceInfo());
}
#PostConstruct
public void loadDataSource () {
BeanDefinitionRegistry registry = (BeanDefinitionRegistry) context.getAutowireCapableBeanFactory();
this.dsMap.forEach((k,v) -> {
BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(BasicDataSource.class);
v.forEach((ds_key, ds_val) -> {
builder.addPropertyValue(ds_key, ds_val);
});
BeanDefinition def = builder.getBeanDefinition();
if(!registry.containsBeanDefinition(k)) registry.registerBeanDefinition(k, def);
});
}
}
In above class, I could add java instances to spring bean with BeanDefinitionRegistry and BeanDefinitionBuilder.
If this is just a bean, it would be end here, but what you are trying to add is DataSource bean, have to do some extra work.
Since, boot automatically setting DataSource, we have to disable that setting to register customized datasource.
In your #SpringbootApplication class, add #EnableAutoConfiguration(exclude={DataSourceAutoConfiguration.class}).
Now, you are able to use those beans in other beans via @Autowired and @Qualifier.
Thanks.

spring boot and freemarker configure TemplateDirectiveModel and TemplateMethodModelEx

I have a project built with Spring Boot + FreeMarker, and it worked fine until tonight, but I don't think I had changed anything; however it failed. Below is my FreeMarker configuration class:
#Configuration
#Slf4j
public class FreemarkerConfiguration extends FreeMarkerAutoConfiguration.FreeMarkerWebConfiguration {
/**
* autowired all implementations of freemarker.template.TemplateDirectiveModel
*/
#Autowired
Map<String, TemplateDirectiveModel> directiveModelMap;
/**
* autowired all implementations of freemarker.template.TemplateMethodModelEx
*/
#Autowired
Map<String, TemplateMethodModelEx> methodModelExMap;
private static final String CUSTOM_DIRECTIVE_SUFFIX = "Directive";
private static final String CUSTOM_METHOD_SUFFIX = "Method";
#Override
public FreeMarkerConfigurer freeMarkerConfigurer() {
FreeMarkerConfigurer configurer = super.freeMarkerConfigurer();
Map<String, Object> sharedVariables = new HashMap<String, Object>();
if (!CollectionUtils.isEmpty(directiveModelMap)) {
Map<String, Object> map = new HashMap<String, Object>();
for (Map.Entry<String, TemplateDirectiveModel> entry : directiveModelMap.entrySet()) {
map.put(StringUtils.uncapitalize(entry.getKey()).replaceAll(CUSTOM_DIRECTIVE_SUFFIX, ""), entry.getValue());
}
sharedVariables.putAll(map);
}
if (!CollectionUtils.isEmpty(this.methodModelExMap)) {
Map<String, Object> map = new HashMap<String, Object>();
for (Map.Entry<String, TemplateMethodModelEx> entry : this.methodModelExMap.entrySet()) {
map.put(StringUtils.uncapitalize(entry.getKey()).replaceAll(CUSTOM_METHOD_SUFFIX, ""), entry.getValue());
}
sharedVariables.putAll(map);
}
BeansWrapper beansWrapper = new BeansWrapperBuilder(freemarker.template.Configuration.DEFAULT_INCOMPATIBLE_IMPROVEMENTS).build();
sharedVariables.put("enums", beansWrapper.getEnumModels());
configurer.setFreemarkerVariables(sharedVariables);
return configurer;
}
}
Problem is that
#Autowired
Map<String, TemplateDirectiveModel> directiveModelMap;
#Autowired
Map<String, TemplateMethodModelEx> methodModelExMap;
I want to inject all implementations of TemplateDirectiveModel and TemplateMethodModelEx, but both Map&lt;String, TemplateDirectiveModel/TemplateMethodModelEx&gt; got null. Of course, the implementations are annotated with @Component. I don't know why; I compared the diffs but got no answers. Why are the Maps instantiated after
#Override
public FreeMarkerConfigurer freeMarkerConfigurer(){ .... }
Here's my boot application
#Configuration
#SpringBootApplication
#EntityScan("com.hmxx.entity")
#EnableAspectJAutoProxy
#EnableTransactionManagement
#EnableJpaRepositories(value = {"com.hmxx.service"})
public class Application implements CommandLineRunner {
public static void main(String[] args) {
SpringApplication app = new SpringApplication(new Object[]{Application.class});
app.setWebEnvironment(true);
//app.setBannerMode(Banner.Mode.CONSOLE);
ConfigurableApplicationContext ctx = app.run(args);
Map<String, TemplateDirectiveModel> directiveModelMap = ctx.getBeansOfType(TemplateDirectiveModel.class);
Map<String, TemplateMethodModelEx> methodModelExMap = ctx.getBeansOfType(TemplateMethodModelEx.class);
}
#Autowired
DataInitService dataInitService;
#Override
public void run(String... args) throws Exception {
// dataInitService.initAdminUser();
}
}
And obviously Map&lt;String, TemplateDirectiveModel&gt; and Map&lt;String, TemplateMethodModelEx&gt; are both non-null there.
I want to know why the injection got null and hope to resolve it.

How can I enable MultiPartFeature?

My JAX-RS application has an extended Application class.
#ApplicationPath("/")
public class MyApplication extends Application {
// empty; really empty
}
How can I enable org.glassfish.jersey.media.multipart.MultiPartFeature without modifying the class? Or without the necessity of registering all resource classes/packages?
Not sure why you don't just use a ResourceConfig instead of an Application class. The only reason I can think of is portability, but the use of the Jersey specific multipart feature already breaks that portability.
But anyway, I'll try to answer this in the "most portable" way. What you can do is set a property, as you would in a web.xml. To set arbitrary properties, you can override
#Override
public Map<String, Object> getProperties() {}
in the Application subclass, and set the properties there.
#Override
public Map<String, Object> getProperties() {
Map<String, Object> props = new HashMap<>();
props.put("jersey.config.server.provider.classnames",
"org.glassfish.jersey.media.multipart.MultiPartFeature");
return props;
}
This will maintain the classpath scanning for your resources and providers. The scanning is only disabled if you override getClasses() or getSingletons() (and return non-empty sets), but getProperties() is fine.
Another Option:
Create a Feature to wrap that feature, and let the feature be discovered, as seen here
Personally, I would...
Just use a ResourceConfig, as you're already breaking portability (what's a little more breakage :-)
#ApplicationPath("/")
public class AppConfig extends ResourceConfig {
public AppConfig() {
packages("packages.to.scan");
register(MultiPartFeature.class);
}
}
For me worked like below:
// Programmatic Jersey setup (e.g. embedded Jetty): build the ResourceConfig,
// point it at the resource package, enable multipart, and wrap it in a servlet.
final ResourceConfig resourceConfig = new ResourceConfig(ApplicationConfig.class);
resourceConfig.packages("com.econorma.rest");
resourceConfig.register(MultiPartFeature.class);
ServletHolder jerseyServlet = new ServletHolder(new ServletContainer(resourceConfig));
This is ApplicationConfig class
#ApplicationPath("/")
public class ApplicationConfig extends Application {
#Override
public Set<Class<?>> getClasses() {
final Set<Class<?>> resources = new HashSet<Class<?>>();
resources.add(MultiPartFeature.class);
resources.add(EntryPoint.class);
return resources;
}
#Override
public Map<String, Object> getProperties() {
Map<String, Object> properties = new HashMap<String, Object>();
properties.put("jersey.config.server.provider.packages", "com.econorma.rest");
return properties;
}
}

Cache with redis cache manager, redisTemplate and multiple serializers

I need to cache multiple types like:
public Country findCountry(String countryName)
and:
public List<Destination> findAllDestinations(String countryName)
I am using RedisCacheManager and RedisTemplate only support only one serializer.
It is solved now after some research.
change spring-data-redis to 1.4.2.RELEASE
extend RedisCacheManager with your class with cache map to serializer (cacheName->serializer) and caches names
overrides the getCache method(Cache getCache(String name)) and based on cache name, set the serializer name in the redis template
use your customized cache manager
Example -
public class CustomRedisCacheManager extends RedisCacheManager
{
public static final String CACHE_NAME_DEFAULT = "default";
public static final String CACHE_NAME_COUNTRY = "country";
public static final String CACHE_NAME_DESTINATIONS = "destinations";
private Map<String, RedisCache> redisCaches = new HashMap<>();
public CustomRedisCacheManager(Map<String, RedisTemplate> redisTemplates)
{
super(redisTemplates.get(CACHE_NAME_DEFAULT), redisTemplates.keySet());
redisTemplates.keySet().stream().forEach(cacheName -> redisCaches.put(cacheName, new RedisCache(cacheName, null, redisTemplates.get(cacheName), 0)));
}
#Override
public Cache getCache(String cacheName)
{
return redisCaches.get(cacheName);
}
}
#Configuration
#EnableCaching
public class RedisConfiguration extends CachingConfigurerSupport
{
#Bean
public JedisConnectionFactory jedisConnectionFactory()
{
JedisConnectionFactory factory = new JedisConnectionFactory();
factory.setHostName(redisHostName);
factory.setPort(redisPort);
factory.setTimeout(100);
return factory;
}
#Bean
public CacheManager cacheManager()
{
Map<String, RedisTemplate> templates = new HashMap<>();
templates.put(CACHE_NAME_DEFAULT, getDefaultRedisTemplate());
templates.put(CACHE_NAME_COUNTRY, getMetadataRedisTemplate());
templates.put(CACHE_NAME_DESTINATIONS, getDestinationsRedisTemplate());
SabreRedisCacheManager sabreRedisCacheManager = new SabreRedisCacheManager(templates);
return sabreRedisCacheManager;
}
#Bean
public RedisTemplate<Object, Object> getDefaultRedisTemplate()
{
return getBaseRedisTemplate();
}
#Bean
public RedisTemplate<Object, Object> getCountryRedisTemplate()
{
RedisTemplate<Object, Object> redisTemplate = getBaseRedisTemplate();
redisTemplate.setValueSerializer(jsonRedisSerializer(Country.class));
return redisTemplate;
}
#Bean
public RedisTemplate<Object, Object> getDestinationsRedisTemplate()
{
RedisTemplate<Object, Object> redisTemplate = getBaseRedisTemplate();
redisTemplate.setValueSerializer(jsonRedisSerializer(TypeFactory.defaultInstance().constructCollectionType(List.class, Destination.class)));
return redisTemplate;
}
private RedisTemplate<Object, Object> getBaseRedisTemplate()
{
RedisTemplate<Object, Object> redisTemplate = new RedisTemplate<>();
redisTemplate.setConnectionFactory(jedisConnectionFactory());
redisTemplate.setKeySerializer(stringRedisSerializer());
redisTemplate.setHashKeySerializer(stringRedisSerializer());
redisTemplate.setValueSerializer(jsonRedisSerializer(Object.class));
return redisTemplate;
}
private Jackson2JsonRedisSerializer jsonRedisSerializer(Class type)
{
return jsonRedisSerializer(TypeFactory.defaultInstance().constructType(type));
}
private Jackson2JsonRedisSerializer jsonRedisSerializer(JavaType javaType)
{
Jackson2JsonRedisSerializer jackson2JsonRedisSerializer = new Jackson2JsonRedisSerializer(javaType);
jackson2JsonRedisSerializer.setObjectMapper(new JsonObjectMapper());
return jackson2JsonRedisSerializer;
}
}

Resources