java.lang.NoSuchMethodError in Java application which uses Ehcache for cache implementation - ehcache-2

I am facing the below error when I run my Java application, which uses the Spring framework and Ehcache for its cache implementation. The application is built on IBM Integration Bus, which uses Java and the Spring framework.
The full error stack is as below:
+BIP2230E (Msg 2/4) MQJLMBRK ARCI511 147 ERROR DETECTED WHILST PROCESSING A MESSAGE IN NODE 'ARFAL_MobilePortProtection01.FailureHandler.Transform Failure'.
+BIP4367E (Msg 3/4) MQJLMBRK ARCI511 147 THE METHOD 'evaluate' IN JAVA NODE 'FailureHandler.Transform Failure' HAS THROWN THE FOLLOWING EXCEPTION: java.lang.NoSuchMethodError: org/ehcache/jcache/JCacheManager.getEhCacheNativeCacheManager()Lnet/sf/ehcache/CacheManager;.
+BIP4395E (Msg 4/4) MQJLMBRK ARCI511 147 JAVA EXCEPTION: 'java.lang.NoSuchMethodError'; THROWN FROM CLASS NAME: 'com.anz.common.cache.impl.LocalCacheHandler', METHOD NAME: 'getCacheManager', FILE: 'LocalCacheHandler.java', LINE: '99'
The code where the error is triggered is as below:
/**
*
*/
package com.anz.common.cache.impl;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.net.MalformedURLException;
import java.net.URL;
import javax.cache.Cache;
import javax.cache.configuration.MutableConfiguration;
import javax.management.MBeanServer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.ehcache.jcache.JCacheManager;
import net.sf.ehcache.CacheException;
import net.sf.ehcache.management.ManagementService;
/**
* Ehcache Cache Handler using the JCache (JSR-107) standard API
* Cache Handler Factory -> Cache Handler -> Caching Provider -> Cache Manager -> Cache
* @author sanketsw
*
*/
public class LocalCacheHandler extends AbstractCacheHandler {
private static final Logger logger = LogManager.getLogger();
public static LocalCacheHandler _inst = null;
public LocalCacheHandler() throws Exception {
super();
}
/*public static LocalCacheHandler getInstance() throws Exception {
if (_inst == null) {
_inst = new LocalCacheHandler();
}
return _inst;
}*/
@Override
public String getDefaultCacheName() {
return "DefaultMap";
}
@Override
public String getCachingProviderName() {
return "org.ehcache.jcache.JCacheCachingProvider";
}
/* (non-Javadoc)
* @see com.anz.common.cache.impl.AbstractCacheHandler#getCache(java.lang.String)
*/
@Override
public Cache<String, String> getCache(String cacheName) throws CacheException, Exception {
Cache<String, String> cache = null;
try {
//logger.debug("Retrieving cache {}", cacheName);
cache = cacheManager.getCache(cacheName);
} catch(Exception e) {
//logger.debug("Retrieving cache using type classes {}", cacheName);
try {
cache = cacheManager.getCache(cacheName, String.class, String.class);
}catch(Exception e2) {
logger.throwing(e2);
}
}
if (cache == null) {
//logger.debug("Starting cache {}", cacheName);
MutableConfiguration<String, String> jcacheConfig = new MutableConfiguration<String, String>();
jcacheConfig.setTypes(String.class, String.class);
cache = cacheManager.createCache(cacheName, jcacheConfig);
}
return cache;
}
/* (non-Javadoc)
* @see com.anz.common.cache.impl.AbstractCacheHandler#getCacheManager()
*/
@Override
public javax.cache.CacheManager getCacheManager() throws Exception {
javax.cache.CacheManager ret = super.getCacheManager();
try {
// Register for JMX management
JCacheManager ehCacheManager = (JCacheManager)ret;
MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
ehCacheManager.getEhCacheNativeCacheManager().setName("LocalCacheManager");
//logger.info("printing mBeanServer {}", mBeanServer);
//logger.info("printing EhCacheNativeCacheManager {}", ehCacheManager.getEhCacheNativeCacheManager());
ManagementService.registerMBeans(ehCacheManager.getEhCacheNativeCacheManager(), mBeanServer, true, true, true, true);
}catch(Exception e) {
//logger.info("net.sf.ehcache:type=CacheManager,name=LocalCacheManager is already registered for JMX management. Ignoring...");
//logger.info(e.getMessage());
}
return ret;
}
@Override
public String getCacheManagerURI() {
String path = System.getenv("CACHE_CONFIG");
//logger.info("System property CACHE_CONFIG={}",path);
File configFile = new File(path + "/" + "ehcache-localcache.xml");
if(configFile.exists()) {
try {
return configFile.toURI().toURL().toString();
} catch (MalformedURLException e) {
logger.throwing(e);
}
} else {
URL resource = LocalCacheHandler.class.getResource("ehcache-localcache.xml");
if(resource != null) {
//logger.warn("Loading a backup config file={}",resource);
return resource.toString();
}
}
//logger.warn("Could not load the resource {}", "ehcache-localcache.xml");
return null;
}
}
Any clue what the issue could be?
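A NoSuchMethodError that shows up only at runtime usually means the JCacheManager class being loaded is a different build than the one the code was compiled against (for example, a second or older ehcache-jcache/ehcache jar on the broker's shared classpath). As a rough diagnostic sketch (not from the original code, and assuming both libraries are visible to the application classloader), you can print where each class is actually loaded from and which methods it really exposes:
import java.lang.reflect.Method;
import org.ehcache.jcache.JCacheManager;
public class ClasspathCheck {
    public static void main(String[] args) {
        // Which jar JCacheManager is loaded from at runtime
        System.out.println(JCacheManager.class.getProtectionDomain().getCodeSource().getLocation());
        // Which jar the native Ehcache CacheManager is loaded from
        System.out.println(net.sf.ehcache.CacheManager.class.getProtectionDomain().getCodeSource().getLocation());
        // The methods actually present on JCacheManager at runtime
        for (Method m : JCacheManager.class.getDeclaredMethods()) {
            System.out.println(m);
        }
    }
}
If getEhCacheNativeCacheManager() is missing from that list, the jar versions on the runtime classpath do not match the ones used at compile time.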

Related

Message is not consumed by all consumers when a network of brokers is configured in ActiveMQ

I have 2 instances of my application on the same machine (although they could be on different machines as well), with two Tomcat instances on different ports, and Apache ActiveMQ is embedded in the application.
I have configured a static network of brokers so that a message from one instance can be consumed by all other instances as well (each instance can be both producer and consumer).
Servlet:
package com.activemq.servlet;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import javax.jms.JMSException;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.activemq.ActiveMQStartup;
import com.activemq.MQPublisher;
import com.activemq.SendMsg;
import com.activemq.SendMsgToAllInstance;
import com.activemq.TestPublisher;
/**
* Servlet implementation class ActiveMQStartUpServlet
*/
@WebServlet(value = "/activeMQStartUpServlet", loadOnStartup = 1)
public class ActiveMQStartUpServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private ActiveMQStartup mqStartup = null;
private static final Map pooledPublishers = new HashMap();
@Override
public void init(ServletConfig config) throws ServletException {
System.out.println("starting servlet--------------");
super.init(config);
//Apache Active MQ Startup
mqStartup = new ActiveMQStartup();
mqStartup.startBrokerService();
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
System.out.println(req.getParameter("distributedMsg"));
String mqConfig = null;
String distributedMsg = req.getParameter("distributedMsg");
String simpleMsg = req.getParameter("simpleMsg");
if (distributedMsg != null && !distributedMsg.equals(""))
mqConfig = "distributedMsg";
else if (simpleMsg != null && !simpleMsg.equals(""))
mqConfig = "simpleMsg";
MQPublisher publisher = acquirePublisher(mqConfig);
try {
publisher.publish(mqConfig);
} catch (JMSException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
releasePublisher(publisher);
}
}
@SuppressWarnings("unchecked")
private void releasePublisher(MQPublisher publisher) {
if (publisher == null) return;
@SuppressWarnings("rawtypes")
LinkedList publishers;
TestPublisher poolablePublisher = (TestPublisher)publisher;
publishers = getPooledPublishers(poolablePublisher.getConfigurationName());
synchronized (publishers) {
publishers.addLast(poolablePublisher);
}
}
private MQPublisher acquirePublisher(String mqConfig) {
LinkedList publishers = getPooledPublishers(mqConfig);
MQPublisher publisher = getMQPubliser(publishers);
if (publisher != null) return publisher;
try {
if (mqConfig.equals("distributedMsg"))
return new TestPublisher(MQConfiguration.getConfiguration("distributedMsg"), new SendMsgToAllInstance());
else
return new TestPublisher(MQConfiguration.getConfiguration("simpleMsg"), new SendMsg());
}catch(Exception e){
e.printStackTrace();
}
return null;
}
private LinkedList getPooledPublishers(String mqConfig) {
LinkedList publishers = null;
publishers = (LinkedList) pooledPublishers.get(mqConfig);
if (publishers == null) {
synchronized(pooledPublishers) {
publishers = (LinkedList) pooledPublishers.get(mqConfig);
if (publishers == null) {
publishers = new LinkedList();
pooledPublishers.put(mqConfig, publishers);
}
}
}
return publishers;
}
private MQPublisher getMQPubliser(LinkedList publishers) {
synchronized (publishers) {
while (!publishers.isEmpty()) {
TestPublisher publisher = (TestPublisher)publishers.removeFirst();
return publisher;
}
}
return null;
}
}
Configuration:
package com.activemq.servlet;
import java.util.HashMap;
import java.util.Map;
import javax.jms.JMSException;
import javax.jms.Topic;
import javax.jms.TopicConnection;
import javax.jms.TopicConnectionFactory;
import javax.jms.TopicSession;
import org.apache.activemq.ActiveMQConnectionFactory;
import com.activemq.ActiveMQContext;
public class MQConfiguration {
private static final Map configurations = new HashMap();
private String mqConfig;
private String topicName;
private TopicConnection topicConnection = null;
private MQConfiguration(String mqConfig, String string, String string2) {
this.mqConfig = mqConfig;
try {
String topicFactoryConName = ActiveMQContext.getProperty(mqConfig);
this.topicName = (mqConfig.equals("distributedMsg") ? ActiveMQContext.getProperty("distributedTopic"):ActiveMQContext.getProperty("normalTopic"));
TopicConnectionFactory factory = (ActiveMQConnectionFactory) ActiveMQContext.getContext()
.lookup(topicFactoryConName);
this.topicConnection = factory.createTopicConnection();
this.topicConnection.start();
} catch (Exception e) {
System.out.println("error: " + e);
}
}
public static MQConfiguration getConfiguration(String mqConfig) {
if (mqConfig == null || "".equals(mqConfig)) {
throw new IllegalArgumentException("mqConfig is null or empty");
}
MQConfiguration config = null;
if (config != null) {
return config;
}
synchronized (configurations) {
config = (MQConfiguration) configurations.get(mqConfig);
if (config == null) {
config = new MQConfiguration(mqConfig, "userName", "userPassword");
}
configurations.put(mqConfig, config);
}
return config;
}
public String getMqConfig() {
return this.mqConfig;
}
public TopicSession createTopicSession(boolean isTransacted, int autoAcknowledge) throws JMSException {
if (this.topicConnection == null) {
IllegalStateException ise = new IllegalStateException("topic connection not configured");
throw ise;
}
return this.topicConnection.createTopicSession(isTransacted, autoAcknowledge);
}
public Topic getTopic() {
try {
return (Topic) ActiveMQContext.getContext().lookup(this.topicName);
} catch (Exception e) {
e.getMessage();
}
return null;
}
}
Publisher:
package com.activemq;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.jms.Topic;
import javax.jms.TopicPublisher;
import javax.jms.TopicSession;
import com.activemq.servlet.MQConfiguration;
public class TestPublisher implements MQPublisher {
private final String configurationName;
private TopicSession topicSession = null;
private TopicPublisher topicPublisher = null;
public TestPublisher(MQConfiguration config, Object messageListener) throws JMSException {
if (config == null) {
throw new IllegalArgumentException("config == null");
}
Topic topic = config.getTopic();
this.configurationName = config.getMqConfig();
this.topicSession = config.createTopicSession(false, Session.AUTO_ACKNOWLEDGE);
this.topicPublisher = this.topicSession.createPublisher(topic);
MessageConsumer msgConsumer = this.topicSession.createConsumer(topic);
msgConsumer.setMessageListener((MessageListener) messageListener);
}
@Override
public void publish(String msg) throws JMSException {
this.topicPublisher.publish(createMessage(msg, this.topicSession));
}
private Message createMessage(String msg, Session session) throws JMSException {
TextMessage message = session.createTextMessage(msg);
return message;
}
public String getConfigurationName() {
return this.configurationName;
}
}
Consumer:
package com.activemq;
import javax.jms.Message;
import javax.jms.MessageListener;
public class SendMsgToAllInstance implements MessageListener {
@Override
public void onMessage(Message arg0) {
System.out.println("distributed message-------------");
// We have a call to the DAO layer to fetch some data and cache it
}
}
JNDI: activemq-jndi.properties
# JNDI properties file to setup the JNDI server within ActiveMQ
#
# Default JNDI properties settings
#
java.naming.factory.initial=org.apache.activemq.jndi.ActiveMQInitialContextFactory
java.naming.provider.url=tcp://localhost:61616
activemq.network.connector=static:(tcp://localhost:61620)
#activemq.network.connector=broker:(tcp://localhost:61619,network:static:tcp://localhost:61620)?persistent=false&useJmx=true
activemq.data.directory=data61619
activemq.jmx.port=1099
#
# Set the connection factory name(s) as well as the destination names. The connection factory name(s)
# as well as the second part (after the dot) of the left hand side of the destination definition
# must be used in the JNDI lookups.
#
connectionFactoryNames = distributedMsgFactory,simpleMsgFactory
topic.jms/distributedTopic=distributedTopic
topic.jms/normalTopic=normalTopic
distributedMsg=distributedMsgFactory
simpleMsg=simpleMsgFactory
distributedTopic=jms/distributedTopic
normalTopic=jms/normalTopic
ActiveMQStartup:
package com.activemq;
import java.net.URI;
import org.apache.activemq.broker.BrokerPlugin;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.TransportConnector;
import org.apache.activemq.broker.jmx.ManagementContext;
import org.apache.activemq.network.NetworkConnector;
import org.apache.activemq.security.JaasAuthenticationPlugin;
public class ActiveMQStartup {
private final String bindAddress;
private final String dataDirectory;
private BrokerService broker = new BrokerService();
protected final int numRestarts = 3;
protected final int networkTTL = 2;
protected final int consumerTTL = 2;
protected final boolean dynamicOnly = true;
protected final String networkBroker;
protected final String jmxPort;
public ActiveMQStartup() {
ActiveMQContext context = new ActiveMQContext();
context.loadJndiProperties();
bindAddress = ActiveMQContext.getProperty("java.naming.provider.url");
dataDirectory = ActiveMQContext.getProperty("activemq.data.directory");
networkBroker = ActiveMQContext.getProperty("activemq.network.connector");
jmxPort = ActiveMQContext.getProperty("activemq.jmx.port");
}
// Start activemq broker service
public void startBrokerService() {
try {
broker.setDataDirectory("../" + dataDirectory);
broker.setBrokerName(dataDirectory);
broker.setUseShutdownHook(true);
TransportConnector connector = new TransportConnector();
connector.setUri(new URI(bindAddress));
//broker.setPlugins(new BrokerPlugin[]{new JaasAuthenticationPlugin()});
ManagementContext mgContext = new ManagementContext();
if (networkBroker != null && !networkBroker.isEmpty()) {
NetworkConnector networkConnector = broker.addNetworkConnector(networkBroker);
networkConnector.setName(dataDirectory);
mgContext.setConnectorPort(Integer.parseInt(jmxPort));
broker.setManagementContext(mgContext);
configureNetworkConnector(networkConnector);
}
broker.setNetworkConnectorStartAsync(true);
broker.addConnector(connector);
broker.start();
} catch (Exception e) {
System.out.println("Failed to start Apache MQ Broker : " + e);
}
}
private void configureNetworkConnector(NetworkConnector networkConnector) {
networkConnector.setDuplex(true);
networkConnector.setNetworkTTL(networkTTL);
networkConnector.setDynamicOnly(dynamicOnly);
networkConnector.setConsumerTTL(consumerTTL);
//networkConnector.setStaticBridge(true);
}
// Stop broker service
public void stopBrokerService() {
try {
broker.stop();
} catch (Exception e) {
System.out.println("Unable to stop the ApacheMQ Broker service " + e);
}
}
}
I am starting the Tomcat instances one by one and can see that the network connection between the brokers is getting established.
When I send a message from instance1 or instance2 (the first time), it is consumed on that instance only, but when I send a message from the second instance it is consumed by both.
Code in git: https://github.com/AratRana/ApacheActiveMQ
Could you point out where I am wrong?
Finally, I was able to do it. When I started the consumer during server startup, I could see the message being consumed in all instances. So to achieve this, the consumers need to be started before publishing any message.
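As a rough sketch of that fix using the classes already shown (assuming the pooling methods behave as posted): the servlet's init() can pre-create one publisher per configuration, since it is the TestPublisher constructor that calls createConsumer()/setMessageListener() on the topic, so the listener exists before any message is published from another instance.
@Override
public void init(ServletConfig config) throws ServletException {
    super.init(config);
    mqStartup = new ActiveMQStartup();
    mqStartup.startBrokerService();
    // Warm up the publisher pool so the consumers (MessageListeners) are
    // registered on every instance before the first message is sent.
    for (String mqConfig : new String[] { "distributedMsg", "simpleMsg" }) {
        releasePublisher(acquirePublisher(mqConfig));
    }
}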

AWS Lambda Spring: not able to load properties file

I have a problem with loading the properties file that I reference values from. Locally I am able to run it as expected, but the AWS Lambda function does not work as expected since it is not able to load the properties file. Below is the handler I have written. I deploy the MainHandler.java function on Lambda.
@Component
public class TestHandler implements RequestHandler<SNSEvent, Object> {
@Override
public String handleRequest(SNSEvent snsEvent, Context context) {
TestClient testClient = Application.getBean("pp", TestClient.class);
return null;
}
}
MainHandler.java
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import com.amazonaws.services.lambda.runtime.events.SNSEvent;
import com.test.lambda.ApplicationConfiguration;
public class MainHandler extends SpringRequestHandler<SNSEvent, Object> {
/**
* Here we create the Spring {@link ApplicationContext} that will
* be used throughout our application.
*/
private static final ApplicationContext context =
new AnnotationConfigApplicationContext(ApplicationConfiguration.class);
@Override
public ApplicationContext getApplicationContext() {
return context;
}
}
SpringRequestHandler.java
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import org.springframework.context.ApplicationContext;
@SuppressWarnings("unchecked")
public abstract class SpringRequestHandler<I, O> implements RequestHandler<I, O>, ApplicationContextProvider {
private final RequestHandler<I, O> handler;
public SpringRequestHandler() {
handler = getApplicationContext().getBean(RequestHandler.class);
}
@Override
public O handleRequest(final I input, final Context context) {
return (O) handler.handleRequest(input, context);
}
}
Application.java
public class Application {
private static final AnnotationConfigApplicationContext springContext = new AnnotationConfigApplicationContext();
private static boolean flag = Boolean.TRUE;
private static final XLogger logger = XLoggerFactory.getXLogger(Application.class);
public static <T> T getBean(String env, Class<T> clazz) {
InputStream i1 = null;
InputStream i2 = null;
if(flag) {
Properties rp = new Properties();
Properties ap = new Properties();
try {
System.out.println("print env " + env);
i1 = Application.class.getResourceAsStream("/application-" + env + ".properties");
rp.load(i1);
i2 = Application.class.getResourceAsStream("/application-" + env + ".properties");
ap.load(i2);
PropertyPlaceholderConfigurer propertyOverrideConfigurer = new PropertyPlaceholderConfigurer();
propertyOverrideConfigurer.setPropertiesArray(new Properties[]{rp,ap});
springContext.scan(new String[]{"com.pinto.lambda"});
springContext.addBeanFactoryPostProcessor(propertyOverrideConfigurer);
try {
springContext.refresh();
}catch(IllegalStateException e) {
}
flag = Boolean.FALSE;
} catch (Exception e) {
logger.error("Exception in the Application - " +e.getMessage());
throw new RuntimeException("Unable to load properties " + e.getMessage());
}
return springContext.getBean(clazz);
}
ERROR TRACE - AWS CONSOLE LOGS
==================== FUNCTION OUTPUT ====================
{"errorMessage":"Unable to load properties null","errorType":"java.lang.RuntimeException","stackTrace":["com.pinto.lambda.Application.getBean(Application.java:65)","com.pinto.lambda.handler.GetWarehouseInventoryHandler.handleRequest(TestHandler.java:44)","com.pinto.lambda.handler.GetWarehouseInventoryHandler.handleRequest(TestHandler.java:1)","com.pinto.lambda.handler.SpringRequestHandler.handleRequest(SpringRequestHandler.java:19)"]}
==================== FUNCTION LOG OUTPUT ====================
print env pp
[ERROR] Exception in the Application - null
Unable to load properties null: java.lang.RuntimeException
java.lang.RuntimeException: Unable to load properties null
at com.pinto.lambda.Application.getBean(Application.java:65)
at com.pinto.lambda.handler.TestHandler.handleRequest(TestHandler.java:44)
at com.pinto.lambda.handler.TestHandler.handleRequest(TestHandler.java:1)
at com.pinto.lambda.handler.SpringRequestHandler.handleRequest(SpringRequestHandler.java:19)
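The trailing "null" in "Unable to load properties null" is consistent with a NullPointerException from Properties.load(), which is what happens when getResourceAsStream() returns null, i.e. the file is not on the Lambda jar's classpath under that name. As a small defensive sketch (assuming the file is meant to be packaged at the jar root, e.g. /application-pp.properties, which is an illustrative name), failing fast with the resource name makes the real problem visible:
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
public final class PropertiesLoader {
    public static Properties load(String env) {
        String resource = "/application-" + env + ".properties";
        try (InputStream in = PropertiesLoader.class.getResourceAsStream(resource)) {
            if (in == null) {
                // Fail fast with the missing resource name instead of an NPE whose message is null
                throw new IllegalStateException("Resource not found on classpath: " + resource);
            }
            Properties props = new Properties();
            props.load(in);
            return props;
        } catch (IOException e) {
            throw new IllegalStateException("Unable to load " + resource, e);
        }
    }
}
If the fail-fast path triggers on Lambda but not locally, the properties file is simply not being packaged into the deployed jar, or it sits under a different path there.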

How to use Apache CachingHttpAsyncClient with Spring AsyncRestTemplate?

Is it possible to use CachingHttpAsyncClient with AsyncRestTemplate? HttpComponentsAsyncClientHttpRequestFactory expects a CloseableHttpAsyncClient but CachingHttpAsyncClient does not extend it.
This is known as issue SPR-15664 for versions up to 4.3.9 and 5.0.RC2 - fixed in 4.3.10 and 5.0.RC3. The only way around it is creating a custom AsyncClientHttpRequestFactory implementation that is based on the existing HttpComponentsAsyncClientHttpRequestFactory:
// package required for HttpComponentsAsyncClientHttpRequest visibility
package org.springframework.http.client;
import java.io.IOException;
import java.net.URI;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.Configurable;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.cache.CacheConfig;
import org.apache.http.impl.client.cache.CachingHttpAsyncClient;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClients;
import org.apache.http.protocol.HttpContext;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.http.HttpMethod;
import org.springframework.util.Assert;
// TODO add support for other CachingHttpAsyncClient options, e.g. HttpCacheStorage
public class HttpComponentsCachingAsyncClientHttpRequestFactory extends HttpComponentsClientHttpRequestFactory implements AsyncClientHttpRequestFactory, InitializingBean {
private final CloseableHttpAsyncClient wrappedHttpAsyncClient;
private final CachingHttpAsyncClient cachingHttpAsyncClient;
public HttpComponentsCachingAsyncClientHttpRequestFactory() {
this(HttpAsyncClients.createDefault(), CacheConfig.DEFAULT);
}
public HttpComponentsCachingAsyncClientHttpRequestFactory(final CacheConfig config) {
this(HttpAsyncClients.createDefault(), config);
}
public HttpComponentsCachingAsyncClientHttpRequestFactory(final CloseableHttpAsyncClient client) {
this(client, CacheConfig.DEFAULT);
}
public HttpComponentsCachingAsyncClientHttpRequestFactory(final CloseableHttpAsyncClient client, final CacheConfig config) {
Assert.notNull(client, "HttpAsyncClient must not be null");
wrappedHttpAsyncClient = client;
cachingHttpAsyncClient = new CachingHttpAsyncClient(client, config);
}
@Override
public void afterPropertiesSet() {
startAsyncClient();
}
private void startAsyncClient() {
if (!wrappedHttpAsyncClient.isRunning()) {
wrappedHttpAsyncClient.start();
}
}
@Override
public ClientHttpRequest createRequest(final URI uri, final HttpMethod httpMethod) throws IOException {
throw new IllegalStateException("Synchronous execution not supported");
}
@Override
public AsyncClientHttpRequest createAsyncRequest(final URI uri, final HttpMethod httpMethod) throws IOException {
startAsyncClient();
final HttpUriRequest httpRequest = createHttpUriRequest(httpMethod, uri);
postProcessHttpRequest(httpRequest);
HttpContext context = createHttpContext(httpMethod, uri);
if (context == null) {
context = HttpClientContext.create();
}
// Request configuration not set in the context
if (context.getAttribute(HttpClientContext.REQUEST_CONFIG) == null) {
// Use request configuration given by the user, when available
RequestConfig config = null;
if (httpRequest instanceof Configurable) {
config = ((Configurable) httpRequest).getConfig();
}
if (config == null) {
config = createRequestConfig(cachingHttpAsyncClient);
}
if (config != null) {
context.setAttribute(HttpClientContext.REQUEST_CONFIG, config);
}
}
return new HttpComponentsAsyncClientHttpRequest(cachingHttpAsyncClient, httpRequest, context);
}
@Override
public void destroy() throws Exception {
try {
super.destroy();
} finally {
wrappedHttpAsyncClient.close();
}
}
}
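Usage is then the same as with the stock factory; a short illustrative wiring (the CacheConfig values below are examples of mine, not part of the original answer):
import org.apache.http.impl.client.cache.CacheConfig;
import org.springframework.http.client.HttpComponentsCachingAsyncClientHttpRequestFactory;
import org.springframework.web.client.AsyncRestTemplate;
public class CachingAsyncRestTemplateExample {
    public static AsyncRestTemplate build() {
        CacheConfig cacheConfig = CacheConfig.custom()
                .setMaxCacheEntries(1000)      // illustrative sizing only
                .setMaxObjectSize(64 * 1024)
                .build();
        HttpComponentsCachingAsyncClientHttpRequestFactory factory =
                new HttpComponentsCachingAsyncClientHttpRequestFactory(cacheConfig);
        factory.afterPropertiesSet(); // starts the wrapped async client when not managed as a Spring bean
        return new AsyncRestTemplate(factory);
    }
}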

Spring Websocket + Stomp + SockJs

I built a chat application using the portfolio WebSocket sample as a guide. I am using Spring Boot 1.3.3, ActiveMQ and STOMP; the UI is built with KnockoutJS, and everything runs on a Windows 2012 server.
My issue is that after approximately 1000 connections to the chat server (Spring Boot), the server stops accepting any more connections.
I have played with different heartbeat settings and message size settings, etc., to no avail.
Has anyone built such a chat/WebSocket application and been able to achieve more than 1000 concurrent connections?
I have spent over a week researching and tweaking the code, and I also looked at the Windows 2012 server connection limit (it seems 2012 removed the TCP connection limit).
Any help or pointers will be greatly appreciated.
/**
*
*/
package com.test.chat;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.messaging.simp.config.ChannelRegistration;
import org.springframework.messaging.simp.config.MessageBrokerRegistry;
import org.springframework.messaging.simp.config.StompBrokerRelayRegistration;
import org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer;
import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;
import org.springframework.web.socket.config.annotation.SockJsServiceRegistration;
import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
import org.springframework.web.socket.config.annotation.WebSocketTransportRegistration;
import org.springframework.web.socket.server.standard.ServletServerContainerFactoryBean;
import com.test.chat.application.event.StompConnectEvent;
import com.test.chat.application.event.StompConnectedEvent;
import com.test.chat.application.event.StompDisconnectEvent;
/**
* @author pgobin
*
* https://www.youtube.com/watch?v=mmIza3L64Ic
*
*/
@Configuration
@EnableWebSocketMessageBroker
@ComponentScan("com.test.chat")
public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer {
private static final Logger log = Logger.getLogger(WebSocketConfig.class);
#Value("${StompBrokerRelay.host}")
String StompBrokerRelayHost;
#Value("${StompBrokerRelay.port}")
int StompBrokerRelayPort;
#Value("${MessageBroker.User}")
String brokerUser;
#Value("${MessageBroker.Password}")
String brokerPassword;
#Value("${MessageBrokerStompClient.User}")
String stompClientUser;
#Value("${MessageBrokerStompClient.Password}")
String stompClientPassword;
#Value("${sockjs.setHttpMessageCacheSize}")
int sockjs_setHttpMessageCacheSize;
#Value("${sockjs.setStreamBytesLimit}")
int sockjs_setStreamBytesLimit;
#Value("${sockjs.setDisconnectDelay:20000}")
int sockjs_setDisconnectDelay;
#Value("${sockjs.setHeartbeatTime:30000}")
int sockjs_setHeartbeatTime;
// WebSocketTransport settings
#Value("${WebSocketTransportRegistration.MessageSizeLimit:131072}")
int MessageSizeLimit;
#Value("${WebSocketTransportRegistration.SendTimeLimit:15000}")
int SendTimeLimit;
#Value("${WebSocketTransportRegistration.SendBufferSizeLimit:524288}")
int SendBufferSizeLimit;
// ClientOutboundChannel configs
#Value("${ClientOutboundChannel.corePoolSize:25}")
int ClientOutboundChannelcorePoolSize;
#Value("${ClientOutboundChannel.maxPoolSize:50}")
int ClientOutboundChannelmaxPoolSize;
// ClientInboundChannel configs
#Value("${ClientInboundChannel.corePoolSize:25}")
int ClientInboundChannelcorePoolSize;
#Value("${ClientInboundChannel.maxPoolSize:50}")
int ClientInboundChannelmaxPoolSize;
/****
*
*/
@Override
public void configureMessageBroker(MessageBrokerRegistry messageBrokerRegistry)
{
// Destination Prefix - Connect to default in-memory broker
// messageBrokerRegistry.enableSimpleBroker("/topic/", "/queue/");
// connect to AMQ
StompBrokerRelayRegistration broker = messageBrokerRegistry.enableStompBrokerRelay("/queue/", "/topic/");
broker.setRelayHost(StompBrokerRelayHost);
broker.setRelayPort(StompBrokerRelayPort);
broker.setSystemLogin(brokerUser);
broker.setSystemPasscode(brokerPassword);
broker.setClientLogin(stompClientUser);
broker.setClientPasscode(stompClientPassword);
// broker.setVirtualHost(virtualHost)
messageBrokerRegistry.setApplicationDestinationPrefixes("/app");
}
/*****
* https://github.com/rstoyanchev/spring-websocket-test/issues/4
*/
@Override
public void registerStompEndpoints(StompEndpointRegistry stompRegistry)
{
String wsOrigins = AppConfig.getEnv().getProperty("websocket.security.allow.origins", "http://localhost:8080");
log.info("#### ALLOWING MESSAGING ONLY FROM ORIGINS:" + wsOrigins + ". ALL OTHERS WILL BE BLOCKED ####");
String[] cors = StringUtils.split(AppConfig.getEnv().getProperty("websocket.security.allow.origins", "http://localhost:8080"), ",");
// WebSocket URL prefix
SockJsServiceRegistration reg = stompRegistry.addEndpoint("/chat").setAllowedOrigins(cors).withSockJS()
.setStreamBytesLimit(sockjs_setStreamBytesLimit).setDisconnectDelay(sockjs_setDisconnectDelay)
.setHttpMessageCacheSize(sockjs_setHttpMessageCacheSize).setHeartbeatTime(sockjs_setHeartbeatTime).setWebSocketEnabled(true)
.setSupressCors(false);
}
@Bean
public ServletServerContainerFactoryBean createWebSocketContainer()
{
ServletServerContainerFactoryBean container = new ServletServerContainerFactoryBean();
container.setMaxTextMessageBufferSize(8192);
container.setMaxBinaryMessageBufferSize(8192);
container.setAsyncSendTimeout(5000);
container.setMaxSessionIdleTimeout(600000);
return container;
}
@Override
public void configureWebSocketTransport(WebSocketTransportRegistration registration)
{
registration.setMessageSizeLimit(MessageSizeLimit);
registration.setSendTimeLimit(SendTimeLimit);
registration.setSendBufferSizeLimit(SendBufferSizeLimit);
}
/**
* Configure the {@link org.springframework.messaging.MessageChannel} used
* for outgoing messages to WebSocket clients. By default the channel is
* backed by a thread pool of size 1. It is recommended to customize thread
* pool settings for production use.
*/
@Override
public void configureClientOutboundChannel(ChannelRegistration registration)
{
registration.taskExecutor().corePoolSize(ClientOutboundChannelcorePoolSize).maxPoolSize(ClientOutboundChannelmaxPoolSize);
}
@Override
public void configureClientInboundChannel(ChannelRegistration registration)
{
registration.taskExecutor().corePoolSize(ClientInboundChannelcorePoolSize).maxPoolSize(ClientInboundChannelmaxPoolSize);
}
/***
* Intercepts a connect event
*
* #return
*/
@Bean
public StompConnectEvent presenceChannelInterceptorOnConnect(SimpMessagingTemplate messagingTemplate)
{
return new StompConnectEvent(messagingTemplate);
}
/*
* @Bean public StompConnectedEvent
* presenceChannelInterceptorOnConnected(SimpMessagingTemplate
* messagingTemplate) { return new StompConnectedEvent(messagingTemplate); }
*/
@Bean
public StompDisconnectEvent presenceChannelInterceptorOnDisconnect(SimpMessagingTemplate messagingTemplate)
{
return new StompDisconnectEvent(messagingTemplate);
}
}
And my client test code:
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.messaging.simp.stomp.ConnectionLostException;
import org.springframework.messaging.simp.stomp.StompCommand;
import org.springframework.messaging.simp.stomp.StompFrameHandler;
import org.springframework.messaging.simp.stomp.StompHeaders;
import org.springframework.messaging.simp.stomp.StompSession;
import org.springframework.messaging.simp.stomp.StompSessionHandlerAdapter;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.web.socket.WebSocketHttpHeaders;
import org.springframework.web.socket.client.standard.StandardWebSocketClient;
import org.springframework.web.socket.messaging.WebSocketStompClient;
import org.springframework.web.socket.sockjs.client.SockJsClient;
import org.springframework.web.socket.sockjs.client.Transport;
import org.springframework.web.socket.sockjs.client.WebSocketTransport;
public static void runTest(final long userUid, final int clientNum)
{
//String stompUrl = "ws://localhost:8080/chat";
ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
taskScheduler.afterPropertiesSet();
StandardWebSocketClient webSocketClient = new StandardWebSocketClient();
List<Transport> transports = new ArrayList<>();
transports.add(new WebSocketTransport(webSocketClient));
SockJsClient sockJsClient = new SockJsClient(transports);
WebSocketStompClient stompClient = new WebSocketStompClient(sockJsClient);
// stompClient.setMessageConverter(new StringMessageConverter());
stompClient.setMessageConverter(new org.springframework.messaging.converter.MappingJackson2MessageConverter());
stompClient.setTaskScheduler(taskScheduler);
stompClient.setDefaultHeartbeat(new long[] { 0, 0 });
ConsumerStompSessionHandler handler = new ConsumerStompSessionHandler(BROADCAST_MESSAGE_COUNT, connectLatch, subscribeLatch, messageLatch,
disconnectLatch, failure, clientNum);
HashMap<String, Object> params = new HashMap<String, Object>();
params.put("userid", userUid);
WebSocketHttpHeaders wsHeaders = new WebSocketHttpHeaders();
wsHeaders.add("userid", "" + userUid);
StompHeaders stompHeaders = new StompHeaders();
stompHeaders.add("userid", "" + userUid);
stompHeaders.add("channelID", "java-" + System.currentTimeMillis());
stompHeaders.add("platform", "Windows");
stompHeaders.add("clientIP", "10.1.1.1");
// stompClient.connect(stompUrl, handler, params);
stompClient.connect(stompUrl, wsHeaders, stompHeaders, handler, params);
}
private static class ConsumerStompSessionHandler extends StompSessionHandlerAdapter {
private final int expectedMessageCount;
private final CountDownLatch connectLatch;
private final CountDownLatch subscribeLatch;
private final CountDownLatch messageLatch;
private final CountDownLatch disconnectLatch;
private final AtomicReference<Throwable> failure;
private AtomicInteger messageCount = new AtomicInteger(0);
int clientNum = 0;
public ConsumerStompSessionHandler(int expectedMessageCount, CountDownLatch connectLatch, CountDownLatch subscribeLatch,
CountDownLatch messageLatch, CountDownLatch disconnectLatch, AtomicReference<Throwable> failure, int clientNum)
{
this.expectedMessageCount = expectedMessageCount;
this.connectLatch = connectLatch;
this.subscribeLatch = subscribeLatch;
this.messageLatch = messageLatch;
this.disconnectLatch = disconnectLatch;
this.failure = failure;
this.clientNum = clientNum;
}
@Override
public void afterConnected(final StompSession session, StompHeaders connectedHeaders)
{
__ActiveConn = __ActiveConn + 1;
this.connectLatch.countDown();
session.setAutoReceipt(true);
final RequestUserList req = new RequestUserList();
req.setCustomerid(customerID);
String channelID = System.currentTimeMillis() + "";
String subscribeChannel = __SUBSCRIBE_PREDICATE_QUEUE + channelID;
final String sendChannel = __SEND_PREDICATE + "userListOnline";
req.setChannelID(channelID);
// session.send(sendChannel, req);
// System.out.println("Client " + clientNum + " connected");
session.subscribe(subscribeChannel, new StompFrameHandler() {
@Override
public Type getPayloadType(StompHeaders headers)
{
System.out.println("Got ResponseH");
return String.class;
}
@Override
public void handleFrame(StompHeaders headers, Object payload)
{
System.out.println("Got ResponseA");
/* if (messageCount.incrementAndGet() == expectedMessageCount)
{
messageLatch.countDown();
disconnectLatch.countDown();
session.disconnect();
}*/
}
}).addReceiptTask(new Runnable() {
@Override
public void run()
{
System.out.println("Got Response for client " + clientNum);
//subscribeLatch.countDown();
}
});
// session.send(sendChannel, req);
}
@Override
public void handleTransportError(StompSession session, Throwable exception)
{
__ErrorConn = __ErrorConn + 1;
logger.error("Transport error", exception);
this.failure.set(exception);
if (exception instanceof ConnectionLostException)
{
this.disconnectLatch.countDown();
}
}
@Override
public void handleException(StompSession s, StompCommand c, StompHeaders h, byte[] p, Throwable ex)
{
logger.error("Handling exception", ex);
this.failure.set(ex);
}
@Override
public void handleFrame(StompHeaders headers, Object payload)
{
System.out.println("Got ResponseF");
Exception ex = new Exception(headers.toString());
logger.error("STOMP ERROR frame", ex);
this.failure.set(ex);
}
@Override
public String toString()
{
return "ConsumerStompSessionHandler[messageCount=" + this.messageCount + "]";
}
}
public static void main(String[] args)
{
try
{
int clientCount = 3000;
for (int x = 0; x < clientCount; x++){
runTest(121807, x+1);
}
System.out.println("DONE...");
System.out.println("Live Connections = " + __ActiveConn);
System.out.println("Error Connections = " + __ErrorConn);
/*
for (int x = 0; x < clientCount; x++)
{
final int clientNum = x;
ThreadPoolManager.executorService.execute(new Runnable() {
@Override
public void run()
{
try
{
Thread.sleep(500);
} catch (Exception e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
runTest(121807, clientNum);
}
});
}
*/
System.out.println("DONE..Waiting..");
Thread.sleep(1000000);
} catch (InterruptedException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}

How can I use the Spring Framework with Lucene?

Hi all,
I am a newbie to Lucene, and I'm using spring-mvc (3.2.5.RELEASE) and Lucene (4.6.0), both currently the newest versions.
How can I use near-real-time (NRT) search?
I wrote this code to get an instance of IndexWriter (singleton):
package com.github.yingzhuo.mycar.search;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.wltea.analyzer.lucene.IKAnalyzer;
public class IndexWriterFactoryBean implements FactoryBean<IndexWriter>, InitializingBean, DisposableBean {
private static final Logger LOGGER = LoggerFactory.getLogger(IndexWriterFactoryBean.class);
private Analyzer analyzer = new IKAnalyzer(false);
private Resource indexDirectory = null;
private IndexWriter indexWriter = null;
private Directory directory = null;
public IndexWriterFactoryBean() {
if (indexDirectory != null) {
try {
if (! indexDirectory.getFile().exists()) {
FileUtils.forceMkdir(indexDirectory.getFile());
}
} catch (IOException e) {
LOGGER.warn(e.getMessage(), e);
}
}
}
@Override
public IndexWriter getObject() throws Exception {
return indexWriter;
}
@Override
public Class<?> getObjectType() {
return IndexWriter.class;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public void afterPropertiesSet() throws Exception {
Assert.notNull(analyzer, "property 'analyzer' must be set.");
Assert.notNull(indexDirectory, "property 'indexDirectory' must be set.");
directory = FSDirectory.open(indexDirectory.getFile());
indexWriter = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_46, analyzer));
}
@Override
public void destroy() throws Exception {
IOUtils.closeQuietly(indexWriter);
IOUtils.closeQuietly(directory);
IOUtils.closeQuietly(analyzer);
}
// getter & setter
// ------------------------------------------------------------------------------------------
public void setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
public void setIndexDirectory(Resource indexDirectory) {
this.indexDirectory = indexDirectory;
}
}
And this utility to get a DirectoryReader via a static method:
import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import com.github.yingzhuo.mycar.config.SpringUtils;
public final class DirectoryReaderHolder {
private static DirectoryReader HOLDER = null;
public synchronized static DirectoryReader get() {
if (HOLDER == null) {
try {
HOLDER = DirectoryReader.open(SpringUtils.getBean(IndexWriter.class), true);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
return HOLDER;
}
public static synchronized void set(DirectoryReader directoryReader) {
if (directoryReader == null) {
throw new NullPointerException();
} else {
HOLDER = directoryReader;
}
}
}
And this bean is injected into my spring-mvc controller. In the 'create' method, I am trying to get a new reader before I create an IndexSearcher, but HOW SHOULD I HANDLE THE OLD READER?
Can I close it directly? If other threads are still using the old reader, will very bad things happen?
package com.github.yingzhuo.mycar.search;
import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
public class IndexSearcherManager {
public IndexSearcher create() {
try {
DirectoryReader oldReader = DirectoryReaderHolder.get();
DirectoryReader newReader = DirectoryReader.openIfChanged(oldReader);
if (newReader != null) {
oldReader.close(); // AM I RIGHT ???
oldReader = newReader;
}
return new IndexSearcher(oldReader);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}
Any suggestions? Thank you.
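For what it's worth, Lucene ships a helper for exactly this refresh-and-release cycle: SearcherManager reference-counts the underlying readers, so an old reader is only closed once every thread has released its searcher. A minimal near-real-time sketch against Lucene 4.6 (the wiring around it is assumed, not taken from the code above):
import java.io.IOException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;
public class NrtSearchService {
    private final SearcherManager searcherManager;
    public NrtSearchService(IndexWriter indexWriter) throws IOException {
        // true = apply deletes; null = default SearcherFactory
        this.searcherManager = new SearcherManager(indexWriter, true, null);
    }
    public <T> T search(SearchCallback<T> callback) throws IOException {
        searcherManager.maybeRefresh();                  // pick up recent index changes
        IndexSearcher searcher = searcherManager.acquire();
        try {
            return callback.doSearch(searcher);
        } finally {
            searcherManager.release(searcher);           // never close the searcher yourself
        }
    }
    public interface SearchCallback<T> {
        T doSearch(IndexSearcher searcher) throws IOException;
    }
}
With something like this in place, the DirectoryReaderHolder and the manual openIfChanged()/close() handling would no longer be needed.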
