I am using Spring and Spring Security and want to use spring-session-data-redis with RedisHttpSessionConfiguration to store sessions in Redis (so clients won't lose their sessions when the webapp fails and is switched over to another server).
My question: what happens when the Redis server is down?
Will Spring be able to keep working by storing sessions in memory until Redis is back up? Is there a way to configure it to do so?
I am using Redis on AWS ElastiCache, and failover can take several minutes before the replacement primary node is configured in DNS.
As far as I can see, you will need to provide an implementation of CacheErrorHandler (javadoc).
You can do this by providing a @Configuration class that implements CachingConfigurer and overrides the errorHandler() method.
For example:
@Configuration
@EnableCaching
public class MyApp extends SpringBootServletInitializer implements CachingConfigurer {
@Override
public CacheErrorHandler errorHandler() {
return new MyAppCacheErrorHandler();
}
}
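A minimal sketch of what MyAppCacheErrorHandler could look like (the body below is an assumption, not part of the original answer): it logs cache failures and swallows them, so a Redis outage degrades to cache misses instead of exceptions bubbling up to callers.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.Cache;
import org.springframework.cache.interceptor.CacheErrorHandler;

// Assumed implementation: log and swallow cache errors so a Redis outage
// degrades to cache misses rather than application exceptions.
public class MyAppCacheErrorHandler implements CacheErrorHandler {

    private static final Logger LOGGER = LoggerFactory.getLogger(MyAppCacheErrorHandler.class);

    @Override
    public void handleCacheGetError(RuntimeException exception, Cache cache, Object key) {
        LOGGER.warn("GET failed on cache {} for key {}", cache.getName(), key, exception);
    }

    @Override
    public void handleCachePutError(RuntimeException exception, Cache cache, Object key, Object value) {
        LOGGER.warn("PUT failed on cache {} for key {}", cache.getName(), key, exception);
    }

    @Override
    public void handleCacheEvictError(RuntimeException exception, Cache cache, Object key) {
        LOGGER.warn("EVICT failed on cache {} for key {}", cache.getName(), key, exception);
    }

    @Override
    public void handleCacheClearError(RuntimeException exception, Cache cache) {
        LOGGER.warn("CLEAR failed on cache {}", cache.getName(), exception);
    }
}

Note that CacheErrorHandler only covers Spring's cache abstraction; it does not by itself replicate HTTP sessions.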
Exactly HOW you will then provide uninterrupted service is not clear to me - without duplicating the current sessions in your failover cache, it seems impossible.
If you are using ElastiCache, is it not possible to have AWS handle a replicated setup for you, so that if one node goes down, the other can take over?
I've managed to implement a fail-over mechanism to an in-memory session store whenever Redis is unreachable. Unfortunately this can't be done with just a Spring property, so you have to implement a custom SessionRepository and configure the SessionRepositoryFilter to use it; that repository then fails over to the in-memory cache whenever Redis is unreachable.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Primary;
import org.springframework.session.MapSession;
import org.springframework.session.Session;
import org.springframework.session.SessionRepository;
import org.springframework.stereotype.Component;
@Component("customSessionRepository")
@Primary
public class CustomFailoverToMapSessionRepository implements SessionRepository {
private static final Logger LOGGER = LoggerFactory.getLogger(CustomFailoverToMapSessionRepository.class);
private GuavaBasedSessionRepository guavaBasedSessionRepository;
private SessionRepository sessionRepository;
public CustomFailoverToMapSessionRepository(SessionRepository sessionRepository, GuavaBasedSessionRepository guavaBasedSessionRepository) {
this.sessionRepository = sessionRepository;
this.guavaBasedSessionRepository = guavaBasedSessionRepository;
}
@Override
public Session createSession() {
Session session = null;
MapSession mapSession = guavaBasedSessionRepository.createSession();
try {
session = sessionRepository.createSession();
mapSession = toMapSession(session);
} catch (Exception e) {
LOGGER.warn("Unexpected exception when trying to create a session will create just an in memory session", e);
}
return session == null ? mapSession : session;
}
@Override
public void save(Session session) {
try {
if (!isOfMapSession(session)) {
sessionRepository.save(session);
}
} catch (Exception e) {
LOGGER.warn("Unexpected exception when trying to save a session with id {} will create just an in memory session", session.getId(), e);
}
guavaBasedSessionRepository.save(toMapSession(session));
}
@Override
public Session findById(String id) {
try {
return sessionRepository.findById(id);
} catch (Exception e) {
LOGGER.warn("Unexpected exception when trying to lookup a session with id {}", id, e);
return guavaBasedSessionRepository.findById(id);
}
}
@Override
public void deleteById(String id) {
try {
try {
guavaBasedSessionRepository.deleteById(id);
} catch (Exception e) {
//ignored
}
sessionRepository.deleteById(id);
} catch (Exception e) {
LOGGER.warn("Unexpected exception when trying to delete a session with id {}", id, e);
}
}
private boolean isOfMapSession(Session session) {
return session instanceof MapSession;
}
private MapSession toMapSession(Session session) {
final MapSession mapSession = guavaBasedSessionRepository.createSession();
if (session != null) {
mapSession.setId(session.getId());
mapSession.setCreationTime(session.getCreationTime());
mapSession.setLastAccessedTime(session.getLastAccessedTime());
mapSession.setMaxInactiveInterval(session.getMaxInactiveInterval());
session.getAttributeNames()
.forEach(attributeName -> mapSession.setAttribute(attributeName, session.getAttribute(attributeName)));
}
return mapSession;
}
}
Implement the in-memory cache session repository using Guava
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.session.MapSession;
import org.springframework.session.Session;
import org.springframework.session.SessionRepository;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
@Component("guavaBasedSessionRepository")
public class GuavaBasedSessionRepository implements SessionRepository<MapSession> {
private Cache<String, Session> sessionCache;
@Value("${session.local.guava.cache.maximum.size}")
private int maximumCacheSize;
@Value("${redis.session.keys.timeout}")
private long sessionTimeout;
@PostConstruct
void init(){
sessionCache = CacheBuilder
.newBuilder()
.maximumSize(maximumCacheSize)
.expireAfterWrite(sessionTimeout, TimeUnit.MINUTES)
.build();
}
@Override
public void save(MapSession session) {
if (!session.getId().equals(session.getOriginalId())) {
this.sessionCache.invalidate(session.getOriginalId());
}
this.sessionCache.put(session.getId(), new MapSession(session));
}
@Override
public MapSession findById(String id) {
Session saved = null;
try {
saved = this.sessionCache.getIfPresent(id);
} catch (Exception e){
//ignored
}
if (saved == null) {
return null;
}
if (saved.isExpired()) {
deleteById(saved.getId());
return null;
}
return new MapSession(saved);
}
@Override
public void deleteById(String id) {
this.sessionCache.invalidate(id);
}
@Override
public MapSession createSession() {
MapSession result = new MapSession();
result.setMaxInactiveInterval(Duration.ofSeconds(sessionTimeout));
return result;
}
}
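The repository above expects two properties to be defined, e.g. in application.properties; the values below are just placeholders (note that the timeout value is applied with TimeUnit.MINUTES for the cache expiry):

# placeholder values - tune to your environment
session.local.guava.cache.maximum.size=10000
redis.session.keys.timeout=30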
Configure Spring to use the custom SessionRepository
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.session.Session;
import org.springframework.session.data.redis.config.annotation.web.http.EnableRedisHttpSession;
import org.springframework.session.web.http.CookieHttpSessionIdResolver;
import org.springframework.session.web.http.CookieSerializer;
import org.springframework.session.web.http.SessionRepositoryFilter;
import javax.annotation.PostConstruct;
@EnableRedisHttpSession
@Configuration
public class CustomSessionConfig {
private CookieHttpSessionIdResolver defaultHttpSessionIdResolver = new CookieHttpSessionIdResolver();
@Autowired
private CookieSerializer cookieSerializer;
@PostConstruct
public void init(){
this.defaultHttpSessionIdResolver.setCookieSerializer(cookieSerializer);
}
@Bean
@Primary
public <S extends Session> SessionRepositoryFilter<? extends Session> sessionRepositoryFilter(CustomFailoverToMapSessionRepository customSessionRepository) {
SessionRepositoryFilter<S> sessionRepositoryFilter = new SessionRepositoryFilter<>(customSessionRepository);
sessionRepositoryFilter.setHttpSessionIdResolver(this.defaultHttpSessionIdResolver);
return sessionRepositoryFilter;
}
}
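Note that this configuration autowires a CookieSerializer, which is not shown above. If you don't already define one elsewhere, a minimal sketch using Spring Session's DefaultCookieSerializer (the cookie settings below are assumptions) can be added inside CustomSessionConfig:

import org.springframework.session.web.http.DefaultCookieSerializer;

@Bean
public CookieSerializer cookieSerializer() {
    // Assumed defaults - adjust the cookie name, path and flags for your application.
    DefaultCookieSerializer serializer = new DefaultCookieSerializer();
    serializer.setCookieName("SESSION");
    serializer.setCookiePath("/");
    serializer.setUseHttpOnlyCookie(true);
    return serializer;
}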
Essentially, what I'm looking to do is create a @Service or component that loads some data into memory from a database table, which is then referenced throughout the job execution.
package com.squareup.se.bridge.batchworker.components.context;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.squareup.se.bridge.batchworker.repositories.BridgeBatchJobParametersRepository;
import com.squareup.se.bridge.batchworker.util.JobParameterKeys;
import com.squareup.se.bridge.core.api.services.batchworker.FatalSyncException;
import com.squareup.se.bridge.core.integration.util.logger.JobExecutionLoggerFactory;
import java.io.IOException;
import javax.validation.constraints.NotNull;
import org.slf4j.Logger;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.stereotype.Component;
@JobScope @Component public class BridgeBatchIntegrationJobContextProvider
implements JobExecutionListener {
private Logger logger;
private ObjectMapper mapper;
private BridgeBatchJobParametersRepository bridgeBatchJobParametersRepository;
private BridgeIntegrationJobContext context;
public BridgeBatchIntegrationJobContextProvider(ObjectMapper mapper,
BridgeBatchJobParametersRepository bridgeBatchJobParametersRepository) {
this.mapper = mapper;
this.bridgeBatchJobParametersRepository = bridgeBatchJobParametersRepository;
}
@Override public void beforeJob(JobExecution jobExecution) {
var jobId = jobExecution.getJobParameters().getString(JobParameterKeys.SYNC_ID);
this.logger = JobExecutionLoggerFactory.getLogger(
BridgeBatchIntegrationJobContextProvider.class, jobId);
this.context = deserializeJobParameters(jobId);
}
@NotNull public BridgeIntegrationJobContext get() {
if (context == null) {
throw new IllegalStateException("Expected context to exist before calling this method");
}
return context;
}
@Override public void afterJob(JobExecution jobExecution) { }
@NotNull private String getParameters(String jobId) {
var jobParams = bridgeBatchJobParametersRepository.find(jobId);
if (jobParams == null || jobParams.size() == 0) {
throw new FatalSyncException(String.format("No job parameters for job `%s` exists", jobId));
}
if (jobParams.size() > 1) {
throw new FatalSyncException(String.format("Multiple parameter entries exist for job `%s`",
jobId));
} else if (Strings.isNullOrEmpty(jobParams.get(0).getIntegrationContext())) {
throw new FatalSyncException(String.format("Job parameters for job `%s` is empty", jobId));
}
return jobParams.get(0).getIntegrationContext();
}
@NotNull private BridgeIntegrationJobContext deserializeJobParameters(String jobId) {
try {
return mapper.readValue(getParameters(jobId),
BridgeIntegrationJobContext.class);
} catch (IOException e) {
//TODO page on this
logger.info(e.getMessage(), e);
throw new FatalSyncException(e);
}
}
}
I've configured a job like this:
return jobBuilderFactory.get(CUSTOMERS_BATCH_JOB_NAME)
.incrementer(new RunIdIncrementer())
.start(loadFromOriginStep)
.next(retryFailuresFromOriginStep)
.listener(bridgeBatchIntegrationJobContextProvider)
.listener(jobListener)
.build();
The constructor depends on other beans, including a Jackson ObjectMapper and a JPA repository. I'm encountering a few problems:
- the component is not instantiated by Spring, and thus the instance variables I want to bind are not present
- if I remove @JobScope from the component, Spring constructs the component instance
I don't see where @JobContext is used in your code, and according to your requirement, you don't need it.
If you want to load some data in the job execution context using a listener, you can do it in beforeJob with jobExecution.getExecutionContext().put("key", "value");.
That said, it is not recommended to load a lot of data in the execution context as it is persisted between steps.
So unless you are loading a small amount of data in the execution context, you need to find another approach (like using a separate cache for example, see Spring Batch With Annotation and Caching).
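For example, a minimal sketch of such a listener (class and key names below are illustrative, not taken from your code):

import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;

// Illustrative listener: loads a small value once before the job starts and
// stores it in the job execution context so later steps can read it.
public class LoadDataListener implements JobExecutionListener {

    @Override
    public void beforeJob(JobExecution jobExecution) {
        // Keep this small: the execution context is serialized and persisted between steps.
        jobExecution.getExecutionContext().put("integrationContext", "some small value");
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // nothing to do
    }
}

The listener is then registered on the job with .listener(...), exactly as in the job definition you already have.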
In Spring Boot 1.3.6.RELEASE I had the class below registered with Jersey. Every java.util.Date field would be read and returned in ISO 8601 format. However, after updating to 1.4.1.RELEASE it now sometimes works and sometimes doesn't. What's the new proper way to enable this?
package com.mypackage;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.text.ParsePosition;
import java.util.Date;
import javax.ws.rs.ext.ParamConverter;
import javax.ws.rs.ext.ParamConverterProvider;
import javax.ws.rs.ext.Provider;
import com.fasterxml.jackson.databind.util.ISO8601Utils;
@Provider
public class DateTimeParamConverterProvider implements ParamConverterProvider {
@SuppressWarnings("unchecked")
@Override
public <T> ParamConverter<T> getConverter(Class<T> clazz, Type type, Annotation[] annotations) {
if (type.equals(Date.class)) {
return (ParamConverter<T>) new DateTimeParamConverter();
} else {
return null;
}
}
static class DateTimeParamConverter implements ParamConverter<Date> {
@Override
public java.util.Date fromString(String value) {
if (value == null) {
return null;
}
try {
return ISO8601Utils.parse(value, new ParsePosition(0));
} catch (ParseException e) {
throw new RuntimeException(e);
}
}
@Override
public String toString(Date value) {
return ISO8601Utils.format(value);
}
}
}
I register this provider like this:
@Component
@ApplicationPath("/")
public class JerseyConfiguration extends ResourceConfig {
private static final Logger log = Logger.getLogger(JerseyConfiguration.class.getName());
@Autowired
public JerseyConfiguration(LogRequestFilter lrf) {
register(new ObjectMapperContextResolverNonNull());
register(RestServiceImpl.class);
property(ServletProperties.FILTER_FORWARD_ON_404, true);
register(DateTimeParamConverterProvider.class, 6000);
...
Just define this in your application.properties:
spring.jackson.date-format=com.fasterxml.jackson.databind.util.ISO8601DateFormat
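If you prefer an explicit pattern over a formatter class, the same property also accepts a SimpleDateFormat pattern, for example:
spring.jackson.date-format=yyyy-MM-dd'T'HH:mm:ss.SSSXXX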
Hi all,
I am new to Lucene, and I'm using Spring MVC (3.2.5.RELEASE) and Lucene (4.6.0), both the newest versions currently.
How can I use near-real-time (NRT) search?
I wrote this code to get a singleton instance of IndexWriter:
package com.github.yingzhuo.mycar.search;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.io.Resource;
import org.springframework.util.Assert;
import org.wltea.analyzer.lucene.IKAnalyzer;
public class IndexWriterFactoryBean implements FactoryBean<IndexWriter>, InitializingBean, DisposableBean {
private static final Logger LOGGER = LoggerFactory.getLogger(IndexWriterFactoryBean.class);
private Analyzer analyzer = new IKAnalyzer(false);
private Resource indexDirectory = null;
private IndexWriter indexWriter = null;
private Directory directory = null;
public IndexWriterFactoryBean() {
if (indexDirectory != null) {
try {
if (! indexDirectory.getFile().exists()) {
FileUtils.forceMkdir(indexDirectory.getFile());
}
} catch (IOException e) {
LOGGER.warn(e.getMessage(), e);
}
}
}
@Override
public IndexWriter getObject() throws Exception {
return indexWriter;
}
@Override
public Class<?> getObjectType() {
return IndexWriter.class;
}
@Override
public boolean isSingleton() {
return true;
}
@Override
public void afterPropertiesSet() throws Exception {
Assert.notNull(analyzer, "property 'analyzer' must be set.");
Assert.notNull(indexDirectory, "property 'indexDirectory' must be set.");
directory = FSDirectory.open(indexDirectory.getFile());
indexWriter = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_46, analyzer));
}
@Override
public void destroy() throws Exception {
IOUtils.closeQuietly(indexWriter);
IOUtils.closeQuietly(directory);
IOUtils.closeQuietly(analyzer);
}
// getter & setter
// ------------------------------------------------------------------------------------------
public void setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer;
}
public void setIndexDirectory(Resource indexDirectory) {
this.indexDirectory = indexDirectory;
}
}
And this utility to get a DirectoryReader via a static method:
import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import com.github.yingzhuo.mycar.config.SpringUtils;
public final class DirectoryReaderHolder {
private static DirectoryReader HOLDER = null;
public synchronized static DirectoryReader get() {
if (HOLDER == null) {
try {
HOLDER = DirectoryReader.open(SpringUtils.getBean(IndexWriter.class), true);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
return HOLDER;
}
public static synchronized void set(DirectoryReader directoryReader) {
if (directoryReader == null) {
throw new NullPointerException();
} else {
HOLDER = directoryReader;
}
}
}
And this bean is injected into my Spring MVC controller. In the 'create' method, I am trying to get a new reader before I create an IndexSearcher, but HOW SHOULD I HANDLE THE OLD READER?
Can I close it directly? If other threads are still using the old reader, will very bad things happen?
package com.github.yingzhuo.mycar.search;
import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
public class IndexSearcherManager {
public IndexSearcher create() {
try {
DirectoryReader oldReader = DirectoryReaderHolder.get();
DirectoryReader newReader = DirectoryReader.openIfChanged(oldReader);
if (newReader != null) {
oldReader.close(); // AM I RIGHT ???
oldReader = newReader;
}
return new IndexSearcher(oldReader);
} catch (IOException e) {
throw new IllegalStateException(e);
}
}
}
Any suggestions? Thank you.
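For what it's worth, the usual way to handle this in Lucene 4.x is SearcherManager, which reopens readers for you and only closes an old reader once every thread has released it. A minimal sketch (the class name is illustrative), built on the IndexWriter from the factory bean above:

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.SearcherManager;

public class NrtSearchExample {

    private final SearcherManager searcherManager;

    public NrtSearchExample(IndexWriter writer) throws Exception {
        // applyAllDeletes = true; a null SearcherFactory means the default factory is used
        this.searcherManager = new SearcherManager(writer, true, null);
    }

    public void search() throws Exception {
        // Pick up changes made through the IndexWriter since the last refresh.
        searcherManager.maybeRefresh();
        IndexSearcher searcher = searcherManager.acquire();
        try {
            // ... run queries with 'searcher' ...
        } finally {
            // Old readers are closed automatically once all threads have released them.
            searcherManager.release(searcher);
        }
    }
}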
I am trying to run a Jetty WebSocket example.
I copied an example from the internet, which worked fine when I deployed it directly to the server without making any changes.
But when I copied the source (the servlet) into the Eclipse IDE, it gave compilation errors such as:
- The method onClose(int, String) of type Html5Servlet.StockTickerSocket must override a superclass method
- The method onOpen(WebSocket.Connection) of type Html5Servlet.StockTickerSocket must override a superclass method
- The method onMessage(String) of type Html5Servlet.StockTickerSocket must override a superclass method
This is my servlet; I kept the jars exactly as mentioned in that example.
package org.ajeesh.app;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.http.HttpServletRequest;
import org.eclipse.jetty.websocket.WebSocket;
import org.eclipse.jetty.websocket.WebSocketServlet;
public class Html5Servlet extends WebSocketServlet {
private AtomicInteger index = new AtomicInteger();
private static final List<String> tickers = new ArrayList<String>();
static{
tickers.add("ajeesh");
tickers.add("peeyu");
tickers.add("kidillan");
tickers.add("entammo");
}
/**
*
*/
private static final long serialVersionUID = 1L;
public WebSocket doWebSocketConnect(HttpServletRequest req, String resp) {
System.out.println("On server");
return new StockTickerSocket();
}
protected String getMyJsonTicker(){
StringBuilder start=new StringBuilder("{");
start.append("\"stocks\":[");
int counter=0;
for (String aTicker : tickers) {
counter++;
start.append("{ \"ticker\":\""+aTicker +"\""+","+"\"price\":\""+index.incrementAndGet()+"\" }");
if(counter<tickers.size()){
start.append(",");
}
}
start.append("]");
start.append("}");
return start.toString();
}
public class StockTickerSocket implements WebSocket.OnTextMessage{
private Connection connection;
private Timer timer;
@Override
public void onClose(int arg0, String arg1) {
System.out.println("Web socket closed!");
}
@Override
public void onOpen(Connection connection) {
System.out.println("onOpen!");
this.connection=connection;
this.timer=new Timer();
}
@Override
public void onMessage(String data) {
System.out.println("onMessage!");
if(data.indexOf("disconnect")>=0){
connection.close();
timer.cancel();
}else{
sendMessage();
}
}
private void sendMessage() {
System.out.println("sendMessage!");
if(connection==null||!connection.isOpen()){
System.out.println("Connection is closed!!");
return;
}
timer.schedule(new TimerTask() {
@Override
public void run() {
try{
System.out.println("Running task");
connection.sendMessage(getMyJsonTicker());
}
catch (IOException e) {
e.printStackTrace();
}
}
}, new Date(),5000);
}
}
}
I want to use Spring-managed beans for my JSF 2 controllers so that autowiring works. I know that there is no @ViewScoped in Spring, and I know a few implementations of @ViewScoped floating around various blogs (one from the PrimeFaces lead).
Is any of them used in a real application and considered stable? Maybe one of them is recommended or widely used and I'm just not able to find it.
There is one :) Without memory leaks and with @PreDestroy support. Tested in production. Here.
package org.nkey.primefaces.scopes.test.spring.scope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.config.Scope;
import javax.faces.component.UIViewRoot;
import javax.faces.context.FacesContext;
import javax.faces.event.PreDestroyViewMapEvent;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpSessionBindingEvent;
import javax.servlet.http.HttpSessionBindingListener;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
/**
* @author m.nikolaev Date: 21.11.12 Time: 0:37
*/
public class ViewScope implements Scope, Serializable, HttpSessionBindingListener {
private static final Logger LOGGER = LoggerFactory.getLogger(ViewScope.class);
private final WeakHashMap<HttpSession, Set<ViewScopeViewMapListener>> sessionToListeners = new WeakHashMap<>();
@Override
public Object get(String name, ObjectFactory objectFactory) {
Map<String, Object> viewMap = FacesContext.getCurrentInstance().getViewRoot().getViewMap();
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized (viewMap) {
if (viewMap.containsKey(name)) {
return viewMap.get(name);
} else {
LOGGER.debug("Creating bean {}", name);
Object object = objectFactory.getObject();
viewMap.put(name, object);
return object;
}
}
}
@Override
public Object remove(String name) {
throw new UnsupportedOperationException();
}
@Override
public String getConversationId() {
return null;
}
@Override
public void registerDestructionCallback(String name, Runnable callback) {
LOGGER.debug("registerDestructionCallback for bean {}", name);
UIViewRoot viewRoot = FacesContext.getCurrentInstance().getViewRoot();
ViewScopeViewMapListener listener =
new ViewScopeViewMapListener(viewRoot, name, callback, this);
viewRoot.subscribeToViewEvent(PreDestroyViewMapEvent.class, listener);
HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(true);
final Set<ViewScopeViewMapListener> sessionListeners;
synchronized (sessionToListeners) {
if (!sessionToListeners.containsKey(httpSession)) {
sessionToListeners.put(httpSession, new HashSet<ViewScopeViewMapListener>());
}
sessionListeners = sessionToListeners.get(httpSession);
}
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized (sessionListeners) {
Set<ViewScopeViewMapListener> toRemove = new HashSet<>();
for (ViewScopeViewMapListener viewMapListener : sessionListeners) {
if (viewMapListener.checkRoot()) {
toRemove.add(viewMapListener);
}
}
sessionListeners.removeAll(toRemove);
sessionListeners.add(listener);
}
if (!FacesContext.getCurrentInstance().getExternalContext().getSessionMap().containsKey("sessionBindingListener")) {
FacesContext.getCurrentInstance().getExternalContext().getSessionMap().put("sessionBindingListener", this);
}
}
@Override
public Object resolveContextualObject(String key) {
return null;
}
@Override
public void valueBound(HttpSessionBindingEvent event) {
LOGGER.debug("Session event bound {}", event.getName());
}
@Override
public void valueUnbound(HttpSessionBindingEvent event) {
LOGGER.debug("Session event unbound {}", event.getName());
final Set<ViewScopeViewMapListener> listeners;
synchronized (sessionToListeners) {
if (sessionToListeners.containsKey(event.getSession())) {
listeners = sessionToListeners.get(event.getSession());
sessionToListeners.remove(event.getSession());
} else {
listeners = null;
}
}
if (listeners != null) {
for (ViewScopeViewMapListener listener : listeners) {
listener.doCallback();
}
}
}
public void clearFromListener(ViewScopeViewMapListener listener) {
LOGGER.debug("Removing listener from map");
HttpSession httpSession = (HttpSession) FacesContext.getCurrentInstance().getExternalContext().getSession(false);
if (httpSession != null) {
synchronized (sessionToListeners) {
if (sessionToListeners.containsKey(httpSession)) {
sessionToListeners.get(httpSession).remove(listener);
}
}
}
}
}
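To actually use this scope, it still needs to be registered with Spring; a minimal sketch using CustomScopeConfigurer (the configuration class name is an assumption, and the scope name "view" must match the @Scope value on your beans):

import org.nkey.primefaces.scopes.test.spring.scope.ViewScope;
import org.springframework.beans.factory.config.CustomScopeConfigurer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class ViewScopeConfig {

    @Bean
    public static CustomScopeConfigurer viewScopeConfigurer() {
        // Registers the ViewScope above under the name "view",
        // so beans can be declared with @Scope("view").
        CustomScopeConfigurer configurer = new CustomScopeConfigurer();
        configurer.addScope("view", new ViewScope());
        return configurer;
    }
}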