Spring Zuul header-based routing hangs when a downstream is unresponsive for some time

I have implemented the code below to route requests to the respective downstream services based on headers.
When a downstream is unresponsive for some time, Zuul stops forwarding requests and does not resume forwarding once the downstream is up again.
There are no errors in the logs.
package com.uk.proxy.filter;
import ai.cuddle.sift.dataapi.proxy.Application;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cloud.netflix.zuul.filters.support.FilterConstants;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import static org.springframework.cloud.netflix.zuul.filters.support.FilterConstants.ROUTE_TYPE;
@Component
public class RoutingFilter extends ZuulFilter {
private static final Logger LOGGER = LoggerFactory.getLogger(Application.class);
private static final String DEFAULT_DOWNSTREAM_GROUP = "SYSTEM";
public static final String HEADER_ORIGIN = "";
@Autowired
@Qualifier(value = "downstreamConfig")
private Map<String, String> downstreamMap;
@Override
public String filterType() {
return ROUTE_TYPE;
}
@Override
public int filterOrder() {
return FilterConstants.PRE_DECORATION_FILTER_ORDER - 100;
}
@Override
public boolean shouldFilter() {
return true;
}
@Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
HttpServletRequest request = ctx.getRequest();
String inputURI = request.getRequestURI();
String header = request.getHeader(HEADER_ORIGIN);
try {
String downstreamHost = getDownstreamHost(header);
LOGGER.info(" Header "+header+ " Downstream Host "+downstreamHost);
ctx.setRouteHost(new URL(downstreamHost));
ctx.put("requestURI", inputURI);
} catch (MalformedURLException e) {
LOGGER.error(e.getMessage(), e);
}
return null;
}
private String getDownstreamHost(String header) {
if(header == null || header.isEmpty()){
LOGGER.warn("Header is null or empty");
}
String downstreamGroup = (header == null || header.isEmpty()) ? DEFAULT_DOWNSTREAM_GROUP : header.toUpperCase();
String downstreamHost;
if (downstreamMap.containsKey(downstreamGroup)) {
downstreamHost = downstreamMap.get(downstreamGroup);
} else {
LOGGER.error("Header "+header+" not found in config. DownstreamMap "+downstreamMap);
downstreamHost = downstreamMap.get(DEFAULT_DOWNSTREAM_GROUP);
}
return downstreamHost;
}
}
application.yaml
zuul:
  ignoredPatterns:
    - /manage/**
  routes:
    yourService:
      path: /**
      stripPrefix: false
      serviceId: customServiceId
  host:
    connect-timeout-millis: 300000
    socket-timeout-millis: 300000
ribbon:
  eureka:
    enabled: false
Spring cloud version: Hoxton.SR8.
Please let me know if anyone has faced this issue.
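For reference, since the filter sets the target with ctx.setRouteHost(...), forwarding goes through Zuul's SimpleHostRoutingFilter and its pooled Apache HttpClient. Alongside the timeouts already configured, the pool limits below can matter when a downstream stalls and connections stay occupied; this sketch only shows the relevant properties with their Spring Cloud defaults and is not part of the original configuration:
zuul:
  host:
    # defaults shown for reference; connections held open by a slow downstream count against these limits
    max-total-connections: 200
    max-per-route-connections: 20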

Related

Spring rsocket security with Webflux security

My application is a Spring WebFlux application on Spring Boot 2.6.6.
Since I have a chat and notification requirement for the logged-in user, I am trying to use RSocket over WebSocket for notifications and messaging, alongside WebFlux for the web application.
I am using Spring Security for my web application with the config below and it is working. Now I am not sure whether I can use the same security for RSocket, since the RSocket-over-WebSocket connection is established once the user is logged in.
My Webflux security,
/**
*
*/
package com.TestApp.service.admin.spring.security;
import static java.util.stream.Collectors.toList;
import static org.springframework.security.config.Customizer.withDefaults;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.security.reactive.PathRequest;
import org.springframework.context.annotation.Bean;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.messaging.rsocket.RSocketStrategies;
import org.springframework.messaging.rsocket.annotation.support.RSocketMessageHandler;
import org.springframework.security.authentication.ReactiveAuthenticationManager;
import org.springframework.security.config.annotation.rsocket.EnableRSocketSecurity;
import org.springframework.security.config.annotation.rsocket.RSocketSecurity;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.SecurityWebFiltersOrder;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
import org.springframework.security.messaging.handler.invocation.reactive.AuthenticationPrincipalArgumentResolver;
import org.springframework.security.rsocket.core.PayloadSocketAcceptorInterceptor;
import org.springframework.security.web.server.SecurityWebFilterChain;
import org.springframework.security.web.server.authentication.ServerAuthenticationFailureHandler;
import org.springframework.security.web.server.authentication.ServerAuthenticationSuccessHandler;
import org.springframework.security.web.server.authentication.logout.LogoutWebFilter;
import org.springframework.security.web.server.authentication.logout.RedirectServerLogoutSuccessHandler;
import org.springframework.security.web.server.authentication.logout.ServerLogoutHandler;
import org.springframework.security.web.server.authentication.logout.ServerLogoutSuccessHandler;
import org.springframework.security.web.server.authorization.HttpStatusServerAccessDeniedHandler;
import org.springframework.security.web.server.context.ServerSecurityContextRepository;
import org.springframework.security.web.server.csrf.CookieServerCsrfTokenRepository;
import org.springframework.security.web.server.util.matcher.AndServerWebExchangeMatcher;
import org.springframework.security.web.server.util.matcher.OrServerWebExchangeMatcher;
import org.springframework.security.web.server.util.matcher.PathPatternParserServerWebExchangeMatcher;
import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher;
import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher.MatchResult;
import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers;
import org.springframework.web.server.WebSession;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.testapp.service.admin.spring.TestAppProperties;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@EnableWebFluxSecurity
public class AdminSecurityConfig {
private static final Logger LOGGER = LoggerFactory.getLogger(AdminSecurityConfig.class);
private static final String[] DEFAULT_FILTER_MAPPING = new String[] { "/**" };
private static final String authenticateHeaderValue = "TestApp";
private static final String unauthorizedJsonBody = "{\"message\": \"You are not authorized\"}";
@Autowired
private TestAppProperties testAppProps;
@Bean
public SecurityWebFilterChain securityWebFilterChain(final ServerHttpSecurity http,
final ReactiveAuthenticationManager authManager,
final ServerSecurityContextRepository securityContextRepository,
final TestAppAuthenticationFailureHandler failureHandler,
final ObjectProvider<TestAppLogoutHandler> availableLogoutHandlers) {
http.securityContextRepository(securityContextRepository);
return http.authorizeExchange().matchers(PathRequest.toStaticResources().atCommonLocations()).permitAll()
.pathMatchers(testAppProps.getSecurity().getIgnorePatterns()).permitAll()
.anyExchange().authenticated().and().formLogin().loginPage(testAppProps.getSecurity().getLoginPath())
.authenticationSuccessHandler(authSuccessHandler()).and().exceptionHandling()
.authenticationEntryPoint((exchange, exception) -> Mono.error(exception))
.accessDeniedHandler(new HttpStatusServerAccessDeniedHandler(HttpStatus.UNAUTHORIZED)).and().build();
}
@Bean
public ServerAuthenticationSuccessHandler authSuccessHandler() {
return new TestAppAuthSuccessHandler("/");
}
@Bean
public ServerLogoutSuccessHandler logoutSuccessHandler(String uri) {
RedirectServerLogoutSuccessHandler successHandler = new RedirectServerLogoutSuccessHandler();
successHandler.setLogoutSuccessUrl(URI.create(uri));
return successHandler;
}
@Bean(name = "failure-handler-bean")
public TestAppAuthenticationFailureHandler defaultFailureHandler() {
try {
new ObjectMapper().reader().readTree(unauthorizedJsonBody);
} catch (final IOException e) {
throw new IllegalArgumentException("'unauthorizedJsonBody' property is not valid JSON.", e);
}
return new TestAppAdminAuthFailureHandler(authenticateHeaderValue, unauthorizedJsonBody);
}
@Bean
public AuthenticatedPrinciplaProvider TestAppSecurityPrincipalProvider() {
return new TestAppSecurityContextPrincipleProvider();
}
}
public class TestAppSecurityContextPrincipleProvider implements AuthenticatedPrinciplaProvider {
@Override
public Mono<TestAppUserDetails> retrieveUser() {
return principalMono.flatMap(principal -> {
if (principal instanceof UsernamePasswordAuthenticationToken) {
final TestAppUserDetails user = (TestAppUserDetails) ((UsernamePasswordAuthenticationToken) principal)
.getPrincipal();
LOGGER.debug("User principal found for ID {} Org {} ", user.getUserId(), user.getOrgId());
return Mono.just(user);
}
return Mono.error(() -> new IllegalArgumentException(NO_USER_AUTH_ERROR));
});
}
}
This is working as expected. There is a login page and the user gets redirected to the home page after a successful login.
Now I am adding RSocket over WebSocket for messaging and notifications for the logged-in user.
implementation 'org.springframework.boot:spring-boot-starter-webflux'
implementation 'org.springframework.boot:spring-boot-starter-security'
implementation 'org.springframework.security:spring-security-messaging'
implementation 'org.springframework.security:spring-security-rsocket'
implementation 'org.springframework.boot:spring-boot-starter-rsocket'
RSocketSecurityConfig,
@EnableWebFluxSecurity
public class AdminRSocketSecurityConfig {
private static final Logger LOGGER = LoggerFactory.getLogger(AdminRSocketSecurityConfig.class);
private static final String[] DEFAULT_FILTER_MAPPING = new String[] { "/**" };
private static final String authenticateHeaderValue = "TestApp";
private static final String unauthorizedJsonBody = "{\"message\": \"You are not authorized\"}";
@Autowired
private TestAppProperties TestAppProps;
@Autowired
private AuthenticatedPrinciplaProvider secContext;
static final String RSOCKET_CONVERTER_BEAN_NAME = "RSocketAuthConverter";
private static final String HEADERS = "headers";
private static final MimeType COMPOSITE_METADATA_MIME_TYPE = MimeTypeUtils
.parseMimeType(WellKnownMimeType.MESSAGE_RSOCKET_COMPOSITE_METADATA.getString());
private static final MimeType APPLICATION_JSON_MIME_TYPE = MimeTypeUtils
.parseMimeType(WellKnownMimeType.APPLICATION_JSON.getString());
@Bean
public RSocketStrategies rsocketStrategies() {
return RSocketStrategies.builder()
.encoders(encoders -> encoders.add(new Jackson2CborEncoder()))
.decoders(decoders -> decoders.add(new Jackson2CborDecoder()))
.routeMatcher(new PathPatternRouteMatcher())
.build();
}
@Bean
public RSocketMessageHandler messageHandler(RSocketStrategies strategies) {
RSocketMessageHandler handler = new RSocketMessageHandler();
HandlerMethodArgumentResolver resolver = new AuthenticationPrincipalArgumentResolver();
handler.getArgumentResolverConfigurer().addCustomResolver(resolver);
handler.setRSocketStrategies(strategies);
return handler;
}
@Bean
public PayloadSocketAcceptorInterceptor authorization(final ReactiveAuthenticationManager authManager,
final RSocketSecurity security) {
security.authorizePayload(authorize -> authorize.setup().authenticated()).authenticationManager(authManager);
return security.build();
}
}
RSocketController,
@Controller
public class RSocketController {
private static final Logger LOGGER = LoggerFactory.getLogger(RSocketController.class);
private static final Map<Integer, RSocketRequester> CLIENT_REQUESTER_MAP = new HashMap<>();
static final String SERVER = "Server";
static final String RESPONSE = "Response";
static final String STREAM = "Stream";
static final String CHANNEL = "Channel";
@Autowired
private AuthenticatedPrinciplaProvider secContext;
@ConnectMapping
// void onConnect(RSocketRequester rSocketRequester, @Payload Integer userId) {
void onConnect(RSocketRequester rSocketRequester) {
secContext.retrieveUser().flatMap(usr -> {
LOGGER.info("Client connect request for userId {} ", usr.getUserId());
rSocketRequester.rsocket().onClose().doFirst(() -> {
CLIENT_REQUESTER_MAP.put(usr.getUserId(), rSocketRequester);
}).doOnError(error -> {
LOGGER.info("Client connect request for userId {} ", usr.getUserId());
}).doFinally(consumer -> {
LOGGER.info("Removing here for userId {} ", usr.getUserId());
if (CLIENT_REQUESTER_MAP.get(usr.getBranchId()) != null) {
CLIENT_REQUESTER_MAP.remove(usr.getUserId(), rSocketRequester);
}
}).subscribe();
return Mono.empty();
}).subscribe();
}
}
From the RSocket-over-WebSocket client, the call does not reach the controller because authentication fails.
But when I set "authorize.setup().permitAll()" in authorization() in my RSocketSecurityConfig, the call reaches the controller, but retrieveUser() fails.
I am not sure how I can use the same security that is used for my web application for RSocket security as well.
In short, when the user is not logged in to my web app, the RSocket-over-WebSocket connection should fail; it should only work when the user is logged in. The initial RSocket call happens once the user is logged in.
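As a side note, one commonly documented approach for securing RSocket (this is a hedged sketch, not a reuse of the existing web session; the credentials, URL and class name are placeholders) is to send the user's credentials in the SETUP frame using Spring Security's simple authentication metadata, which the authorize.setup().authenticated() rule above can then evaluate:
import java.net.URI;

import io.rsocket.metadata.WellKnownMimeType;
import org.springframework.messaging.rsocket.RSocketRequester;
import org.springframework.security.rsocket.metadata.SimpleAuthenticationEncoder;
import org.springframework.security.rsocket.metadata.UsernamePasswordMetadata;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;

public class RSocketClientSketch {

    public RSocketRequester connect() {
        // MIME type for simple (username/password) authentication metadata
        MimeType authMimeType = MimeTypeUtils.parseMimeType(
                WellKnownMimeType.MESSAGE_RSOCKET_AUTHENTICATION.getString());

        return RSocketRequester.builder()
                // placeholder credentials; in practice these would come from the logged-in user
                .setupMetadata(new UsernamePasswordMetadata("user", "password"), authMimeType)
                // encoder that serializes UsernamePasswordMetadata into the SETUP frame
                .rsocketStrategies(builder -> builder.encoder(new SimpleAuthenticationEncoder()))
                // placeholder endpoint
                .websocket(URI.create("ws://localhost:8080/rsocket"));
    }
}
On the server side this typically also requires simple authentication to be enabled on RSocketSecurity (for example security.simpleAuthentication(Customizer.withDefaults())); whether that is acceptable here depends on how the web login is supposed to be propagated.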

Spring Feign Not Compressing Response

I am using Spring Feign to compress requests and responses.
On Server Side:
server:
  servlet:
    context-path: /api/v1/
  compression:
    enabled: true
    min-response-size: 1024
When I hit the API from Chrome, I see that it adds 'Accept-Encoding': "gzip, deflate, br".
On Client Side:
server:
  port: 8192
  servlet:
    context-path: /api/demo
feign.compression.response.enabled: true
feign.client.config.default.loggerLevel: HEADERS
logging.level.com.example.feigndemo.ManagementApiService: DEBUG
eureka:
  client:
    enabled: false
management-api:
  ribbon:
    listOfServers: localhost:8080
Looking at the request headers, Feign is sending two separate Accept-Encoding headers:
Accept-Encoding: deflate
Accept-Encoding: gzip
gradle file
plugins {
id 'org.springframework.boot' version '2.1.8.RELEASE'
id 'io.spring.dependency-management' version '1.0.8.RELEASE'
id 'java'
}
group = 'com.example'
version = '0.0.1-SNAPSHOT'
sourceCompatibility = '1.8'
configurations {
compileOnly {
extendsFrom annotationProcessor
}
}
repositories {
mavenCentral()
}
ext {
set('springCloudVersion', "Greenwich.SR2")
}
dependencies {
implementation 'org.springframework.boot:spring-boot-starter-web'
compile ('org.springframework.cloud:spring-cloud-starter-netflix-ribbon')
compile('org.springframework.cloud:spring-cloud-starter-openfeign')
// https://mvnrepository.com/artifact/io.github.openfeign/feign-httpclient
//compile group: 'io.github.openfeign', name: 'feign-httpclient', version: '9.5.0'
compileOnly 'org.projectlombok:lombok'
annotationProcessor 'org.projectlombok:lombok'
testImplementation 'org.springframework.boot:spring-boot-starter-test'
}
dependencyManagement {
imports {
mavenBom "org.springframework.cloud:spring-cloud-dependencies:${springCloudVersion}"
}
}
The response is not compressed. What I have seen is that Spring Feign is sending "Accept-Encoding" as two separate header values.
Let me know if anything is wrong here.
I faced the same issue a couple of weeks back and came to know that there is no fruitful/straightforward way of doing it. I also got to know that when @patan reported the issue to the Spring community (@patan reported issue1 and @patan reported issue2), a ticket was created on the Tomcat side to attempt to fix the issue (issue link). There has also been a ticket (ticket link) on the Jetty side related to the same. Initially I planned to use the approach suggested on GitHub, but later came to know that the library had already been merged into the spring-cloud-openfeign-core jar under the org.springframework.cloud.openfeign.encoding package. Nevertheless, we could not achieve compression as expected and faced the following two challenges:
1. When we enable Feign compression via the properties, the org.springframework.cloud.openfeign.encoding.FeignAcceptGzipEncodingInterceptor (code-link) class adds the Accept-Encoding header with the values gzip and deflate, but due to the issue (ticket) the Tomcat server could not interpret it as a compression signal. As a solution, we have to add a manual Feign interceptor to override the FeignAcceptGzipEncodingInterceptor functionality and concatenate the headers.
2. The default compression settings for Feign work perfectly in the simplest scenarios, but when a client calls a microservice and that microservice calls another microservice through Feign, Feign cannot handle the compressed response, because the Spring Cloud OpenFeign decoder does not decompress responses by default (default spring open feign decoder), which eventually ends with the issue (issue link). So we have to write our own decoder to achieve decompression.
I finally found a solution based on the various available resources, so just follow these steps for Spring Feign compression:
application.yml
spring:
  http:
    encoding:
      enabled: true
# to enable server-side compression
server:
  compression:
    enabled: true
    mime-types:
      - application/json
    min-response-size: 2048
# to enable feign request/response compression
feign:
  httpclient:
    enabled: true
  compression:
    request:
      enabled: true
      mime-types:
        - application/json
      min-request-size: 2048
    response:
      enabled: true
NOTE: The above Feign configuration by default enables compression for all Feign clients.
CustomFeignDecoder
import feign.Response;
import feign.Util;
import feign.codec.Decoder;
import org.springframework.cloud.openfeign.encoding.HttpEncoding;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Objects;
import java.util.zip.GZIPInputStream;
public class CustomGZIPResponseDecoder implements Decoder {
final Decoder delegate;
public CustomGZIPResponseDecoder(Decoder delegate) {
Objects.requireNonNull(delegate, "Decoder must not be null. ");
this.delegate = delegate;
}
@Override
public Object decode(Response response, Type type) throws IOException {
Collection<String> values = response.headers().get(HttpEncoding.CONTENT_ENCODING_HEADER);
if(Objects.nonNull(values) && !values.isEmpty() && values.contains(HttpEncoding.GZIP_ENCODING)){
byte[] compressed = Util.toByteArray(response.body().asInputStream());
if ((compressed == null) || (compressed.length == 0)) {
return delegate.decode(response, type);
}
//decompression part
//after decompress we are delegating the decompressed response to default
//decoder
if (isCompressed(compressed)) {
final StringBuilder output = new StringBuilder();
final GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressed));
final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(gis, StandardCharsets.UTF_8));
String line;
while ((line = bufferedReader.readLine()) != null) {
output.append(line);
}
Response uncompressedResponse = response.toBuilder().body(output.toString().getBytes()).build();
return delegate.decode(uncompressedResponse, type);
}else{
return delegate.decode(response, type);
}
}else{
return delegate.decode(response, type);
}
}
private static boolean isCompressed(final byte[] compressed) {
return (compressed[0] == (byte) (GZIPInputStream.GZIP_MAGIC)) && (compressed[1] == (byte) (GZIPInputStream.GZIP_MAGIC >> 8));
}
}
FeignCustomConfiguration
import feign.RequestInterceptor;
import feign.RequestTemplate;
import feign.optionals.OptionalDecoder;
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.http.HttpMessageConverters;
import org.springframework.cloud.openfeign.support.ResponseEntityDecoder;
import org.springframework.cloud.openfeign.support.SpringDecoder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class CustomFeignConfiguration {
@Autowired
private ObjectFactory<HttpMessageConverters> messageConverters;
//concatenating headers because of https://github.com/spring-projects/spring-boot/issues/18176
@Bean
public RequestInterceptor gzipInterceptor() {
return new RequestInterceptor() {
@Override
public void apply(RequestTemplate template) {
template.header("Accept-Encoding", "gzip, deflate");
}
};
}
@Bean
public CustomGZIPResponseDecoder customGZIPResponseDecoder() {
OptionalDecoder feignDecoder = new OptionalDecoder(new ResponseEntityDecoder(new SpringDecoder(this.messageConverters)));
return new CustomGZIPResponseDecoder(feignDecoder);
}
}
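For completeness, here is a minimal sketch of a Feign client that would use these beans; the client name, URL path and method are placeholders, not from the original question. Note that because CustomFeignConfiguration is annotated with @Configuration and sits on the component-scan path, its decoder and interceptor apply to all Feign clients; to scope them to a single client, drop @Configuration and only reference the class from that client's configuration attribute.
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;

// Hypothetical client, shown only to illustrate wiring the custom configuration.
@FeignClient(name = "management-api", configuration = CustomFeignConfiguration.class)
public interface ManagementApiClient {

    // placeholder endpoint returning the (decompressed) body as a String
    @GetMapping("/api/v1/items")
    String getItems();
}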
Additional tips
If you are planning to build the custom decoder with just the feign-core libraries:
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import feign.Response;
import feign.Util;
import feign.codec.DecodeException;
import feign.codec.Decoder;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils;
import org.springframework.web.client.HttpMessageConverterExtractor;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.lang.reflect.WildcardType;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Map;
import java.util.Objects;
import java.util.zip.GZIPInputStream;
import static java.util.zip.GZIPInputStream.GZIP_MAGIC;
public class CustomGZIPResponseDecoder implements Decoder {
private final Decoder delegate;
public CustomGZIPResponseDecoder(Decoder delegate) {
Objects.requireNonNull(delegate, "Decoder must not be null. ");
this.delegate = delegate;
}
@Override
public Object decode(Response response, Type type) throws IOException {
Collection<String> values = response.headers().get("Content-Encoding");
if (Objects.nonNull(values) && !values.isEmpty() && values.contains("gzip")) {
byte[] compressed = Util.toByteArray(response.body().asInputStream());
if ((compressed == null) || (compressed.length == 0)) {
return delegate.decode(response, type);
}
if (isCompressed(compressed)) {
Response uncompressedResponse = getDecompressedResponse(response, compressed);
return getObject(type, uncompressedResponse);
} else {
return getObject(type, response);
}
} else {
return getObject(type, response);
}
}
private Object getObject(Type type, Response response) throws IOException {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
if (response.status() == 404 || response.status() == 204)
return Util.emptyValueOf(type);
if (Objects.isNull(response.body()))
return null;
if (byte[].class.equals(type))
return Util.toByteArray(response.body().asInputStream());
if (isParameterizeHttpEntity(type)) {
type = ((ParameterizedType) type).getActualTypeArguments()[0];
if (type instanceof Class || type instanceof ParameterizedType
|| type instanceof WildcardType) {
@SuppressWarnings({"unchecked", "rawtypes"})
HttpMessageConverterExtractor<?> extractor = new HttpMessageConverterExtractor(
type, Collections.singletonList(new MappingJackson2HttpMessageConverter(mapper)));
Object decodedObject = extractor.extractData(new FeignResponseAdapter(response));
return createResponse(decodedObject, response);
}
throw new DecodeException(HttpStatus.INTERNAL_SERVER_ERROR.value(),
"type is not an instance of Class or ParameterizedType: " + type);
} else if (isHttpEntity(type)) {
return delegate.decode(response, type);
} else if (String.class.equals(type)) {
String responseValue = Util.toString(response.body().asReader());
return StringUtils.isEmpty(responseValue) ? Util.emptyValueOf(type) : responseValue;
} else {
String s = Util.toString(response.body().asReader());
JavaType javaType = TypeFactory.defaultInstance().constructType(type);
return !StringUtils.isEmpty(s) ? mapper.readValue(s, javaType) : Util.emptyValueOf(type);
}
}
public static boolean isCompressed(final byte[] compressed) {
return (compressed[0] == (byte) (GZIP_MAGIC)) && (compressed[1] == (byte) (GZIP_MAGIC >> 8));
}
public static Response getDecompressedResponse(Response response, byte[] compressed) throws IOException {
final StringBuilder output = new StringBuilder();
final GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressed));
final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(gis, StandardCharsets.UTF_8));
String line;
while ((line = bufferedReader.readLine()) != null) {
output.append(line);
}
return response.toBuilder().body(output.toString().getBytes()).build();
}
public static String getDecompressedResponseAsString(byte[] compressed) throws IOException {
final StringBuilder output = new StringBuilder();
final GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressed));
final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(gis, StandardCharsets.UTF_8));
String line;
while ((line = bufferedReader.readLine()) != null) {
output.append(line);
}
return output.toString();
}
private boolean isParameterizeHttpEntity(Type type) {
if (type instanceof ParameterizedType) {
return isHttpEntity(((ParameterizedType) type).getRawType());
}
return false;
}
private boolean isHttpEntity(Type type) {
if (type instanceof Class) {
Class c = (Class) type;
return HttpEntity.class.isAssignableFrom(c);
}
return false;
}
private <T> ResponseEntity<T> createResponse(Object instance, Response response) {
MultiValueMap<String, String> headers = new LinkedMultiValueMap<>();
for (String key : response.headers().keySet()) {
headers.put(key, new LinkedList<>(response.headers().get(key)));
}
return new ResponseEntity<>((T) instance, headers, HttpStatus.valueOf(response
.status()));
}
private class FeignResponseAdapter implements ClientHttpResponse {
private final Response response;
private FeignResponseAdapter(Response response) {
this.response = response;
}
@Override
public HttpStatus getStatusCode() throws IOException {
return HttpStatus.valueOf(this.response.status());
}
@Override
public int getRawStatusCode() throws IOException {
return this.response.status();
}
@Override
public String getStatusText() throws IOException {
return this.response.reason();
}
@Override
public void close() {
try {
this.response.body().close();
} catch (IOException ex) {
// Ignore exception on close...
}
}
@Override
public InputStream getBody() throws IOException {
return this.response.body().asInputStream();
}
@Override
public HttpHeaders getHeaders() {
return getHttpHeaders(this.response.headers());
}
private HttpHeaders getHttpHeaders(Map<String, Collection<String>> headers) {
HttpHeaders httpHeaders = new HttpHeaders();
for (Map.Entry<String, Collection<String>> entry : headers.entrySet()) {
httpHeaders.put(entry.getKey(), new ArrayList<>(entry.getValue()));
}
return httpHeaders;
}
}
}
And if you are planning to build your own Feign builder, you can configure it like below:
Feign.builder().decoder(new CustomGZIPResponseDecoder(new feign.optionals.OptionalDecoder(new feign.codec.StringDecoder())))
.target(SomeFeignClient.class, "someurl");
Update to the above answer:
If you are planning to update the dependency version of spring-cloud-openfeign-core to 'org.springframework.cloud:spring-cloud-openfeign-core:2.2.5.RELEASE', then be aware of the following change in the FeignContentGzipEncodingAutoConfiguration class.
In FeignContentGzipEncodingAutoConfiguration the signature of the ConditionalOnProperty annotation changed from
@ConditionalOnProperty("feign.compression.request.enabled", matchIfMissing = false) to @ConditionalOnProperty(value = "feign.compression.request.enabled"), so by default the FeignContentGzipEncodingInterceptor bean will be injected into the Spring container if you have the application property feign.compression.request.enabled=true in your environment, and it will compress the request body once the default/configured size limit is exceeded. This causes a problem if your server does not have a mechanism to handle compressed requests; in such cases, add/modify the property to feign.compression.request.enabled=false.
This is actually a bug in Tomcat and Jetty: sending multiple Accept-Encoding headers as shown above is legal and should work, but Tomcat and Jetty have a bug that prevents both values from being read.
The bug has been reported in the spring boot github here.
And in tomcat here for reference.
In Tomcat the issue is fixed in 9.0.25, so if you can update to that, it can solve the problem. Failing that, here is a workaround you can apply:
You will need to create your own request interceptor to reconcile your gzip, deflate headers into a single header.
This interceptor needs to be added to the FeignClient configuration, and that configuration added to your feign client.
import feign.RequestInterceptor;
import feign.RequestTemplate;
import feign.template.HeaderTemplate;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
/**
* This is a workaround interceptor based on a known bug in Tomcat and Jetty where
* the requests are unable to perform gzip compression if the headers are in collection format.
* This is fixed in tomcat 9.0.25 - once we reach this version we can remove this class
*/
@Slf4j
public class GzipRequestInterceptor implements RequestInterceptor {
@Override
public void apply(RequestTemplate template) {
// don't add encoding to all requests - only to the ones with the incorrect header format
if (requestHasDualEncodingHeaders(template)) {
replaceTemplateHeader(template, "Accept-Encoding", Collections.singletonList("gzip,deflate"));
}
}
private boolean requestHasDualEncodingHeaders(RequestTemplate template) {
return template.headers().get("Accept-Encoding").contains("deflate")
&& template.headers().get("Accept-Encoding").contains("gzip");
}
/** Because request template is immutable, we have to do some workarounds to get to the headers */
private void replaceTemplateHeader(RequestTemplate template, String key, Collection<String> value) {
try {
Field headerField = RequestTemplate.class.getDeclaredField("headers");
headerField.setAccessible(true);
((Map)headerField.get(template)).remove(key);
HeaderTemplate newEncodingHeaderTemplate = HeaderTemplate.create(key, value);
((Map)headerField.get(template)).put(key, newEncodingHeaderTemplate);
} catch (NoSuchFieldException e) {
// with Lombok's @Slf4j the generated logger field is named "log"
log.error("exception when trying to access the field [headers] via reflection", e);
} catch (IllegalAccessException e) {
log.error("exception when trying to get properties from the template headers", e);
}
}
I know the above looks overkill, but because the template headers are unmodifiable, we just use a bit of reflection to modify them to how we want.
Add the above interceptor to your configuration bean
import feign.RequestInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class FeignGzipEncodingConfiguration {
@Bean
public RequestInterceptor gzipRequestInterceptor() {
return new GzipRequestInterceptor();
}
}
You can finally add this to your feign client with the configuration annotation parameter
@FeignClient(name = "feign-client", configuration = FeignGzipEncodingConfiguration.class)
public interface FeignClient {
...
}
The request interceptor should now be hit when you send a feign-client request for gzipped information. This will wipe the dual header and write a single concatenated one in the form gzip,deflate.
If you are using a recent Spring Cloud OpenFeign version, a default gzip decoder is provided, so there is no need to write a custom decoder. Use the property below instead:
feign:
  compression:
    response:
      enabled: true
      useGzipDecoder: true

Sticky session Ribbon rule in Zuul always has null request

I am attempting to implement a sticky session load balancer rule in a Zuul proxy service. I am using the code from this example: https://github.com/alejandro-du/vaadin-microservices-demo/blob/master/proxy-server/src/main/java/com/example/StickySessionRule.java
I seem to have everything configured correctly, and the rule is triggered in my debugger, but the call to RequestContext.getCurrentContext().getResponse() always returns null, so the cookie is never found and the rule never takes effect.
The rest of the Zuul config is working 100%. My traffic is proxied and routed and I can use the app fine, only the sticky session rule is not working.
Is there another step I am missing to get the request wired in to this rule correctly?
My route config:
zuul.routes.appname.path=/appname/**
zuul.routes.appname.sensitiveHeaders=
zuul.routes.appname.stripPrefix=false
zuul.routes.appname.retryable=true
zuul.add-host-header=true
zuul.routes.appname.service-id=APP_NAME
hystrix.command.APP_NAME.execution.isolation.strategy=THREAD
hystrix.command.APP_NAME.execution.isolation.thread.timeoutInMilliseconds=125000
APP_NAME.ribbon.ServerListRefreshInterval=10000
APP_NAME.ribbon.retryableStatusCodes=500
APP_NAME.ribbon.MaxAutoRetries=5
APP_NAME.ribbon.MaxAutoRetriesNextServer=1
APP_NAME.ribbon.OkToRetryOnAllOperations=true
APP_NAME.ribbon.ReadTimeout=5000
APP_NAME.ribbon.ConnectTimeout=5000
APP_NAME.ribbon.EnablePrimeConnections=true
APP_NAME.ribbon.NFLoadBalancerRuleClassName=my.package.name.StickySessionRule
The app:
@EnableZuulProxy
@SpringBootApplication
public class ApplicationGateway {
public static void main(String[] args) {
SpringApplication.run(ApplicationGateway.class, args);
}
@Bean
public LocationRewriteFilter locationRewriteFilter() {
return new LocationRewriteFilter();
}
}
EDIT: As requested, the code:
import com.netflix.loadbalancer.Server;
import com.netflix.loadbalancer.ZoneAvoidanceRule;
import com.netflix.zuul.context.RequestContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
/**
* @author Alejandro Duarte.
*/
public class StickySessionRule extends ZoneAvoidanceRule {
public static final String COOKIE_NAME_SUFFIX = "-" + StickySessionRule.class.getSimpleName();
@Override
public Server choose(Object key) {
Optional<Cookie> cookie = getCookie(key);
if (cookie.isPresent()) {
Cookie hash = cookie.get();
List<Server> servers = getLoadBalancer().getReachableServers();
Optional<Server> server = servers.stream()
.filter(s -> s.isAlive() && s.isReadyToServe())
.filter(s -> hash.getValue().equals("" + s.hashCode()))
.findFirst();
if (server.isPresent()) {
return server.get();
}
}
return useNewServer(key);
}
private Server useNewServer(Object key) {
Server server = super.choose(key);
HttpServletResponse response = RequestContext.getCurrentContext().getResponse();
if (response != null) {
String cookieName = getCookieName(server);
Cookie newCookie = new Cookie(cookieName, "" + server.hashCode());
newCookie.setPath("/");
response.addCookie(newCookie);
}
return server;
}
private Optional<Cookie> getCookie(Object key) {
HttpServletRequest request = RequestContext.getCurrentContext().getRequest();
if (request != null) {
Server server = super.choose(key);
String cookieName = getCookieName(server);
Cookie[] cookies = request.getCookies();
if (cookies != null) {
return Arrays.stream(cookies)
.filter(c -> c.getName().equals(cookieName))
.findFirst();
}
}
return Optional.empty();
}
private String getCookieName(Server server) {
return server.getMetaInfo().getAppName() + COOKIE_NAME_SUFFIX;
}
}
I think you are missing a PreFilter, like this:
import com.netflix.zuul.context.RequestContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.springframework.cloud.netflix.zuul.filters.support.FilterConstants;
public class PreFilter extends com.netflix.zuul.ZuulFilter {
@Override
public Object run() {
RequestContext ctx = RequestContext.getCurrentContext();
RequestContext.getCurrentContext().set(FilterConstants.LOAD_BALANCER_KEY, ctx.getRequest());
return null;
}
@Override
public boolean shouldFilter() {
return true;
}
@Override
public int filterOrder() {
return FilterConstants.SEND_RESPONSE_FILTER_ORDER;
}
@Override
public String filterType() {
return "pre";
}
}
Mark as Bean
@Bean
public PreFilter preFilter() {
return new PreFilter();
}
And use it in your rule
@Override
public Server choose(Object key) {
javax.servlet.http.HttpServletRequest request = (javax.servlet.http.HttpServletRequest) key;
// ... read the sticky-session cookie from this request instead of from RequestContext ...
}
RequestContext does not work here because of hystrix.command.APP_NAME.execution.isolation.strategy=THREAD: with thread isolation the rule runs on a Hystrix worker thread, so the thread-local RequestContext of the original servlet thread is empty, which is why the request has to be passed in as the load-balancer key.
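To make the whole flow concrete, here is a minimal, hedged sketch of a rule variant that reads the cookie from the key supplied by the PreFilter above. It assumes the key really is the original HttpServletRequest (stored under LOAD_BALANCER_KEY); writing the cookie for a newly chosen server is omitted because that still needs access to the response.
import com.netflix.loadbalancer.Server;
import com.netflix.loadbalancer.ZoneAvoidanceRule;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

public class StickySessionFromKeyRule extends ZoneAvoidanceRule {

    @Override
    public Server choose(Object key) {
        // The PreFilter stores the current HttpServletRequest under LOAD_BALANCER_KEY,
        // so Ribbon hands it to us here even though RequestContext is empty on this thread.
        if (key instanceof HttpServletRequest) {
            Optional<Cookie> cookie = getStickyCookie((HttpServletRequest) key);
            if (cookie.isPresent()) {
                String wanted = cookie.get().getValue();
                List<Server> servers = getLoadBalancer().getReachableServers();
                Optional<Server> match = servers.stream()
                        .filter(s -> s.isAlive() && s.isReadyToServe())
                        .filter(s -> wanted.equals("" + s.hashCode()))
                        .findFirst();
                if (match.isPresent()) {
                    return match.get();
                }
            }
        }
        return super.choose(key);
    }

    private Optional<Cookie> getStickyCookie(HttpServletRequest request) {
        Cookie[] cookies = request.getCookies();
        if (cookies == null) {
            return Optional.empty();
        }
        // same naming convention as the original rule: "<appName>-StickySessionRule"
        return Arrays.stream(cookies)
                .filter(c -> c.getName().endsWith("-StickySessionRule"))
                .findFirst();
    }
}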

Set SSL and timeout to WS call in Spring (HttpsUrlConnectionMessageSender, HttpsUrlConnectionMessageSender)

I am trying to set a timeout and SSL (HTTPS) for a WS call.
PS: No need to mark this as a duplicate; the only similar question has never been answered.
I tried HttpsUrlConnectionMessageSender, which adds support for (self-signed) HTTPS certificates, but it does not support timeouts.
When I switch to HttpComponentsMessageSender, which supports timeouts (connection and read timeouts), it does not support SSL.
I want to combine timeout and SSL when calling the WS:
webServiceTemplate.setDefaultUri(uri);
response = webServiceTemplate.marshalSendAndReceive(inputs, new SoapHandler(createCredentials(), soapAction));
Finally, I did it using HttpComponentsMessageSender. Here is my code:
HttpComponentsMessageSender messageSender = new HttpComponentsMessageSender();
HttpClient httpClient = HttpClientFactory.getHttpsClient(sslUtils, timeout);
messageSender.setHttpClient(httpClient);
webServiceTemplate.setMessageSender(messageSender);
I also created a new factory class HttpClientFactory that sets the SSL and timeout:
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLContextBuilder;
import org.apache.http.conn.ssl.SSLContexts;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext;
public class HttpClientFactory {
private static CloseableHttpClient client;
private HttpClientFactory() {
}
public static HttpClient getHttpsClient(SslUtils sslUtils, int timeout) throws Exception {
if (client != null) {
return client;
}
SSLContext sslcontext = getSSLContext(sslUtils);
SSLConnectionSocketFactory factory = new SSLConnectionSocketFactory(sslcontext, new HostnameVerifier() {
@Override
public boolean verify(String hostname, SSLSession session) {
return true;
}
});
HttpClientBuilder httpClientBuilder = HttpClients.custom();
httpClientBuilder.addInterceptorFirst(new ContentLengthHeaderRemover());
RequestConfig config = RequestConfig.custom()
.setConnectTimeout(timeout)
.setConnectionRequestTimeout(timeout)
.setSocketTimeout(timeout)
.build();
// cache the built client so the null check above can reuse it on subsequent calls
client = httpClientBuilder.setSSLSocketFactory(factory)
.setDefaultRequestConfig(config)
.build();
return client;
}
private static class ContentLengthHeaderRemover implements HttpRequestInterceptor {
@Override
public void process(HttpRequest request, HttpContext context) throws HttpException, IOException {
request.removeHeaders(HTTP.CONTENT_LEN);
}
}
public static void releaseInstance() {
client = null;
}
private static SSLContext getSSLContext(SslUtils sslUtils) throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException, KeyManagementException {
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(sslUtils.getKeystore().getInputStream(), sslUtils.getKeyPwd().toCharArray());
sslUtils.getKeystore().getInputStream().close();
KeyStore ts = KeyStore.getInstance("JKS");
ts.load(sslUtils.getTrustStore().getInputStream(), sslUtils.getTrustPwd().toCharArray());
sslUtils.getTrustStore().getInputStream().close();
SSLContextBuilder sslContextBuilder = SSLContexts.custom();
try {
sslContextBuilder = SSLContexts.custom().loadKeyMaterial(ks, sslUtils.getKeyPwd().toCharArray());
} catch (UnrecoverableKeyException e) {
e.printStackTrace();
}
sslContextBuilder.loadTrustMaterial(ts, new TrustSelfSignedStrategy());
return sslContextBuilder.build();
}
}
For information, SslUtils is just a bean class that holds the keystore and truststore information:
public class SslUtils {
private Resource keystore;
private String keyPwd;
private Resource trustStore;
private String trustPwd;
// Getters and Setters
}
This works for me and lets me use both SSL and timeouts at the same time. I hope this will help others.
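As an aside, if you would rather stay with HttpsUrlConnectionMessageSender, a minimal sketch (the timeout values are placeholders) is to subclass it and set the plain java.net timeouts on the connection in prepareConnection, keeping its SSL handling:
import java.io.IOException;
import java.net.HttpURLConnection;

import org.springframework.ws.transport.http.HttpsUrlConnectionMessageSender;

public class TimeoutHttpsUrlConnectionMessageSender extends HttpsUrlConnectionMessageSender {

    @Override
    protected void prepareConnection(HttpURLConnection connection) throws IOException {
        // keep the (self-signed) certificate / SSL handling of the parent class
        super.prepareConnection(connection);
        // then apply connect and read timeouts (placeholder values, in milliseconds)
        connection.setConnectTimeout(5000);
        connection.setReadTimeout(10000);
    }
}
The answer below uses the same prepareConnection hook, there to add a Basic Authentication header.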
In the case of HTTPS with basic authentication, you may not need a certificate; you can set the encoded username:password in the request header.
package com.james.medici.app.ws;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;
import org.springframework.ws.client.core.WebServiceTemplate;
import org.springframework.ws.transport.http.HttpUrlConnectionMessageSender;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.util.Base64;
@Slf4j
@Configuration
public class SoapClientConfiguration {
#Value("${james.medici.url}")
private String defaultUri;
#Value("${james.medici.username}")
private String userName;
#Value("${james.medici.passcode}")
private String userPassword;
public static final String SEPARATOR = ":";
public static final String AUTHORIZATION = "Authorization";
public static final String BASIC = "Basic ";
class CustomHttpUrlConnectionMessageSender extends HttpUrlConnectionMessageSender {
@Override
protected void prepareConnection(HttpURLConnection connection) throws IOException {
Base64.Encoder enc = Base64.getEncoder();
String userpassword = StringUtils.joinWith(SEPARATOR, userName, userPassword);
String encodedAuthorization = enc.encodeToString(userpassword.getBytes());
connection.setRequestProperty(AUTHORIZATION, BASIC + encodedAuthorization);
super.prepareConnection(connection);
}
}
@Bean
public Jaxb2Marshaller marshaller() {
Jaxb2Marshaller marshaller = new Jaxb2Marshaller();
marshaller.setContextPath("com.james.medici.app.ws.model");
return marshaller;
}
@Bean
public WebServiceTemplate webServiceTemplate() {
log.info(defaultUri);
WebServiceTemplate webServiceTemplate = new WebServiceTemplate();
webServiceTemplate.setMarshaller(marshaller());
webServiceTemplate.setUnmarshaller(marshaller());
webServiceTemplate.setDefaultUri(defaultUri);
webServiceTemplate.setMessageSender(new CustomHttpUrlConnectionMessageSender());
return webServiceTemplate;
}
}

How to call another Eureka client in a Zuul server

application.properties
zuul.routes.commonservice.path=/root/path/commonservice/**
zuul.routes.commonservice.service-id=commonservice
zuul.routes.customer.path=/root/path/customer/**
zuul.routes.customer.service-id=customer
zuul.routes.student.path=/root/path/student/**
zuul.routes.student.service-id=student
and below is my custom filter
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import com.openreach.gateway.common.constant.CommonConstant;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class HeaderFilter extends ZuulFilter {
private static final Logger log = LoggerFactory.getLogger(HeaderFilter.class);
@Override
public String filterType() {
return "pre";
}
@Override
public int filterOrder() {
return 1;
}
@Override
public boolean shouldFilter() {
return true;
}
@Override
public Object run() {
RequestContext context = RequestContext.getCurrentContext();
HttpSession httpSession = context.getRequest().getSession();
String idOrEmail = context.getRequest().getHeader("coustom");
if (httpSession.getAttribute("someAttributes") == null) {
if (idOrEmail != null) {
//call the common-service and get details and set it first
//then call the customer service with common-service details
} else {
//call the customer service
}
} else {
log.info("data excits");
// routrs the request to the backend with the excisting data details
}
context.addZuulResponseHeader("Cookie", "JSESSIONID=" + httpSession.getId());
return null;
}
}
I'm using the Ribbon load balancer with Zuul. My problem is: how should I call the common-service first? I need all my requests to check the header value and then call the actual service endpoint.
First, use the @LoadBalanced qualifier to create a RestTemplate bean that is load balanced.
@LoadBalanced
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
And inject the bean into the filter:
@Autowired
RestTemplate restTemplate;
Then you can get the result using the RestTemplate's methods, like below:
String result = restTemplate.postForObject("http://commonservice/url", object, String.class);
ref: http://cloud.spring.io/spring-cloud-static/spring-cloud.html#_spring_resttemplate_as_a_load_balancer_client
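Putting the answer together with the question's filter, a minimal, hedged sketch of how this could look inside the HeaderFilter's run() method (the common-service path, the String response type and the session-attribute handling are assumptions for illustration, not from the original question):
@Override
public Object run() {
    RequestContext context = RequestContext.getCurrentContext();
    HttpSession httpSession = context.getRequest().getSession();
    String idOrEmail = context.getRequest().getHeader("coustom");

    if (httpSession.getAttribute("someAttributes") == null && idOrEmail != null) {
        // Load-balanced call: "commonservice" is resolved by Ribbon, so no host/port is hard-coded.
        // The path and the String response type are placeholders.
        String details = restTemplate.getForObject(
                "http://commonservice/details/{idOrEmail}", String.class, idOrEmail);
        httpSession.setAttribute("someAttributes", details);
    }

    context.addZuulResponseHeader("Cookie", "JSESSIONID=" + httpSession.getId());
    return null;
}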

Resources