RSocketRequester doesn't consider dataMimeType - spring-boot

When trying to use Protobuf with RSocket, the RSocketRequester does not consider the dataMimeType set to application/protobuf or application/vnd.google.protobuf; I get a "No decoder" error.
Client Application
@SpringBootApplication
@Slf4j
public class PersonServiceClientApplication {

    @Bean
    RSocketRequester rSocketRequester(RSocketStrategies rSocketStrategies) {
        return RSocketRequester.builder()
                .rsocketFactory(
                        factory -> factory.dataMimeType("application/protobuf").frameDecoder(PayloadDecoder.ZERO_COPY))
                .rsocketStrategies(rSocketStrategies).connectTcp("localhost", 9080).retry().block();
    }

    @Bean
    public RSocketStrategiesCustomizer protobufRSocketStrategyCustomizer() {
        return (strategy) -> {
            strategy.decoder(new ProtobufDecoder());
            strategy.encoder(new ProtobufEncoder());
        };
    }

    public static void main(String[] args) {
        ApplicationContext context = SpringApplication.run(PersonServiceClientApplication.class, args);
        RSocketRequester req = context.getBean(RSocketRequester.class);
        req.route("io.github.kprasad99.person.get3").retrieveMono(Person.class)
                .doOnNext(e -> log.info(e.getFirstName())).block(Duration.ofSeconds(30));
    }
}
Stacktrace
Exception in thread "restartedMain" java.lang.reflect.InvocationTargetException
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.springframework.boot.devtools.restart.RestartLauncher.run(RestartLauncher.java:49)
Caused by: java.lang.IllegalArgumentException: No decoder for io.github.kprasad.person.proto.PersonProto$Person
at org.springframework.messaging.rsocket.RSocketStrategies.decoder(RSocketStrategies.java:92)
at org.springframework.messaging.rsocket.DefaultRSocketRequester$DefaultRequestSpec.retrieveMono(DefaultRSocketRequester.java:274)
at org.springframework.messaging.rsocket.DefaultRSocketRequester$DefaultRequestSpec.retrieveMono(DefaultRSocketRequester.java:258)
at io.github.kprasad99.person.PersonServiceClientApplication.main(PersonServiceClientApplication.java:69)
... 5 more
However, if I explicitly disable RSocketStrategiesAutoConfiguration and recreate the RSocketStrategies bean, it works.
@SpringBootApplication(exclude = { RSocketStrategiesAutoConfiguration.class })
@Slf4j
public class PersonServiceClientApplication {

    private static final String PATHPATTERN_ROUTEMATCHER_CLASS = "org.springframework.web.util.pattern.PathPatternRouteMatcher";

    @Bean
    public RSocketStrategies rSocketStrategies(ObjectProvider<RSocketStrategiesCustomizer> customizers) {
        RSocketStrategies.Builder builder = RSocketStrategies.builder();
        if (ClassUtils.isPresent(PATHPATTERN_ROUTEMATCHER_CLASS, null)) {
            builder.routeMatcher(new PathPatternRouteMatcher());
        }
        customizers.orderedStream().forEach((customizer) -> customizer.customize(builder));
        return builder.build();
    }

    @Bean
    RSocketRequester rSocketRequester(RSocketStrategies rSocketStrategies) {
        return RSocketRequester.builder()
                .rsocketFactory(
                        factory -> factory.dataMimeType("application/protobuf").frameDecoder(PayloadDecoder.ZERO_COPY))
                .rsocketStrategies(rSocketStrategies).connectTcp("localhost", 9080).retry().block();
    }

    @Bean
    public RSocketStrategiesCustomizer protobufRSocketStrategyCustomizer() {
        return (strategy) -> {
            strategy.decoder(new ProtobufDecoder());
            strategy.encoder(new ProtobufEncoder());
        };
    }

    public static void main(String[] args) {
        ApplicationContext context = SpringApplication.run(PersonServiceClientApplication.class, args);
        RSocketRequester req = context.getBean(RSocketRequester.class);
        req.route("io.github.kprasad99.person.get3").retrieveMono(Person.class)
                .doOnNext(e -> log.info(e.getFirstName())).block(Duration.ofSeconds(30));
    }
}
Why is neither the dataMimeType nor the protobufRSocketStrategyCustomizer bean considered when decoding?

I was able to solve this; I needed to use application/x-protobuf. Below are the beans created for the client.
@Bean
public Mono<RSocketRequester> rSocketRequester(
        RSocketRequester.Builder rsocketRequesterBuilder, ClientTransport clientTransport,
        RSocketStrategies strategies) {
    Mono<RSocketRequester> rsocketRequester = rsocketRequesterBuilder.rsocketStrategies(strategies)
            .dataMimeType(new MimeType("application", "x-protobuf"))
            .connect(clientTransport).log();
    return rsocketRequester;
}

@Bean
public RSocketStrategiesCustomizer protobufRSocketStrategyCustomizer() {
    return (strategy) -> {
        strategy.decoder(new ProtobufDecoder());
        strategy.encoder(new ProtobufEncoder());
    };
}

Related

How to configure specific configurations to spring-cloud-starter-circuitbreaker-resilience4j?

I was configuring spring-cloud-starter-circuitbreaker-resilience4j in my application. When I set a specific configuration, my factory does not load those configurations.
@Bean
public Customizer<Resilience4JCircuitBreakerFactory> customizer() {
    CircuitBreakerConfig circuitBreakerConfig = CircuitBreakerConfig.custom()
            .slidingWindowType(CircuitBreakerConfig.SlidingWindowType.COUNT_BASED)
            .slidingWindowSize(15)
            .minimumNumberOfCalls(8)
            .waitDurationInOpenState(Duration.ofSeconds(10))
            .build();
    Customizer<Resilience4JCircuitBreakerFactory> customizer = factory ->
            factory.configure(builder -> builder.circuitBreakerConfig(circuitBreakerConfig), "circuit");
    return customizer;
}
Service
@Slf4j
@Service
@RequiredArgsConstructor
public class ServiceA {

    private final CircuitBreakerFactory circuitBreakerFactory;

    public String get() {
        final CircuitBreaker circuitBreaker = circuitBreakerFactory.create("circuit");
        return circuitBreaker.run(this::strings, this::fallbackMethod);
    }

    private String fallbackMethod(final Throwable t) {
        log.info("teste");
        log.info(t.getMessage());
        return "b";
    }

    private String strings() {
        return "a";
    }
}
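For comparison, the same settings can also be registered as a factory-wide default via configureDefault, which is a useful way to check whether the Customizer bean is being picked up at all. This is only a sketch based on the documented Resilience4JConfigBuilder API, reusing the settings from the question:

@Bean
public Customizer<Resilience4JCircuitBreakerFactory> defaultCustomizer() {
    // Applies to every circuit breaker id, not only "circuit".
    return factory -> factory.configureDefault(id -> new Resilience4JConfigBuilder(id)
            .circuitBreakerConfig(CircuitBreakerConfig.custom()
                    .slidingWindowType(CircuitBreakerConfig.SlidingWindowType.COUNT_BASED)
                    .slidingWindowSize(15)
                    .minimumNumberOfCalls(8)
                    .waitDurationInOpenState(Duration.ofSeconds(10))
                    .build())
            .timeLimiterConfig(TimeLimiterConfig.ofDefaults())
            .build());
}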

KafkaListenerEndpointRegistry container start() throws NullPointerException

I have a requirement where I want to start a Kafka consumer manually.
Code:
class Dummy implements ConsumerSeekAware {

    @Autowired
    KafkaListenerEndpointRegistry registry;

    CountDownLatch latch;

    @Autowired
    ConcurrentKafkaListenerContainerFactory factory;

    onIdleEvent() {
        latch.countdown()
    }

    @KafkaListener(id = "myContainer", topics = "mytopic", autoStartup = "false")
    public void listen() {}

    @Scheduled(cron = " some time ")
    void do_some_consumption() {
        latch = new CountDownLatch(1);
        this.registry.getListenerContainer("myContainer").start();
        latch.await();
        // do processing
        this.registry.getListenerContainer("myContainer").stop()
    }
}
I have created the ConcurrentKafkaListenerContainerFactory bean with all its properties in another Config class, which I am autowiring here.
However, I get a NullPointerException when I start my container using this.registry.getListenerContainer("myContainer").start().
java.lang.NullPointerException: null
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.scheduling.support.ScheduledMethodRunnable.run(ScheduledMethodRunnable.java:84)
at org.springframework.scheduling.support.DelegatingErrorHandlingRunnable.run(DelegatingErrorHandlingRunnable.java:54)
at org.springframework.scheduling.concurrent.ReschedulingRunnable.run(ReschedulingRunnable.java:93)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
I just copied your code into a Spring Boot app (which auto-configures the factories), and everything works perfectly as expected...
@SpringBootApplication
@EnableScheduling
public class So62412316Application {

    public static void main(String[] args) {
        SpringApplication.run(So62412316Application.class, args);
    }

    @Bean
    public ApplicationRunner runner(KafkaTemplate<String, String> template) {
        return args -> {
            template.send("mytopic", "foo");
        };
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("mytopic").partitions(1).replicas(1).build();
    }
}

@Component
class Dummy implements ConsumerSeekAware {

    @Autowired
    KafkaListenerEndpointRegistry registry;

    CountDownLatch latch;

    @Autowired
    ConcurrentKafkaListenerContainerFactory factory;

    @EventListener
    public void onIdleEvent(ListenerContainerIdleEvent event) {
        System.out.println(event);
        latch.countDown();
    }

    @KafkaListener(id = "myContainer", topics = "mytopic", autoStartup = "false")
    public void listen(String in) {
        System.out.println(in);
    }

    @Scheduled(initialDelay = 5_000, fixedDelay = 60_000)
    void do_some_consumption() throws InterruptedException {
        latch = new CountDownLatch(1);
        this.registry.getListenerContainer("myContainer").start();
        latch.await();
        this.registry.getListenerContainer("myContainer").stop();
    }
}
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.listener.idle-event-interval=5000
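If the NullPointerException persists in your own project, the most likely explanation is that getListenerContainer("myContainer") returns null, which happens when the class holding the @KafkaListener is not itself a Spring bean (note the @Component above) or when the id does not match. A defensive sketch of the scheduled method makes that failure mode explicit:

@Scheduled(initialDelay = 5_000, fixedDelay = 60_000)
void do_some_consumption() throws InterruptedException {
    latch = new CountDownLatch(1);
    MessageListenerContainer container = this.registry.getListenerContainer("myContainer");
    if (container == null) {
        // A null container means no @KafkaListener with id "myContainer"
        // was registered, which would explain the NPE on .start().
        throw new IllegalStateException("No listener container with id 'myContainer'");
    }
    container.start();
    latch.await();
    container.stop();
}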

Spring WebClient Error - reactor.core.publisher.FluxOnAssembly cannot be cast to class reactor.core.publisher.Mono

Following is the REST service that I have implemented:
@GetMapping(produces = MediaType.TEXT_EVENT_STREAM_VALUE, value = "/events")
Flux<Event> events() {
    Flux<Event> eventFlux = Flux.fromStream(Stream.generate { new Event(System.currentTimeMillis(), LocalDate.now()) })
    Flux<Long> durationFlux = Flux.interval(Duration.ofSeconds(1))
    return Flux.zip(eventFlux, durationFlux)
            .map { it.t1 }
}

public static void main(String[] args) {
    SpringApplication.run(ReactiveServiceApplication)
}
The WebClient to consume this looks as follows:
@Bean
WebClient client() {
    return WebClient.create("http://localhost:8080")
}

@Bean
CommandLineRunner demo(WebClient client) {
    return { args ->
        client.get().uri("/events")
                .accept(MediaType.TEXT_EVENT_STREAM)
                .exchange()
                .flatMap { response -> response.bodyToFlux(Event.class) }
                .subscribe { println it }
    }
}

public static void main(String[] args) {
    new SpringApplicationBuilder(ReactiveClientApplication)
            .properties(Collections.singletonMap("server.port", "8081"))
            .run(args)
}
The error I am getting is as follows.
reactor.core.Exceptions$ErrorCallbackNotImplemented: java.lang.ClassCastException: class reactor.core.publisher.FluxOnAssembly cannot be cast to class reactor.core.publisher.Mono (reactor.core.publisher.FluxOnAssembly and reactor.core.publisher.Mono are in unnamed module of loader 'app')
Caused by: java.lang.ClassCastException: class reactor.core.publisher.FluxOnAssembly cannot be cast to class reactor.core.publisher.Mono (reactor.core.publisher.FluxOnAssembly and reactor.core.publisher.Mono are in unnamed module of loader 'app')
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:118) ~[reactor-core-3.3.5.RELEASE.jar:3.3.5.RELEASE]
Why is this error happening?
The reason is that Mono.flatMap expects the mapping function to return a Mono, whereas the closure { response -> response.bodyToFlux(Event.class) } returns a Flux.
Changing to flatMapMany fixes this. Code below.
@Bean
CommandLineRunner getFluxDemoExchange(WebClient client) {
    return { args ->
        client.get().uri("/events")
                .accept(MediaType.TEXT_EVENT_STREAM)
                .exchange()
                .flatMapMany { response -> response.bodyToFlux(Event.class) }
                .subscribe { println it }
    }
}
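As a side note, the retrieve() shortcut on WebClient yields a Flux directly and avoids the exchange()/flatMapMany combination altogether. A minimal sketch, written in Java rather than the Groovy above, with a hypothetical bean name:

@Bean
CommandLineRunner getFluxDemoRetrieve(WebClient client) {
    return args -> client.get().uri("/events")
            .accept(MediaType.TEXT_EVENT_STREAM)
            .retrieve()                 // shortcut for exchange() plus body extraction
            .bodyToFlux(Event.class)    // already a Flux, so no flatMapMany is needed
            .subscribe(System.out::println);
}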

Bean not getting overridden in Spring boot

I am trying to write and test an application that uses spring-cloud with Azure Functions, following this tutorial:
https://github.com/markusgulden/aws-tutorials/tree/master/spring-cloud-function/spring-cloud-function-azure/src/main/java/de/margul/awstutorials/springcloudfunction/azure
I am trying to write a test case and override the bean.
Here is the application class with the function and the handler bean:
@SpringBootApplication
@ComponentScan(basePackages = { "com.package" })
public class DataFunctions extends AzureSpringBootRequestHandler<GenericMessage<Optional<String>>, Data> {

    @FunctionName("addData")
    public HttpResponseMessage addDataRun(
            @HttpTrigger(name = "add", methods = {
                    HttpMethod.POST }, authLevel = AuthorizationLevel.FUNCTION) HttpRequestMessage<Optional<String>> request,
            final ExecutionContext context) throws JsonParseException, JsonMappingException, IOException {
        context.getLogger().info("Java HTTP trigger processed a POST request.");
        try {
            handleRequest(new GenericMessage<Optional<String>>(request.getBody()), context);
        } catch (ServiceException ex) {
            ErrorMessage em = new ErrorMessage();
            return request.createResponseBuilder(handleException(ex, em)).body(em).build();
        }
        return request.createResponseBuilder(HttpStatus.CREATED).build();
    }

    @Autowired
    MyService mService;

    @Bean
    public Consumer<GenericMessage<Optional<String>>> addData() {
        ObjectMapper mapper = new ObjectMapper();
        return req -> {
            SomeModel fp = null;
            try {
                fp = mapper.readValue(req.getPayload().get(), SomeModel.class);
            } catch (Exception e) {
                throw new ServiceException(e);
            }
            mService.addData(fp);
        };
    }
}
I want to test by overriding the above bean.
CosmosDB Spring configuration:
@Configuration
@EnableDocumentDbRepositories
public class CosmosDBConfig extends AbstractDocumentDbConfiguration {

    @Value("${cosmosdb.collection.endpoint}")
    private String uri;

    @Value("${cosmosdb.collection.key}")
    private String key;

    @Value("${cosmosdb.collection.dbname}")
    private String dbName;

    @Value("${cosmosdb.connect.directly}")
    private Boolean connectDirectly;

    @Override
    public DocumentDBConfig getConfig() {
        ConnectionPolicy cp = ConnectionPolicy.GetDefault();
        if (connectDirectly) {
            cp.setConnectionMode(ConnectionMode.DirectHttps);
        } else {
            cp.setConnectionMode(ConnectionMode.Gateway);
        }
        return DocumentDBConfig.builder(uri, key, dbName).connectionPolicy(cp).build();
    }
}
Here is the test configuration:
@TestConfiguration
@PropertySource(value = "classpath:application.properties", encoding = "UTF-8")
@Profile("test")
@Import({ DataFunctions.class })
public class TestConfig {

    @Bean(name = "addData")
    @Primary
    public Consumer<GenericMessage<Optional<String>>> addData() {
        return req -> {
            System.out.println("data mock");
        };
    }

    @Bean
    @Primary
    public DocumentDBConfig getConfig() {
        return Mockito.mock(DocumentDBConfig.class);
    }
}
Finally, the test class:
@RunWith(SpringRunner.class)
// @SpringBootTest // Enabling this gives an initialization error.
@ActiveProfiles("test")
public class TempTest {

    @InjectMocks
    DataFunctions func;

    @Mock
    MyService mService;

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
    }

    private Optional<String> createRequestString(final String res) throws IOException {
        InputStream iStream = TempTest.class.getResourceAsStream(res);
        String charset = "UTF-8";
        try (BufferedReader br = new BufferedReader(new InputStreamReader(iStream, charset))) {
            return Optional.of(br.lines().collect(Collectors.joining(System.lineSeparator())));
        }
    }

    @Test
    public void testHttpPostTriggerJava() throws Exception {
        @SuppressWarnings("unchecked")
        final HttpRequestMessage<Optional<String>> req = mock(HttpRequestMessage.class);
        final Optional<String> queryBody = createRequestString("/test-data.json");
        doNothing().when(mService).addData(Mockito.any(SomeModel.class));
        doReturn(queryBody).when(req).getBody();
        doAnswer(new Answer<HttpResponseMessage.Builder>() {
            @Override
            public HttpResponseMessage.Builder answer(InvocationOnMock invocation) {
                HttpStatus status = (HttpStatus) invocation.getArguments()[0];
                return new HttpResponseMessageMock.HttpResponseMessageBuilderMock().status(status);
            }
        }).when(req).createResponseBuilder(any(HttpStatus.class));
        final ExecutionContext context = mock(ExecutionContext.class);
        doReturn(Logger.getGlobal()).when(context).getLogger();
        doReturn("addData").when(context).getFunctionName();
        // Invoke
        final HttpResponseMessage ret = func.addDataRun(req, context);
        // Verify
        assertEquals(ret.getStatus(), HttpStatus.CREATED);
    }
}
In this case, the actual addData bean from the DataFunctions class is called instead of the one from the test configuration. The database connection is also created, when it should use the mocked bean from my test configuration. Can somebody please point out what is wrong in my test configuration?
I was able to resolve the first part (the Cosmos DB config loading) by marking it with:
@Configuration
@EnableDocumentDbRepositories
@Profile("!test")
public class CosmosDBConfig extends AbstractDocumentDbConfiguration {
    ...
}
I also had to mark the repository bean as optional in the service.
public class MyService {
    @Autowired(required = false)
    private MyRepository myRepo;
}
I didn't use any Spring Boot configuration other than this:
@ActiveProfiles("test")
public class FunctionTest {
    ...
}
For the second part, providing mock versions of the handlers, I simply made the test config class a Spring application, as below:
@SpringBootApplication
@ComponentScan(basePackages = { "com.boeing.da.helix.utm.traffic" })
@Profile("test")
public class TestConfiguration {

    public static void main(final String[] args) {
        SpringApplication.run(TestConfiguration.class, args);
    }

    @Bean(name = "addData")
    @Primary
    public Consumer<GenericMessage<Optional<String>>> addData() {
        return req -> {
            System.out.println("data mock");
        };
    }
}
and made use of this constructor from the Azure Functions library in Spring Cloud in my own constructor:
public class AppFunctions
        extends AzureSpringBootRequestHandler<GenericMessage<Optional<String>>, List<Data>> {

    public AppFunctions(Class<?> configurationClass) {
        super(configurationClass);
    }
}

public AzureSpringBootRequestHandler(Class<?> configurationClass) {
    super(configurationClass);
}
Hope it helps someone.
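To make the wiring explicit: the point of that constructor is that a test can hand the handler the test configuration class, so the mock addData bean is the one resolved at runtime. The exact call site is not shown above, so the following is only a hypothetical snippet:

// Hypothetical test usage: build the handler against the test configuration
// class so its mock addData bean (not the production one) is resolved.
AppFunctions handler = new AppFunctions(TestConfiguration.class);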

Spring Integration FTP Outbound Gateway console output

In the Spring Integration documentation example for ftp outbound gateway with Java configuration (16.8.1), how do I log the payload of the reply channel to the console?
Add a WireTap @Bean and wire its MessageChannel to a LoggingHandler.
Add the wire tap as a ChannelInterceptor to the gateway output channel.
Or, use .wireTap() when using the Java DSL.
Documentation here.
EDIT
Java Config:
@SpringBootApplication
public class So49308064Application {

    public static void main(String[] args) {
        SpringApplication.run(So49308064Application.class, args);
    }

    @Bean
    public ApplicationRunner runner(Gate gate) {
        return args -> {
            List<String> list = gate.list("foo");
            System.out.println("Result:" + list);
        };
    }

    @ServiceActivator(inputChannel = "ftpLS")
    @Bean
    public FtpOutboundGateway getGW() {
        FtpOutboundGateway gateway = new FtpOutboundGateway(sf(), "ls", "payload");
        gateway.setOption(Option.NAME_ONLY);
        gateway.setOutputChannelName("results");
        return gateway;
    }

    @Bean
    public MessageChannel results() {
        DirectChannel channel = new DirectChannel();
        channel.addInterceptor(tap());
        return channel;
    }

    @Bean
    public WireTap tap() {
        return new WireTap("logging");
    }

    @ServiceActivator(inputChannel = "logging")
    @Bean
    public LoggingHandler logger() {
        LoggingHandler logger = new LoggingHandler(Level.INFO);
        logger.setLogExpressionString("'Files:' + payload");
        return logger;
    }

    @Bean
    public DefaultFtpSessionFactory sf() {
        DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
        sf.setHost("...");
        sf.setUsername("...");
        sf.setPassword("...");
        return sf;
    }

    @MessagingGateway(defaultRequestChannel = "ftpLS", defaultReplyChannel = "results")
    public interface Gate {
        List<String> list(String directory);
    }
}
Output:
2018-03-29 09:04:20.383  INFO 15158 --- [           main] o.s.integration.handler.LoggingHandler   : Files:bar.tx,bar.txt,baz.txt
Result:[bar.tx, bar.txt, baz.txt]
Java DSL:
@SpringBootApplication
public class So49308064Application {

    public static void main(String[] args) {
        SpringApplication.run(So49308064Application.class, args);
    }

    @Bean
    public ApplicationRunner runner(Gate gate) {
        return args -> {
            List<String> list = gate.list("foo");
            System.out.println("Result:" + list);
        };
    }

    @Bean
    public IntegrationFlow flow() {
        return f -> f
                .handle(Ftp.outboundGateway(sf(), "ls", "payload").options(Option.NAME_ONLY))
                .log(Level.INFO, "lsResult", "payload")
                .bridge(); // needed, otherwise log ends the flow
    }

    @Bean
    public DefaultFtpSessionFactory sf() {
        DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
        sf.setHost("...");
        sf.setUsername("...");
        sf.setPassword("...");
        return sf;
    }

    @MessagingGateway(defaultRequestChannel = "flow.input")
    public interface Gate {
        List<String> list(String directory);
    }
}
Output:
2018-03-29 09:12:28.991  INFO 16638 --- [           main] lsResult                                 : [bar.tx, bar.txt, baz.txt]
Result:[bar.tx, bar.txt, baz.txt]
