HDP-2.5.3.0.
A custom processor uses the State API to persist some data:
try {
    // Read the processor's cluster-scoped state from the configured state provider
    stateMap = stateManager.getState(Scope.CLUSTER);
    stateMapProperties = new HashMap<>(stateMap.toMap());
    logger.debug("Retrieved the statemap : " + stateMapProperties);
    ...
} catch (IOException ioe) {
    logger.error("Couldn't load the state map", ioe);
    throw new ProcessException(ioe);
}
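For context, a minimal self-contained sketch of how the surrounding state handling might look (not the actual processor; the "last.sequence" key and the helper method are made up for illustration):

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.processor.exception.ProcessException;

// Sketch: read the cluster-scoped state, update one entry, and write it back.
private Map<String, String> loadAndUpdateState(final StateManager stateManager, final String newValue) {
    try {
        final StateMap stateMap = stateManager.getState(Scope.CLUSTER);
        final Map<String, String> stateMapProperties = new HashMap<>(stateMap.toMap());
        stateMapProperties.put("last.sequence", newValue); // hypothetical key
        stateManager.setState(stateMapProperties, Scope.CLUSTER);
        return stateMapProperties;
    } catch (final IOException ioe) {
        throw new ProcessException("Couldn't load or update the state map", ioe);
    }
}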
The processor works fine on my local machine's NiFi, but when I deploy it on our (Kerberized) dev cluster, which has 2 NiFi nodes, it fails with the following exception:
java.io.IOException: Failed to obtain value from ZooKeeper for component with ID d7fff389-015a-1000-ffff-ffffd04d1279 with exception code NOAUTH
at org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider.getState(ZooKeeperStateProvider.java:420) ~[na:na]
at org.apache.nifi.controller.state.StandardStateManager.getState(StandardStateManager.java:63) ~[na:na]
at com.datalake.processors.SQLServerCDCProcessor.getDataFromChangeTables(SQLServerCDCProcessor.java:480) [nifi-NiFiCDCPoC-processors-1.0-SNAPSHOT.jar:1.0-SNAPSHOT]
at com.datalake.processors.SQLServerCDCProcessor.onTrigger(SQLServerCDCProcessor.java:191) [nifi-NiFiCDCPoC-processors-1.0-SNAPSHOT.jar:1.0-SNAPSHOT]
at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27) [nifi-api-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1099) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:132) [nifi-framework-core-1.1.2.jar:1.1.2]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_112]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) [na:1.8.0_112]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) [na:1.8.0_112]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) [na:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_112]
Caused by: org.apache.zookeeper.KeeperException$NoAuthException: KeeperErrorCode = NoAuth for /nifi/components/d7fff389-015a-1000-ffff-ffffd04d1279
at org.apache.zookeeper.KeeperException.create(KeeperException.java:113) ~[na:na]
at org.apache.zookeeper.KeeperException.create(KeeperException.java:51) ~[na:na]
at org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1155) ~[na:na]
at org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1184) ~[na:na]
at org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider.getState(ZooKeeperStateProvider.java:403) ~[na:na]
...
org.apache.nifi.processor.exception.ProcessException: java.io.IOException: Failed to obtain value from ZooKeeper for component with ID d7fff389-015a-1000-ffff-ffffd04d1279 with exception code NOAUTH
at com.datalake.processors.SQLServerCDCProcessor.getDataFromChangeTables(SQLServerCDCProcessor.java:493) ~[nifi-NiFiCDCPoC-processors-1.0-SNAPSHOT.jar:1.0-SNAPSHOT]
at com.datalake.processors.SQLServerCDCProcessor.onTrigger(SQLServerCDCProcessor.java:191) ~[nifi-NiFiCDCPoC-processors-1.0-SNAPSHOT.jar:1.0-SNAPSHOT]
at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27) [nifi-api-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1099) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:132) [nifi-framework-core-1.1.2.jar:1.1.2]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_112]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) [na:1.8.0_112]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) [na:1.8.0_112]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) [na:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_112]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_112]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_112]
Caused by: java.io.IOException: Failed to obtain value from ZooKeeper for component with ID d7fff389-015a-1000-ffff-ffffd04d1279 with exception code NOAUTH
at org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider.getState(ZooKeeperStateProvider.java:420) ~[na:na]
at org.apache.nifi.controller.state.StandardStateManager.getState(StandardStateManager.java:63) ~[na:na]
at com.datalake.processors.SQLServerCDCProcessor.getDataFromChangeTables(SQLServerCDCProcessor.java:480) ~[nifi-NiFiCDCPoC-processors-1.0-SNAPSHOT.jar:1.0-SNAPSHOT]
... 13 common frames omitted
Caused by: org.apache.zookeeper.KeeperException$NoAuthException: KeeperErrorCode = NoAuth for /nifi/components/d7fff389-015a-1000-ffff-ffffd04d1279
at org.apache.zookeeper.KeeperException.create(KeeperException.java:113) ~[na:na]
at org.apache.zookeeper.KeeperException.create(KeeperException.java:51) ~[na:na]
at org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1155) ~[na:na]
at org.apache.zookeeper.ZooKeeper.getData(ZooKeeper.java:1184) ~[na:na]
at org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider.getState(ZooKeeperStateProvider.java:403) ~[na:na]
... 15 common frames omitted
Following is the relevant entry in state-management.xml:
<cluster-provider>
    <id>zk-provider</id>
    <class>org.apache.nifi.controller.state.providers.zookeeper.ZooKeeperStateProvider</class>
    <property name="Connect String">l4373t.sss.se.scania.com:2181,l4283t.sss.se.scania.com:2181,l4284t.sss.se.scania.com:2181</property>
    <property name="Root Node">/nifi</property>
    <property name="Session Timeout">10 seconds</property>
    <property name="Access Control">CreatorOnly</property>
</cluster-provider>
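For reference, the cluster provider above is selected by its id from nifi.properties; a typical pairing (assuming the default file locations) looks like this:

nifi.state.management.configuration.file=./conf/state-management.xml
nifi.state.management.provider.cluster=zk-provider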
Any ideas?
*****Edit-1*****
Adding the ZooKeeper JAAS configuration:
bash-4.2$ cat zookeeper-jaas.conf
Client {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="/usr/local/nifi/keys/nifi_l4513t.sss.se.com.keytab"
    storeKey=true
    useTicketCache=true
    principal="nifi/l4513t.sss.se.com@GLOBAL.SCD.COM";
};
The entry (added as 'java.arg.16') in the bootstrap.conf file:
bash-4.2$ vi bootstrap.conf
#
# Java command to use when running NiFi
java=java
# Username to use when running NiFi. This value will be ignored on Windows.
run.as=
# Configure where NiFi's lib and conf directories live
lib.dir=./lib
conf.dir=./conf
# How long to wait after telling NiFi to shutdown before explicitly killing the Process
graceful.shutdown.seconds=20
# Disable JSR 199 so that we can use JSP's without running a JDK
java.arg.1=-Dorg.apache.jasper.compiler.disablejsr199=true
# JVM memory settings
java.arg.2=-Xms1024m
java.arg.3=-Xmx2048m
# Enable Remote Debugging
#java.arg.debug=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8000
java.arg.4=-Djava.net.preferIPv4Stack=true
# allowRestrictedHeaders is required for Cluster/Node communications to work properly
java.arg.5=-Dsun.net.http.allowRestrictedHeaders=true
java.arg.6=-Djava.protocol.handler.pkgs=sun.net.www.protocol
java.arg.7=-Dorg.apache.nifi.bootstrap.config.log.dir=/var/log/nifi
# The G1GC is still considered experimental but has proven to be very advantageous in providing great
# performance without significant "stop-the-world" delays.
java.arg.13=-XX:+UseG1GC
#Set headless mode by default
java.arg.14=-Djava.awt.headless=true
java.arg.15=-Djava.security.auth.login.config=/usr/local/nifi/conf/kafka-jaas.conf
java.arg.16=-Djava.security.auth.login.config=/usr/local/nifi/conf/zookeeper-jaas.conf
# Master key in hexadecimal format for encrypted sensitive configuration values
nifi.bootstrap.sensitive.key=
###
# Notification Services for notifying interested parties when NiFi is stopped, started, dies
###
*****Edit-2***** Providing the existing kafka-jaas.conf
bash-4.2$ cat kafka-jaas.conf
KafkaServer {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    storeKey=true
    renewTicket=true
    useTicketCache=true
    serviceName="kafka"
    keyTab="/usr/local/nifi/keys/nifi_l4513t.sss.se.com.keytab"
    principal="nifi/l4513t.sss.se.com@GLOBAL.SCD.COM";
};

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    useTicketCache=true
    renewTicket=true
    serviceName="kafka"
    keyTab="/usr/local/nifi/keys/nifi_l4513t.sss.se.com.keytab"
    principal="nifi/l4513t.sss.se.com@GLOBAL.SCD.COM";
};

Client {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    storeKey=true
    useTicketCache=true
    serviceName="kafka"
    keyTab="/usr/local/nifi/keys/nifi_l4513t.sss.se.com.keytab"
    principal="nifi/l4513t.sss.se.com@GLOBAL.SCD.COM";
};
If you are talking to a Kerberized ZooKeeper then there is additional config required beyond the state-management.xml. Take a look at the Admin Guide section on securing ZooKeeper, specifically the section "Kerberizing NiFi's ZooKeeper Client":
https://nifi.apache.org/docs/nifi-docs/html/administration-guide.html#zk_kerberos_client
EDIT:
This article shows an example of the different JAAS scenarios:
https://community.hortonworks.com/content/kbentry/28180/how-to-configure-hdf-12-to-send-to-and-get-data-fr.html
Most of the examples in the article use an embedded ZooKeeper, so taking that out, I think you would need something like:
Client {
    com.sun.security.auth.module.Krb5LoginModule required
    useKeyTab=true
    keyTab="./conf/nifi.keytab"
    storeKey=true
    useTicketCache=false
    principal="nifi@EXAMPLE.COM";
};

KafkaClient {
    com.sun.security.auth.module.Krb5LoginModule required
    useTicketCache=true
    renewTicket=true
    serviceName="kafka"
    useKeyTab=true
    keyTab="./conf/nifi.keytab"
    principal="nifi@EXAMPLE.COM";
};
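The JVM reads a single java.security.auth.login.config value, so the Client and KafkaClient sections above would presumably live in one combined JAAS file that bootstrap.conf references once, for example (hypothetical file name):

java.arg.15=-Djava.security.auth.login.config=/usr/local/nifi/conf/nifi-jaas.conf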
Related
I am trying to publish a message using GCP Pub/Sub, but I am getting the below exception at the publisher's end. This exception happens while sending the message through a PubSub outbound gateway interface.
2019-12-19 18:47:27.408 WARN 2856 --- [pool-1-thread-2] o.s.c.g.p.c.p.PubSubPublisherTemplate : Publishing to topic_name topic failed.
com.google.api.gax.rpc.UnavailableException: io.grpc.StatusRuntimeException: UNAVAILABLE: io exception
at com.google.api.gax.rpc.ApiExceptionFactory.createException(ApiExceptionFactory.java:69)
at com.google.api.gax.grpc.GrpcApiExceptionFactory.create(GrpcApiExceptionFactory.java:72)
at com.google.api.gax.grpc.GrpcApiExceptionFactory.create(GrpcApiExceptionFactory.java:60)
at com.google.api.gax.grpc.GrpcExceptionCallable$ExceptionTransformingFuture.onFailure(GrpcExceptionCallable.java:97)
at com.google.api.core.ApiFutures$1.onFailure(ApiFutures.java:68)
at com.google.common.util.concurrent.Futures$CallbackListener.run(Futures.java:1070)
at com.google.common.util.concurrent.DirectExecutor.execute(DirectExecutor.java:30)
at com.google.common.util.concurrent.AbstractFuture.executeListener(AbstractFuture.java:1138)
at com.google.common.util.concurrent.AbstractFuture.addListener(AbstractFuture.java:707)
at com.google.common.util.concurrent.ForwardingListenableFuture.addListener(ForwardingListenableFuture.java:45)
at com.google.api.core.ApiFutureToListenableFuture.addListener(ApiFutureToListenableFuture.java:52)
at com.google.common.util.concurrent.Futures.addCallback(Futures.java:1051)
at com.google.api.core.ApiFutures.addCallback(ApiFutures.java:63)
at com.google.api.gax.grpc.GrpcExceptionCallable.futureCall(GrpcExceptionCallable.java:67)
at com.google.api.gax.rpc.AttemptCallable.call(AttemptCallable.java:81)
at com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly(TrustedListenableFutureTask.java:125)
at com.google.common.util.concurrent.InterruptibleTask.run(InterruptibleTask.java:69)
at com.google.common.util.concurrent.TrustedListenableFutureTask.run(TrustedListenableFutureTask.java:78)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: io.grpc.StatusRuntimeException: UNAVAILABLE: io exception
at io.grpc.Status.asRuntimeException(Status.java:526)
at io.grpc.stub.ClientCalls$UnaryStreamToFuture.onClose(ClientCalls.java:482)
at io.grpc.PartialForwardingClientCallListener.onClose(PartialForwardingClientCallListener.java:39)
at io.grpc.ForwardingClientCallListener.onClose(ForwardingClientCallListener.java:23)
at io.grpc.ForwardingClientCallListener$SimpleForwardingClientCallListener.onClose(ForwardingClientCallListener.java:40)
at io.grpc.internal.CensusStatsModule$StatsClientInterceptor$1$1.onClose(CensusStatsModule.java:678)
at io.grpc.PartialForwardingClientCallListener.onClose(PartialForwardingClientCallListener.java:39)
at io.grpc.ForwardingClientCallListener.onClose(ForwardingClientCallListener.java:23)
at io.grpc.ForwardingClientCallListener$SimpleForwardingClientCallListener.onClose(ForwardingClientCallListener.java:40)
at io.grpc.internal.CensusTracingModule$TracingClientInterceptor$1$1.onClose(CensusTracingModule.java:397)
at io.grpc.internal.ClientCallImpl.closeObserver(ClientCallImpl.java:459)
at io.grpc.internal.ClientCallImpl.access$300(ClientCallImpl.java:63)
at io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl.close(ClientCallImpl.java:546)
at io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl.access$600(ClientCallImpl.java:467)
at io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl$1StreamClosed.runInContext(ClientCallImpl.java:584)
at io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
at io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
... 7 common frames omitted
Caused by: java.io.IOException: An existing connection was forcibly closed by the remote host
at sun.nio.ch.SocketDispatcher.read0(Native Method)
at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
at sun.nio.ch.IOUtil.read(IOUtil.java:192)
at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:380)
at io.grpc.netty.shaded.io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes(PooledUnsafeDirectByteBuf.java:288)
at io.grpc.netty.shaded.io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1128)
at io.grpc.netty.shaded.io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:347)
at io.grpc.netty.shaded.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:148)
at io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644)
at io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579)
at io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496)
at io.grpc.netty.shaded.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458)
at io.grpc.netty.shaded.io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:897)
at io.grpc.netty.shaded.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
... 1 common frames omitted
Is it some port issue? I am doing this for the first time. Can anyone please help?
I have deployed a few Spring Boot applications using Spring Cloud Netflix OSS and configured Spring Boot Admin to monitor those applications by connecting to the Eureka server. But for two of the applications, I am getting the below error. Could you please let me know what could be the cause of this error and how to mitigate it?
This health endpoint URL is provided by Spring Boot Actuator, and the output of the health URL is {"description":"Spring Cloud Eureka Discovery Client","status":"UP"}
Error in the log
2017-10-02 18:29:31.790 ERROR 5976 --- [DiscoveryClient-CacheRefreshExecutor-0] d.c.b.a.d.ApplicationDiscoveryListener : Couldn't register application for service org.springframework.cloud.netflix.eureka.EurekaDiscoveryClient$EurekaServiceInstance#3e988b86
java.lang.IllegalArgumentException: Illegal character in path at index 61: http://xxxx:56412/manage/health
at java.net.URI.create(URI.java:852) ~[na:1.8.0_131]
at de.codecentric.boot.admin.discovery.EurekaServiceInstanceConverter.getHealthUrl(EurekaServiceInstanceConverter.java:46) ~[spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at de.codecentric.boot.admin.discovery.DefaultServiceInstanceConverter.convert(DefaultServiceInstanceConverter.java:64) ~[spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at de.codecentric.boot.admin.discovery.ApplicationDiscoveryListener.register(ApplicationDiscoveryListener.java:138) [spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at de.codecentric.boot.admin.discovery.ApplicationDiscoveryListener.discover(ApplicationDiscoveryListener.java:94) [spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at de.codecentric.boot.admin.discovery.ApplicationDiscoveryListener.discoverIfNeeded(ApplicationDiscoveryListener.java:85) [spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at de.codecentric.boot.admin.discovery.ApplicationDiscoveryListener.onApplicationEvent(ApplicationDiscoveryListener.java:80) [spring-boot-admin-server-1.5.0.jar!/:1.5.0]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_131]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_131]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_131]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_131]
at org.springframework.context.event.ApplicationListenerMethodAdapter.doInvoke(ApplicationListenerMethodAdapter.java:253) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.event.ApplicationListenerMethodAdapter.processEvent(ApplicationListenerMethodAdapter.java:174) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.event.ApplicationListenerMethodAdapter.onApplicationEvent(ApplicationListenerMethodAdapter.java:137) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.event.SimpleApplicationEventMulticaster.invokeListener(SimpleApplicationEventMulticaster.java:167) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:139) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:393) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:347) [spring-context-4.3.9.RELEASE.jar!/:4.3.9.RELEASE]
at org.springframework.cloud.netflix.eureka.CloudEurekaClient.onCacheRefreshed(CloudEurekaClient.java:98) [spring-cloud-netflix-eureka-client-1.3.1.RELEASE.jar!/:1.3.1.RELEASE]
at com.netflix.discovery.DiscoveryClient.fetchRegistry(DiscoveryClient.java:943) [eureka-client-1.6.2.jar!/:1.6.2]
at com.netflix.discovery.DiscoveryClient.refreshRegistry(DiscoveryClient.java:1451) [eureka-client-1.6.2.jar!/:1.6.2]
at com.netflix.discovery.DiscoveryClient$CacheRefreshThread.run(DiscoveryClient.java:1418) [eureka-client-1.6.2.jar!/:1.6.2]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_131]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_131]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_131]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_131]
at java.lang.Thread.run(Thread.java:748) [na:1.8.0_131]
Caused by: java.net.URISyntaxException: Illegal character in path at index 61: http://xxxxx:56412/manage/health
at java.net.URI$Parser.fail(URI.java:2848) ~[na:1.8.0_131]
at java.net.URI$Parser.checkChars(URI.java:3021) ~[na:1.8.0_131]
at java.net.URI$Parser.parseHierarchical(URI.java:3105) ~[na:1.8.0_131]
at java.net.URI$Parser.parse(URI.java:3053) ~[na:1.8.0_131]
at java.net.URI.<init>(URI.java:588) ~[na:1.8.0_131]
at java.net.URI.create(URI.java:850) ~[na:1.8.0_131]
... 26 common frames omitted
There was an extra space at the end of the URL in the property value (the two lines below look identical because the trailing space is invisible):
eureka.instance.healthCheckUrlPath=${management.context-path}/health     <-- note the trailing space here
It should be:
eureka.instance.healthCheckUrlPath=${management.context-path}/health
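For illustration, the failure is reproducible with plain java.net.URI (a minimal sketch; host and port are placeholders):

import java.net.URI;

public class TrailingSpaceDemo {
    public static void main(String[] args) {
        // Parses fine without the trailing space
        System.out.println(URI.create("http://example-host:56412/manage/health"));

        // Throws IllegalArgumentException ("Illegal character in path ...")
        // because a space is not a legal character in a URI path
        System.out.println(URI.create("http://example-host:56412/manage/health "));
    }
}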
Reviewing the code, the error gets thrown for an unknown data type that needs to be converted for the Avro schema. However, the only column I am selecting is of NVARCHAR(5000) type, which is handled in the code.
2017-04-21 01:33:51,446 WARN [Timer-Driven Process Thread-1] o.a.n.c.t.ContinuallyRunProcessorTask
java.lang.IllegalArgumentException: createSchema: Unknown SQL type 2011 cannot be converted to Avro type
at org.apache.nifi.processors.standard.util.JdbcCommon.createSchema(JdbcCommon.java:349) ~[na:na]
at org.apache.nifi.processors.standard.util.JdbcCommon.convertToAvroStream(JdbcCommon.java:92) ~[na:na]
at org.apache.nifi.processors.standard.util.JdbcCommon.convertToAvroStream(JdbcCommon.java:87) ~[na:na]
at org.apache.nifi.processors.standard.util.JdbcCommon.convertToAvroStream(JdbcCommon.java:77) ~[na:na]
at org.apache.nifi.processors.standard.ExecuteSQL$2.process(ExecuteSQL.java:205) ~[na:na]
at org.apache.nifi.controller.repository.StandardProcessSession.write(StandardProcessSession.java:2329) ~[nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.processors.standard.ExecuteSQL.onTrigger(ExecuteSQL.java:199) ~[na:na]
at org.apache.nifi.processor.AbstractProcessor.onTrigger(AbstractProcessor.java:27) ~[nifi-api-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.StandardProcessorNode.onTrigger(StandardProcessorNode.java:1099) ~[nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:136) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.tasks.ContinuallyRunProcessorTask.call(ContinuallyRunProcessorTask.java:47) [nifi-framework-core-1.1.2.jar:1.1.2]
at org.apache.nifi.controller.scheduling.TimerDrivenSchedulingAgent$1.run(TimerDrivenSchedulingAgent.java:132) [nifi-framework-core-1.1.2.jar:1.1.2]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_102]
at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) [na:1.8.0_102]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180) [na:1.8.0_102]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294) [na:1.8.0_102]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_102]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_102]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_102]
In the JDBC API from JDK8, NCLOB is 2011 and NVARCHAR is -9:
public static final int NCLOB = 2011;
public static final int NVARCHAR = -9;
It looks like the driver you are using is returning 2011 for the column even though you believe the column is NVARCHAR. I'm not totally sure, but that seems like incorrect behavior from the HANA driver.
It could probably be handled on the NiFi side of things by adding NCLOB to this case statement in JdbcCommon:
case CHAR:
case LONGNVARCHAR:
case LONGVARCHAR:
case NCHAR:
case NVARCHAR:
case VARCHAR:
case CLOB:
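A sketch of what that change might look like (an assumption about the fix, not committed NiFi code; the existing string-mapping body is assumed to stay as it is):

case CHAR:
case LONGNVARCHAR:
case LONGVARCHAR:
case NCHAR:
case NVARCHAR:
case VARCHAR:
case CLOB:
case NCLOB: // proposed addition: treat SQL type 2011 like the other character types
    // ... existing string-mapping logic for the column (unchanged) ...
    break;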
I am getting a Kafka exception when I tried a Kafka topic as the source.
Here is how I created the stream:
stream create --definition ":myDestination > log" --name ingest_from_broker
stream deploy ingest_from_broker --properties "spring.cloud.stream.bindings.input.consumer.headerMode=raw"
When I run it, I get this exception in the log file:
java.lang.StringIndexOutOfBoundsException: String index out of range: 113
at java.lang.String.checkBounds(String.java:385) ~[na:1.8.0_66]
at java.lang.String.<init>(String.java:425) ~[na:1.8.0_66]
at org.springframework.cloud.stream.binder.EmbeddedHeadersMessageConverter.oldExtractHeaders(EmbeddedHeadersMessageConverter.java:132) ~[spring-cloud-stream-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.cloud.stream.binder.EmbeddedHeadersMessageConverter.extractHeaders(EmbeddedHeadersMessageConverter.java:105) ~[spring-cloud-stream-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.cloud.stream.binder.AbstractBinder.extractMessageValues(AbstractBinder.java:153) ~[spring-cloud-stream-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder$ReceivingHandler.handleRequestMessage(KafkaMessageChannelBinder.java:698) [spring-cloud-stream-binder-kafka-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:99) [spring-integration-core-4.2.4.RELEASE.jar!/:na]
at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:127) [spring-integration-core-4.2.4.RELEASE.jar!/:na]
at org.springframework.integration.channel.FixedSubscriberChannel.send(FixedSubscriberChannel.java:69) [spring-integration-core-4.2.4.RELEASE.jar!/:na]
at org.springframework.integration.channel.FixedSubscriberChannel.send(FixedSubscriberChannel.java:63) [spring-integration-core-4.2.4.RELEASE.jar!/:na]
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:115) [spring-messaging-4.2.7.RELEASE.jar!/:4.2.7.RELEASE]
at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:45) [spring-messaging-4.2.7.RELEASE.jar!/:4.2.7.RELEASE]
at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:105) [spring-messaging-4.2.7.RELEASE.jar!/:4.2.7.RELEASE]
at org.springframework.integration.endpoint.MessageProducerSupport.sendMessage(MessageProducerSupport.java:105) [spring-integration-core-4.2.4.RELEASE.jar!/:na]
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter.access$300(KafkaMessageDrivenChannelAdapter.java:43) [spring-integration-kafka-1.3.1.RELEASE.jar!/:na]
at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter$AutoAcknowledgingChannelForwardingMessageListener.doOnMessage(KafkaMessageDrivenChannelAdapter.java:171) [spring-integration-kafka-1.3.1.RELEASE.jar!/:na]
at org.springframework.integration.kafka.listener.AbstractDecodingMessageListener.onMessage(AbstractDecodingMessageListener.java:50) [spring-integration-kafka-1.3.1.RELEASE.jar!/:na]
at org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder$4$1.doWithRetry(KafkaMessageChannelBinder.java:516) [spring-cloud-stream-binder-kafka-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.retry.support.RetryTemplate.doExecute(RetryTemplate.java:276) [spring-retry-1.1.3.RELEASE.jar!/:na]
at org.springframework.retry.support.RetryTemplate.execute(RetryTemplate.java:157) [spring-retry-1.1.3.RELEASE.jar!/:na]
at org.springframework.cloud.stream.binder.kafka.KafkaMessageChannelBinder$4.onMessage(KafkaMessageChannelBinder.java:513) [spring-cloud-stream-binder-kafka-1.0.2.RELEASE.jar!/:1.0.2.RELEASE]
at org.springframework.integration.kafka.listener.QueueingMessageListenerInvoker$KafkaMessageDispatchingSubscriber.onNext(QueueingMessageListenerInvoker.java:221) [spring-integration-kafka-1.3.1.RELEASE.jar!/:na]
at org.springframework.integration.kafka.listener.QueueingMessageListenerInvoker$KafkaMessageDispatchingSubscriber.onNext(QueueingMessageListenerInvoker.java:209) [spring-integration-kafka-1.3.1.RELEASE.jar!/:na]
at reactor.core.processor.util.RingBufferSubscriberUtils.route(RingBufferSubscriberUtils.java:67) [reactor-core-2.0.8.RELEASE.jar!/:na]
at reactor.core.processor.RingBufferProcessor$BatchSignalProcessor.run(RingBufferProcessor.java:789) [reactor-core-2.0.8.RELEASE.jar!/:na]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_66]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_66]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_66]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_66]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_66]
I saw some threads where it is mentioned that setting the property 'spring.cloud.stream.bindings.input.consumer.headerMode=raw' will help, but somehow this is not working.
Try either
stream create --definition ":myDestination > log --spring.cloud.stream.bindings.input.consumer.headerMode=raw" --name ingest_from_broker
or
stream deploy ingest_from_broker --properties "apps.log.spring.cloud.stream.bindings.input.consumer.headerMode=raw"
i.e., either the property must be specified directly on the application during stream definition, or, if supplied at deployment time, it must indicate which application it applies to.
I am implementing a Connector with a Transformer in Mule DevKit (3.7, latest). I am able to build it (Maven) and install it in Studio. The functionality of the Transformer is to load, at runtime, the class whose name is passed as an argument (from the payload) and access its methods to set some objects and construct a message format specific to a third-party protocol. When I run the application, it starts and deploys successfully, but I get a ClassNotFoundException. Kindly suggest/recommend a solution.
Code in my transformer class, where wsClassName is passed as org.aa.bb.cc.MemberSearchRequestInfo (refer to the logs below):
woClassObject = Class.forName(wsClassName).newInstance();
The Mule project has the MemberSearchRequestInfo class in the specified package under src/main/java.
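For reference, Class.forName also has an overload that takes an explicit ClassLoader; a minimal sketch using the thread context class loader (an assumption only, not necessarily the correct loader in a Mule deployment) would be:

// Sketch only: use the three-argument overload to choose the class loader explicitly.
// Whether the context class loader can see the application's classes here is an assumption.
ClassLoader loader = Thread.currentThread().getContextClassLoader();
Object woClassObject = Class.forName(wsClassName, true, loader).newInstance();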
Below are the logs:
2016-01-12 20:54:05,555 [[testzzmbrview1].HTTP_Listener_Configuration.worker.01] ERROR org.mule.exception.DefaultMessagingExceptionStrategy -
********************************************************************************
Message : Failed to transform from "java.util.HashMap" to "java.lang.String"
Type : org.mule.api.transformer.TransformerException
Code : MULE_ERROR-109
JavaDoc : http://www.mulesoft.org/docs/site/current3/apidocs/org/mule/api/transformer/TransformerException.html
Transformer : EnquiryMapToKkkkTransformer{this=c0fbf40, name='EnquiryMapToKkkkTransformer', ignoreBadInput=false, returnClass=SimpleDataType{type=java.lang.String, mimeType='*/*', encoding='null'}, sourceTypes=[SimpleDataType{type=java.util.HashMap, mimeType='*/*', encoding='null'}]}
********************************************************************************
Exception stack is:
1. org.aa.bb.cc.MemberSearchRequestInfo (java.lang.ClassNotFoundException)
java.net.URLClassLoader$1:-1 (null)
2. Failed to transform from "java.util.HashMap" to "java.lang.String" (org.mule.api.transformer.TransformerException)
org.mule.modules.xxxcore.transformers.EnquiryMapToKkkkTransformer:43 (http://www.mulesoft.org/docs/site/current3/apidocs/org/mule/api/transformer/TransformerException.html)
********************************************************************************
Root Exception stack trace:
java.lang.ClassNotFoundException: org.aa.bb.cc.MemberSearchRequestInfo
at java.net.URLClassLoader$1.run(Unknown Source)
at java.net.URLClassLoader$1.run(Unknown Source)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(Unknown Source)
at org.mule.module.launcher.FineGrainedControlClassLoader.findClass(FineGrainedControlClassLoader.java:175)
at org.mule.module.launcher.FineGrainedControlClassLoader.loadClass(FineGrainedControlClassLoader.java:119)
at java.lang.ClassLoader.loadClass(Unknown Source)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Unknown Source)
at org.ddd.zz.common.KkkkRequestFormatter.applyFieldRule(KkkkRequestFormatter.java:223)
at org.ddd.zz.common.KkkkRequestFormatter.getKkkkEnquiryRequest(KkkkRequestFormatter.java:309)
at org.mule.modules.xxxcore.xxxcoreConnector.enquiryMapToKkkk(xxxcoreConnector.java:124)
at org.mule.modules.xxxcore.transformers.EnquiryMapToKkkkTransformer.doTransform(EnquiryMapToKkkkTransformer.java:41)
at org.mule.transformer.AbstractTransformer.transform(AbstractTransformer.java:415)
at org.mule.transformer.AbstractTransformer.transform(AbstractTransformer.java:366)
at org.mule.DefaultMuleMessage.transformMessage(DefaultMuleMessage.java:1604)
at org.mule.DefaultMuleMessage.applyAllTransformers(DefaultMuleMessage.java:1507)
at org.mule.DefaultMuleMessage.applyTransformers(DefaultMuleMessage.java:1485)
at org.mule.DefaultMuleMessage.applyTransformers(DefaultMuleMessage.java:1477)
at org.mule.transformer.AbstractTransformer.process(AbstractTransformer.java:114)
at org.mule.execution.ExceptionToMessagingExceptionExecutionInterceptor.execute(ExceptionToMessagingExceptionExecutionInterceptor.java:24)
at org.mule.execution.MessageProcessorNotificationExecutionInterceptor.execute(MessageProcessorNotificationExecutionInterceptor.java:107)
at org.mule.execution.MessageProcessorExecutionTemplate.execute(MessageProcessorExecutionTemplate.java:44)
at org.mule.processor.BlockingProcessorExecutor.executeNext(BlockingProcessorExecutor.java:94)
at org.mule.processor.BlockingProcessorExecutor.execute(BlockingProcessorExecutor.java:56)
at org.mule.execution.ExceptionToMessagingExceptionExecutionInterceptor.execute(ExceptionToMessagingExceptionExecutionInterceptor.java:24)
at org.mule.execution.MessageProcessorNotificationExecutionInterceptor.execute(MessageProcessorNotificationExecutionInterceptor.java:107)
at org.mule.execution.MessageProcessorExecutionTemplate.execute(MessageProcessorExecutionTemplate.java:44)
at org.mule.routing.AbstractSelectiveRouter.processEventWithProcessor(AbstractSelectiveRouter.java:303)
at org.mule.routing.AbstractSelectiveRouter.routeWithProcessors(AbstractSelectiveRouter.java:293)
at org.mule.routing.AbstractSelectiveRouter.process(AbstractSelectiveRouter.java:193)
at org.mule.execution.ExceptionToMessagingExceptionExecutionInterceptor.execute(ExceptionToMessagingExceptionExecutionInterceptor.java:24)
at org.mule.execution.MessageProcessorNotificationExecutionInterceptor.execute(MessageProcessorNotificationExecutionInterceptor.java:107)
at org.mule.execution.MessageProcessorExecutionTemplate.execute(MessageProcessorExecutionTemplate.java:44)
at org.mule.processor.BlockingProcessorExecutor.executeNext(BlockingProcessorExecutor.java:85)
at org.mule.processor.BlockingProcessorExecutor.execute(BlockingProcessorExecutor.java:56)
at org.mule.execution.ExceptionToMessagingExceptionExecutionInterceptor.execute(ExceptionToMessagingExceptionExecutionInterceptor.java:24)
at org.mule.execution.MessageProcessorExecutionTemplate.execute(MessageProcessorExecutionTemplate.java:44)
at org.mule.processor.BlockingProcessorExecutor.executeNext(BlockingProcessorExecutor.java:94)
at org.mule.processor.BlockingProcessorExecutor.execute(BlockingProcessorExecutor.java:56)
at org.mule.module.cxf.CxfInboundMessageProcessor.processNext(CxfInboundMessageProcessor.java:471)
at org.mule.module.cxf.MuleInvoker$1.process(MuleInvoker.java:100)
at org.mule.module.cxf.MuleInvoker$1.process(MuleInvoker.java:96)
at org.mule.execution.ExecuteCallbackInterceptor.execute(ExecuteCallbackInterceptor.java:16)
at org.mule.execution.CommitTransactionInterceptor.execute(CommitTransactionInterceptor.java:35)
at org.mule.execution.CommitTransactionInterceptor.execute(CommitTransactionInterceptor.java:22)
at org.mule.execution.HandleExceptionInterceptor.execute(HandleExceptionInterceptor.java:30)
at org.mule.execution.HandleExceptionInterceptor.execute(HandleExceptionInterceptor.java:14)
at org.mule.execution.BeginAndResolveTransactionInterceptor.execute(BeginAndResolveTransactionInterceptor.java:67)
at org.mule.execution.SuspendXaTransactionInterceptor.execute(SuspendXaTransactionInterceptor.java:50)
at org.mule.execution.RethrowExceptionInterceptor.execute(RethrowExceptionInterceptor.java:28)
at org.mule...
********************************************************************************