My Storm bolt cannot be deserialized in cluster mode - Spring Boot

I am using Spring Boot and Storm for a demo. It works in local mode, but it reports an error in cluster mode when I submit the jar:
./storm jar storm-demo3-0.0.1-SNAPSHOT.jar org.springframework.boot.loader.JarLauncher simpleBoot
When I remove Spring Boot and package with only the maven-compiler-plugin, it works fine:
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
This is the error on the supervisor:
java.lang.RuntimeException: java.lang.ClassNotFoundException: com.fosung.share.stormdemo3.bolt.FilterBolt
at org.apache.storm.utils.Utils.javaDeserialize(Utils.java:259) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.utils.Utils.getSetComponentObject(Utils.java:507) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.task$get_task_object.invoke(task.clj:76) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.task$mk_task_data$fn__6524.invoke(task.clj:180) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.util$assoc_apply_self.invoke(util.clj:931) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.task$mk_task_data.invoke(task.clj:172) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.task$mk_task.invoke(task.clj:184) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.executor$mk_executor$fn__10662.invoke(executor.clj:379) ~[storm-core-1.2.2.jar:1.2.2]
at clojure.core$map$fn__4553.invoke(core.clj:2622) ~[clojure-1.7.0.jar:?]
at clojure.lang.LazySeq.sval(LazySeq.java:40) ~[clojure-1.7.0.jar:?]
at clojure.lang.LazySeq.seq(LazySeq.java:49) ~[clojure-1.7.0.jar:?]
at clojure.lang.RT.seq(RT.java:507) ~[clojure-1.7.0.jar:?]
at clojure.core$seq__4128.invoke(core.clj:137) ~[clojure-1.7.0.jar:?]
at clojure.core.protocols$seq_reduce.invoke(protocols.clj:30) ~[clojure-1.7.0.jar:?]
at clojure.core.protocols$fn__6506.invoke(protocols.clj:101) ~[clojure-1.7.0.jar:?]
at clojure.core.protocols$fn__6452$G__6447__6465.invoke(protocols.clj:13) ~[clojure-1.7.0.jar:?]
at clojure.core$reduce.invoke(core.clj:6519) ~[clojure-1.7.0.jar:?]
at clojure.core$into.invoke(core.clj:6600) ~[clojure-1.7.0.jar:?]
at org.apache.storm.daemon.executor$mk_executor.invoke(executor.clj:380) ~[storm-core-1.2.2.jar:1.2.2]
at org.apache.storm.daemon.worker$fn__11300$exec_fn__2470__auto__$reify__11302$iter__11307__11311$fn__11312.invoke(worker.clj:663) ~[storm-core-1.2.2.jar:1.2.2]
at clojure.lang.LazySeq.sval(LazySeq.java:40) ~[clojure-1.7.0.jar:?]
at clojure.lang.LazySeq.seq(LazySeq.java:49) ~[clojure-1.7.0.jar:?]
at clojure.lang.RT.seq(RT.java:507) ~[clojure-1.7.0.jar:?]
at clojure.core$seq__4128.invoke(core.clj:137) ~[clojure-1.7.0.jar:?]
at clojure.core$dorun.invoke(core.clj:3009) ~[clojure-1.7.0.jar:?]
at clojure.core$doall.invoke(core.clj:3025) ~[clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker$fn__11300$exec_fn__2470__auto__$reify__11302.run(worker.clj:663) ~[storm-core-1.2.2.jar:1.2.2]
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_152]
at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_152]
at org.apache.storm.daemon.worker$fn__11300$exec_fn__2470__auto____11301.invoke(worker.clj:633) ~[storm-core-1.2.2.jar:1.2.2]
at clojure.lang.AFn.applyToHelper(AFn.java:178) ~[clojure-1.7.0.jar:?]
at clojure.lang.AFn.applyTo(AFn.java:144) ~[clojure-1.7.0.jar:?]
at clojure.core$apply.invoke(core.clj:630) ~[clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker$fn__11300$mk_worker__11391.doInvoke(worker.clj:605) [storm-core-1.2.2.jar:1.2.2]
at clojure.lang.RestFn.invoke(RestFn.java:512) [clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker$_main.invoke(worker.clj:798) [storm-core-1.2.2.jar:1.2.2]
at clojure.lang.AFn.applyToHelper(AFn.java:165) [clojure-1.7.0.jar:?]
at clojure.lang.AFn.applyTo(AFn.java:144) [clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker.main(Unknown Source) [storm-core-1.2.2.jar:1.2.2]
Caused by: java.lang.ClassNotFoundException: com.fosung.share.stormdemo3.bolt.FilterBolt
at java.net.URLClassLoader.findClass(URLClassLoader.java:381) ~[?:1.8.0_152]
at java.lang.ClassLoader.loadClass(ClassLoader.java:424) ~[?:1.8.0_152]
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338) ~[?:1.8.0_152]
at java.lang.ClassLoader.loadClass(ClassLoader.java:357) ~[?:1.8.0_152]
at java.lang.Class.forName0(Native Method) ~[?:1.8.0_152]
at java.lang.Class.forName(Class.java:348) ~[?:1.8.0_152]
at java.io.ObjectInputStream.resolveClass(ObjectInputStream.java:683) ~[?:1.8.0_152]
at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1863) ~[?:1.8.0_152]
at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1746) ~[?:1.8.0_152]
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2037) ~[?:1.8.0_152]
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1568) ~[?:1.8.0_152]
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:428) ~[?:1.8.0_152]
at org.apache.storm.utils.Utils.javaDeserialize(Utils.java:253) ~[storm-core-1.2.2.jar:1.2.2]
... 38 more
2019-05-22 11:09:14.684 o.a.s.util main [ERROR] Halting process: ("Error on initialization")
java.lang.RuntimeException: ("Error on initialization")
at org.apache.storm.util$exit_process_BANG_.doInvoke(util.clj:341) [storm-core-1.2.2.jar:1.2.2]
at clojure.lang.RestFn.invoke(RestFn.java:423) [clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker$fn__11300$mk_worker__11391.doInvoke(worker.clj:605) [storm-core-1.2.2.jar:1.2.2]
at clojure.lang.RestFn.invoke(RestFn.java:512) [clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker$_main.invoke(worker.clj:798) [storm-core-1.2.2.jar:1.2.2]
at clojure.lang.AFn.applyToHelper(AFn.java:165) [clojure-1.7.0.jar:?]
at clojure.lang.AFn.applyTo(AFn.java:144) [clojure-1.7.0.jar:?]
at org.apache.storm.daemon.worker.main(Unknown Source) [storm-core-1.2.2.jar:1.2.2]
My pom.xml:
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId>
<version>1.2.2</version>
<!--<scope>provided</scope>-->
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-1.2-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-web</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>ring-cors</artifactId>
<groupId>ring-cors</groupId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
MyTopology
public class MyTopology {
public static void main(String[] args) {
System.out.println("MyTopology main start");
// Define a topology
TopologyBuilder builder = new TopologyBuilder();
// Set one executor (thread); one is the default
DataSpout dataSpout = new DataSpout();
builder.setSpout("spoutId", dataSpout);
// shuffleGrouping means random grouping
// Set one executor (thread) and two tasks
FilterBolt filterBolt = new FilterBolt();
InsertBolt insertBolt = new InsertBolt();
builder.setBolt("filterBolt", filterBolt).setNumTasks(1).allGrouping("spoutId", "spoutId");
builder.setBolt("insertBolt", insertBolt).setNumTasks(1).allGrouping("filterBolt", "spoutId");
Config conf = new Config();
try {
// With arguments: submit the topology to the cluster, using the first argument as the topology name
// Without arguments: run locally
if (args != null && args.length > 0) {
System.out.println("运行远程模式");
StormSubmitter.submitTopology(args[0], conf, builder.createTopology());
} else {
// Start local mode
System.out.println("Running in local mode");
LocalCluster cluster = new LocalCluster();
cluster.submitTopology("TopologyApp", conf, builder.createTopology());
}
} catch (Exception e) {
System.out.println("storm启动失败!程序退出!");
System.exit(1);
e.printStackTrace();
}
// System.out.println("storm启动成功...");
}
}
My spout
public class DataSpout extends BaseRichSpout {
SpoutOutputCollector collector;
@Override
public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
this.collector = collector;
System.out.println("spout open");
}
@Override
public void nextTuple() {
/*try {
Thread.sleep(1000);
return;
} catch (InterruptedException e) {
e.printStackTrace();
}*/
System.out.println("spout nextTuple start");
int random = (int) (Math.random() * 1000);
collector.emit("spoutId", new Values(random));
try {
TimeUnit.SECONDS.sleep(3);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declareStream("spoutId", new Fields("spoutId"));
}
}
My bolt
public class FilterBolt extends BaseRichBolt {
OutputCollector collector;
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
}
@Override
public void execute(Tuple input) {
System.out.println("filter bolt start");
Integer o = (Integer) input.getValues().get(0);
if (o>10){
collector.emit("spoutId", new Values(o));
}
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// Declare the streamId that the next bolt will receive
declarer.declareStream("spoutId", new Fields("spoutId"));
}
}

Spring (Boot) doesn't fit nicely with Storm. Storm is a framework, meaning it is responsible for managing the lifecycle of some classes, like your bolt. As Storm doesn't know anything about Spring, Spring's dependency injection doesn't work out of the box. It is possible to set up Spring to work on parts of a Storm application, e.g. with task and worker hooks, which can allow you to create a Spring context in a Storm worker, but I wouldn't recommend it unless you have a good reason to need Spring.
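If you do decide you need Spring beans inside a bolt, one workaround (a minimal sketch, not from the original post; AppConfig and MyService are hypothetical classes that would have to be on the worker classpath) is to build the context lazily in prepare(), which runs inside the worker JVM after the bolt has been deserialized:
public class SpringAwareBolt extends BaseRichBolt {
    // transient: the context is rebuilt in each worker and never serialized with the bolt
    private transient AnnotationConfigApplicationContext springContext;
    private transient OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
        // AppConfig is a hypothetical @Configuration class
        this.springContext = new AnnotationConfigApplicationContext(AppConfig.class);
    }

    @Override
    public void execute(Tuple input) {
        // MyService is a hypothetical Spring bean looked up from the worker-local context
        MyService service = springContext.getBean(MyService.class);
        collector.emit(new Values(service.process(input.getValue(0))));
        collector.ack(input);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("result"));
    }
}
Marking the context and collector transient keeps them out of the serialized topology; each worker builds its own context when prepare runs.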
Regarding the error you're getting, Storm is failing to find one of your classes in the jar you're submitting. Since you didn't post your pom.xml for your Spring configuration, it's hard to tell, but maybe you're using a plugin that moves your classes around. When you submit a topology to Storm, Storm runs a couple of phases you should understand:
First you run storm jar your-topology.jar com.yourcompany.yourMain. This starts a JVM on your local machine (or wherever you're running the command), which runs your topology setup, in your case MyTopology.main. The setup serializes your spouts and bolts and sends the jar and the serialized topology to Nimbus (a separate JVM), which in turn sends them to the supervisors (yet more separate JVMs). On each supervisor, the supervisor JVM boots up a number of worker JVMs to run your topology. Each worker JVM starts with a command like java -cp your-topology.jar org.apache.storm.Worker. The worker JVMs load the serialized topology and the classes in your topology jar, and start threads to run your spouts and bolts.
These phases are most likely the reason it's failing for you. When you run the topology setup code, you're doing it with a Spring Boot command, so Spring Boot gets a chance to run. When the topology starts up on the worker machines, the JVMs are started with a regular old call to a non-Spring main method, so Spring doesn't get a chance to run.
If you decide not to use Spring, you can find a working example POM here.
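As a rough sketch of the kind of packaging such a POM uses (assuming the maven-shade-plugin; the plugin version and main class below are illustrative, and storm-core would go back to provided scope), you build a flat uber-jar instead of Spring Boot's nested-jar layout:
<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <version>3.2.1</version>
    <executions>
        <execution>
            <phase>package</phase>
            <goals>
                <goal>shade</goal>
            </goals>
            <configuration>
                <transformers>
                    <!-- Sets Main-Class in the manifest; adjust to your topology's actual main class -->
                    <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                        <mainClass>com.fosung.share.stormdemo3.MyTopology</mainClass>
                    </transformer>
                </transformers>
            </configuration>
        </execution>
    </executions>
</plugin>
With a jar built this way, com.fosung.share.stormdemo3.bolt.FilterBolt sits at the root of the jar where the worker's URLClassLoader can find it, instead of under Spring Boot's BOOT-INF/classes directory.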
Other links that may be of interest are an earlier answer and a project doing Spring integration for Storm.

Related

Use ElasticSearch client in non web application with Spring

I'm creating a non-web application with Spring that will read data from Kafka and write it to Elasticsearch. I included the following dependency:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
And the following configuration class:
@Configuration
public class ElasticSearchConfig extends AbstractElasticsearchConfiguration {
@Bean
public RestHighLevelClient elasticsearchClient() {
final ClientConfiguration clientConfiguration = ClientConfiguration.builder().connectedTo("localhost:9200")
.build();
return RestClients.create(clientConfiguration).rest();
}
}
When I execute the following code:
@Override
public void run(String... args) throws Exception {
IndexRequest request = new IndexRequest("test-transactions");
request.id("2");
request.source("{\"name\":\"Sammie\"," + "\"lastname\":\"Goldner\"," + "\"username\":\"hugh.vonrueden\","
+ "\"amount\":9622235.2009}", XContentType.JSON);
client.index(request, RequestOptions.DEFAULT);
}
I get the following exception:
Caused by: java.lang.ClassNotFoundException: org.springframework.http.HttpHeaders
at java.net.URLClassLoader.findClass(URLClassLoader.java:382) ~[na:1.8.0_251]
at java.lang.ClassLoader.loadClass(ClassLoader.java:418) ~[na:1.8.0_251]
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355) ~[na:1.8.0_251]
at java.lang.ClassLoader.loadClass(ClassLoader.java:351) ~[na:1.8.0_251]
... 46 common frames omitted
If I include the following dependency, the code works fine:
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
But it brings in a lot of other functionality that I don't want. Is there any other way to configure spring-boot-starter-data-elasticsearch in a non-web application?
Thanks.
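One direction that might help (not from the original thread): the missing class org.springframework.http.HttpHeaders is packaged in the spring-web module, so it may be enough to add just that module rather than the whole web starter, e.g.:
<dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-web</artifactId>
</dependency>
The Spring Boot parent/BOM manages the version, so no version tag should be needed; this pulls in the HTTP abstractions without adding an embedded server.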

Server Sent Events with Spring Boot and WebFlux

I am working on Server-Sent Events. I took help from:
http://sinhamohit.com/writing/spring-boot-reactive-sse
https://github.com/mohitsinha/spring-boot-reactive-sse
The issue with the above example is that everything is defined in one class. I am trying to split it into different classes, but it fails with this exception:
2018-08-20 17:03:15.521 WARN 10964 --- [ main]
onfigReactiveWebServerApplicationContext : Exception encountered during
context initialization - cancelling refresh attempt:
org.springframework.context.ApplicationContextException: Unable to start
reactive web server; nested exception is
org.springframework.context.ApplicationContextException: Unable to start
ReactiveWebApplicationContext due to missing ReactiveWebServerFactory bean.
2018-08-20 17:03:15.599 ERROR 10964 --- [ main]
o.s.boot.SpringApplication : Application run failed
org.springframework.context.ApplicationContextException: Unable to start
reactive web server; nested exception is
org.springframework.context.ApplicationContextException: Unable to start
ReactiveWebApplicationContext due to missing ReactiveWebServerFactory bean.
at org.springframework.boot.web.reactive.context.ReactiveWebServerApplicationContext.onRefresh(ReactiveWebServerApplicationContext.java:76) ~[spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:544) ~[spring-context-5.0.8.RELEASE.jar:5.0.8.RELEASE]
at org.springframework.boot.web.reactive.context.ReactiveWebServerApplicationContext.refresh(ReactiveWebServerApplicationContext.java:61) ~[spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:762) [spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:398) [spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:330) [spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1258) [spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1246) [spring-boot-2.0.4.RELEASE.jar:2.0.4.RELEASE]
at hello.SpringBootApplication.main(SpringBootApplication.java:8) [classes/:na]
Code:
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.0.4.RELEASE</version>
</parent>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
</dependencies>
@RestController
@RequestMapping("/stock/transaction")
public class StockTransactionController {
@Autowired
StockTransactionService stockTransactionService;
@GetMapping(produces = MediaType.APPLICATION_STREAM_JSON_VALUE)
public Flux<StockTransaction> stockTransactionEvents() {
return stockTransactionService.getStockTransactions();
}
}
@Service
public class StockTransactionService {
List<Stock> stockList = new ArrayList<>();
List<String> stockNames =
Arrays.asList("mango,banana,guava,infinity".split(","));
@Bean
CommandLineRunner commandLineRunner() {
return args -> {
createRandomStock();
stockList.forEach(System.out::println);
};
}
public Flux<StockTransaction> getStockTransactions() {
Flux<Long> interval = Flux.interval(Duration.ofSeconds(1));
interval.subscribe((i) -> stockList.forEach(stock ->
stock.setPrice(changePrice(stock.getPrice()))));
Flux<StockTransaction> stockTransactionFlux = Flux
.fromStream(Stream.generate(() -> new
StockTransaction(getRandomUser(), getRandomStock(), new Date())));
return Flux.zip(interval, stockTransactionFlux).map(Tuple2::getT2);
}
}
Please help.
The problem was with the @SpringBootApplication annotation.
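For reference, a minimal sketch of what the entry point needs to look like (class and package names are illustrative; the application class should sit in a package above the controller and service so component scanning picks them up):
@SpringBootApplication
public class StockApplication {
    public static void main(String[] args) {
        SpringApplication.run(StockApplication.class, args);
    }
}
With spring-boot-starter-webflux on the classpath and @SpringBootApplication on the main class (rather than a bare main method), auto-configuration registers the Netty-based ReactiveWebServerFactory that the error complains about.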

Apache Storm intellij local mode - NimbusLeaderNotFoundException

I have set up a project trying to run the standard "ExclamationTopology" on the in-memory version of Storm, triggered from the IntelliJ IDE. Here is my POM.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>storm</groupId>
<artifactId>sample</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<hbase.version>0.98.4-hadoop2</hbase.version>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-client</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-server</artifactId>
<version>2.0.0-SNAPSHOT</version>
</dependency>
</dependencies>
<repositories>
<repository>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
<id>central</id>
<url>http://repo1.maven.org/maven2/</url>
</repository>
<repository>
<releases>
<enabled>true</enabled>
</releases>
<snapshots>
<enabled>true</enabled>
</snapshots>
<id>clojars</id>
<url>https://clojars.org/repo/</url>
</repository>
</repositories>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.7.0</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2.1</version>
<configuration>
<mainClass>test.ExclamationTopology</mainClass>
<arguments>-local</arguments>
</configuration>
</plugin>
</plugins>
</build>
</project>
Along with the sample source code of my topology:
public class ExclamationTopology extends ConfigurableTopology {
public static class ExclamationBolt extends BaseRichBolt {
OutputCollector _collector;
@Override
public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
_collector = collector;
}
@Override
public void execute(Tuple tuple) {
_collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
_collector.ack(tuple);
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("word"));
}
}
public static void main(String[] args) throws Exception {
ConfigurableTopology.start(new ExclamationTopology(), args);
}
protected int run(String[] args) {
TopologyBuilder builder = new TopologyBuilder();
builder.setSpout("word", new TestWordSpout(), 10);
builder.setBolt("exclaim1", new ExclamationBolt(), 3).shuffleGrouping("word");
builder.setBolt("exclaim2", new ExclamationBolt(), 2).shuffleGrouping("exclaim1");
conf.setDebug(true);
String topologyName = "test";
conf.setNumWorkers(3);
if (args != null && args.length > 0) {
topologyName = args[0];
}
return submit(topologyName, conf, builder);
}
}
In order to run the topology locally from within my IDE via Maven, I included the exec-maven-plugin. Then I use the following mvn command to run the application:
exec:java -Dexec.args=-local
However, I get the following exception:
java.lang.RuntimeException: java.lang.RuntimeException: org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused: connect
at org.apache.storm.security.auth.ThriftClient.reconnect(ThriftClient.java:110) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.ThriftClient.<init>(ThriftClient.java:70) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.utils.NimbusClient.<init>(NimbusClient.java:158) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:113) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.utils.NimbusClient.getConfiguredClient(NimbusClient.java:83) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.blobstore.NimbusBlobStore.prepare(NimbusBlobStore.java:268) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.StormSubmitter.getListOfKeysFromBlobStore(StormSubmitter.java:599) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.StormSubmitter.validateConfs(StormSubmitter.java:565) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.StormSubmitter.submitTopologyAs(StormSubmitter.java:211) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.StormSubmitter.submitTopology(StormSubmitter.java:391) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.StormSubmitter.submitTopology(StormSubmitter.java:163) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.topology.ConfigurableTopology.submit(ConfigurableTopology.java:94) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at test.ExclamationTopology.run(ExclamationTopology.java:69) [classes/:?]
at org.apache.storm.topology.ConfigurableTopology.start(ConfigurableTopology.java:70) [storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at test.ExclamationTopology.main(ExclamationTopology.java:47) [classes/:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:297) [exec-maven-plugin-1.2.1.jar:?]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
Caused by: java.lang.RuntimeException: org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused: connect
at org.apache.storm.security.auth.TBackoffConnect.retryNext(TBackoffConnect.java:64) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.TBackoffConnect.doConnectWithRetry(TBackoffConnect.java:56) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.ThriftClient.reconnect(ThriftClient.java:102) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
... 20 more
Caused by: org.apache.thrift.transport.TTransportException: java.net.ConnectException: Connection refused: connect
at org.apache.thrift.transport.TSocket.open(TSocket.java:226) ~[libthrift-0.9.3.jar:0.9.3]
at org.apache.thrift.transport.TFramedTransport.open(TFramedTransport.java:81) ~[libthrift-0.9.3.jar:0.9.3]
at org.apache.storm.security.auth.SimpleTransportPlugin.connect(SimpleTransportPlugin.java:105) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.TBackoffConnect.doConnectWithRetry(TBackoffConnect.java:53) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.ThriftClient.reconnect(ThriftClient.java:102) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
... 20 more
Caused by: java.net.ConnectException: Connection refused: connect
at java.net.DualStackPlainSocketImpl.connect0(Native Method) ~[?:1.8.0_112]
at java.net.DualStackPlainSocketImpl.socketConnect(DualStackPlainSocketImpl.java:79) ~[?:1.8.0_112]
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[?:1.8.0_112]
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[?:1.8.0_112]
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[?:1.8.0_112]
at java.net.PlainSocketImpl.connect(PlainSocketImpl.java:172) ~[?:1.8.0_112]
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[?:1.8.0_112]
at java.net.Socket.connect(Socket.java:589) ~[?:1.8.0_112]
at org.apache.thrift.transport.TSocket.open(TSocket.java:221) ~[libthrift-0.9.3.jar:0.9.3]
at org.apache.thrift.transport.TFramedTransport.open(TFramedTransport.java:81) ~[libthrift-0.9.3.jar:0.9.3]
at org.apache.storm.security.auth.SimpleTransportPlugin.connect(SimpleTransportPlugin.java:105) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.TBackoffConnect.doConnectWithRetry(TBackoffConnect.java:53) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
at org.apache.storm.security.auth.ThriftClient.reconnect(ThriftClient.java:102) ~[storm-client-2.0.0-SNAPSHOT.jar:2.0.0-SNAPSHOT]
... 20 more
org.apache.storm.utils.NimbusLeaderNotFoundException: Could not find leader nimbus from seed hosts [localhost]. Did you specify a valid list of nimbus hosts for config nimbus.seeds?
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:141)
at org.apache.storm.utils.NimbusClient.getConfiguredClient(NimbusClient.java:83)
at org.apache.storm.blobstore.NimbusBlobStore.prepare(NimbusBlobStore.java:268)
at org.apache.storm.StormSubmitter.getListOfKeysFromBlobStore(StormSubmitter.java:599)
at org.apache.storm.StormSubmitter.validateConfs(StormSubmitter.java:565)
The README for storm-starter is out of date. The examples don't run locally anymore, because ConfigurableTopology was changed to no longer support it: https://github.com/apache/storm/commit/b254ede46a25466749cd48ebd4bcb56dd791ec4a#diff-de7eab133732a8b5b97be6aa7328e392R92.
If you want to run it locally, you can use https://github.com/apache/storm/blob/master/storm-server/src/main/java/org/apache/storm/LocalCluster.java, which should replace the call to submit in your topology code. Otherwise you'll have to set up a local Storm instance to run the topology (which is very easy, see https://storm.apache.org/releases/2.0.0-SNAPSHOT/Setting-up-a-Storm-cluster.html. The storm-starter README tells you how to submit to an installed cluster).
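A minimal sketch of that replacement inside run() (assuming storm-server is on the classpath, as it already is in the POM above; the sleep just keeps the in-process cluster alive for a while before it is torn down):
protected int run(String[] args) {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("word", new TestWordSpout(), 10);
    builder.setBolt("exclaim1", new ExclamationBolt(), 3).shuffleGrouping("word");
    builder.setBolt("exclaim2", new ExclamationBolt(), 2).shuffleGrouping("exclaim1");
    conf.setDebug(true);
    // Run in-process instead of submitting to a real Nimbus
    try (LocalCluster cluster = new LocalCluster()) {
        cluster.submitTopology("test", conf, builder.createTopology());
        Thread.sleep(60_000);   // let the local topology run for a minute
    } catch (Exception e) {
        e.printStackTrace();
        return 1;
    }
    return 0;
}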
Edit:
If you want to run it locally, another option would probably be to use the "storm local" command.
PS E:\apache-storm-2.0.0-SNAPSHOT\bin> ./storm help local
Syntax: [storm local topology-jar-path class ...]
Runs the main method of class with the specified arguments but pointing to a local cluster
The storm jars and configs in ~/.storm are put on the classpath.
The process is configured so that StormSubmitter
(http://storm.apache.org/releases/current/javadocs/org/apache/storm/StormSubmitter.html)
and others will interact with a local cluster instead of the one configured by default.
Most options should work just like with the storm jar command.
local also adds in the option --local-ttl which sets the number of seconds the
local cluster will run for before it shuts down.
--java-debug lets you turn on java debugging and set the parameters passed to -agentlib:jdwp on the JDK
--java-debug transport=dt_socket,address=localhost:8000
will open up a debugging server on port 8000.
The documentation for local mode has been updated in the Storm repo, but hasn't yet made it to the website. See https://github.com/apache/storm/blob/master/docs/Local-mode.md for the new docs.
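For example, with the sample project above the invocation would look something like this (the jar path is illustrative):
./storm local target/sample-1.0-SNAPSHOT.jar test.ExclamationTopology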

Problems setting up Spring form validation

I have some problems setting up validation in a Spring MVC (4.3.9) application.
Basically, there is a simple application, with the issue isolated to one "contact" form.
Here is part of the code of the bean underlying the form:
@Component
@Scope("request")
public class SendMail {
@Min(20)
private String userName;
@Size(min=10, max=11)
private String message;
private String userEmail;
public static class CustomValidator implements Validator {
@Override
public boolean supports(Class clazz) {
return SendMail.class.equals(clazz);
}
@Override
public void validate(Object targets, Errors errors) {
SendMail o = (SendMail) targets;
if (o.getUserName() == null || o.getUserName().length() < 1) {
errors.rejectValue("userName", "Empty.userName");
}
}
}
....
}
and here is the relevant part of the controller:
@RequestMapping(value = {"/contact"}, method = RequestMethod.POST)
public ModelAndView sendEmail(@Valid SendMail sendmail, BindingResult result) {
System.out.println("Result1: " + result.hasErrors());
DataBinder binder = new DataBinder(sendmail);
binder.setValidator(new SendMail.CustomValidator());
binder.validate();
result = binder.getBindingResult();
System.out.println("Result2: " + result.hasErrors());
I get the following result:
Result1: false
Result2: true
So the 'general' validation (denoted by the @Valid annotation) doesn't seem to work, but a manually invoked validation does.
I googled for some time and came across suggestions that a validator bean needs to be defined in the Spring config. Unfortunately, when I add the following to my Config.java:
....
@Bean(name = "validator")
public Validator getValidator() {
LocalValidatorFactoryBean validator = new LocalValidatorFactoryBean();
return validator;
}
The application crashes with the following exception:
....
root cause
java.lang.ClassNotFoundException: javax.el.ExpressionFactory
java.net.URLClassLoader.findClass(URLClassLoader.java:381)
java.lang.ClassLoader.loadClass(ClassLoader.java:424)
sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
java.lang.ClassLoader.loadClass(ClassLoader.java:357)
java.lang.ClassLoader.defineClass1(Native Method)
java.lang.ClassLoader.defineClass(ClassLoader.java:763)
java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
java.net.URLClassLoader.access$100(URLClassLoader.java:73)
java.net.URLClassLoader$1.run(URLClassLoader.java:368)
java.net.URLClassLoader$1.run(URLClassLoader.java:362)
java.security.AccessController.doPrivileged(Native Method)
java.net.URLClassLoader.findClass(URLClassLoader.java:361)
java.lang.ClassLoader.loadClass(ClassLoader.java:424)
sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
java.lang.ClassLoader.loadClass(ClassLoader.java:411)
java.lang.ClassLoader.loadClass(ClassLoader.java:357)
java.lang.Class.forName0(Native Method)
java.lang.Class.forName(Class.java:348)
org.apache.catalina.loader.WebappClassLoaderBase.loadClass(Unknown Source)
org.apache.catalina.loader.WebappClassLoaderBase.loadClass(Unknown Source)
javax.el.ExpressionFactory.newInstance(Unknown Source)
javax.el.ExpressionFactory.newInstance(Unknown Source)
....
This supposedly indicates that there is no library providing an EL implementation, and this is where I'm stuck. I have tried a number of dependencies (using Maven) to no avail. Currently the relevant part of my pom looks like this:
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
<version>3.0.0</version>
</dependency>
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>1.1.0.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.4.1.Final</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator-annotation-processor</artifactId>
<version>5.4.1.Final</version>
</dependency>
I have tried various setups, with el-impl 2.2, etc., and adding el.jar to the Tomcat classpath, but no luck at all =(
What is even more strange: if you look at the exception above, the call actually goes through "javax.el.ExpressionFactory.newInstance(Unknown Source)" before resulting in "java.lang.ClassNotFoundException: javax.el.ExpressionFactory".
I'd be grateful for any hints on how to fix/debug the issue...
Add this dependency:
<dependency>
<groupId>org.glassfish.web</groupId>
<artifactId>el-impl</artifactId>
<version>2.2</version>
</dependency>
Follow the solution here.
I have found the solution to my issue. Strange as it seems, when I added the el.jar supplied with Tomcat to the classpath in catalina.sh, the validation works. It might be a distro-related issue (I'm running it on Gentoo).
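(Not from this thread, but on the first part of the question, @Valid not triggering the custom validator: one common approach, sketched here, is to register the validator on the controller's binder so it runs alongside the JSR-303 one:)
@InitBinder("sendMail")
protected void initBinder(WebDataBinder binder) {
    // Runs only for the SendMail model attribute; appends to the binder's existing validators
    binder.addValidators(new SendMail.CustomValidator());
}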

Ehcache, CacheException with JMS replication and activemq

I'm trying to implement a clustered Hibernate second-level cache with Ehcache, using JMS replication.
The Hibernate version is 3.6.10.Final and the Spring version is 3.2.2.
When the servlet is starting, I get the following error:
Caused by: org.hibernate.cache.CacheException: net.sf.ehcache.CacheException: Failure cloning default cache. Initial cause was not supported
at net.sf.ehcache.hibernate.AbstractEhcacheProvider.buildCache(AbstractEhcacheProvider.java:73)
at org.hibernate.cache.impl.bridge.RegionFactoryCacheProviderBridge.buildEntityRegion(RegionFactoryCacheProviderBridge.java:104)
at org.hibernate.impl.SessionFactoryImpl.<init>(SessionFactoryImpl.java:280)
at org.hibernate.cfg.Configuration.buildSessionFactory(Configuration.java:1872)
at com.targa.fleetGateway.HibernateUtil.<init>(HibernateUtil.java:26)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.springframework.beans.BeanUtils.instantiateClass(BeanUtils.java:148)
... 73 more
Caused by: net.sf.ehcache.CacheException: Failure cloning default cache. Initial cause was not supported
at net.sf.ehcache.CacheManager.cloneDefaultCache(CacheManager.java:1877)
at net.sf.ehcache.CacheManager.addCache(CacheManager.java:1173)
at net.sf.ehcache.hibernate.AbstractEhcacheProvider.buildCache(AbstractEhcacheProvider.java:66)
... 82 more
Caused by: java.lang.CloneNotSupportedException: not supported
at net.sf.ehcache.distribution.jms.JMSCacheLoader.clone(JMSCacheLoader.java:269)
at net.sf.ehcache.Cache.clone(Cache.java:2846)
at net.sf.ehcache.Cache.clone(Cache.java:163)
at net.sf.ehcache.CacheManager.cloneDefaultCache(CacheManager.java:1875)
... 84 more
My ehcache.xml is:
<?xml version="1.0" encoding="UTF-8"?>
<ehcache xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://ehcache.org/ehcache.xsd" updateCheck="true" monitoring="autodetect" dynamicConfig="true">
<cacheManagerPeerProviderFactory class="net.sf.ehcache.distribution.jms.JMSCacheManagerPeerProviderFactory"
properties="initialContextFactoryName=com.targa.fleetGateway.ExampleActiveMQInitialContextFactory,
providerURL=tcp://127.0.0.1:61616, replicationTopicConnectionFactoryBindingName=topicConnectionFactory,
replicationTopicBindingName=ehcache, getQueueConnectionFactoryBindingName=queueConnectionFactory, getQueueBindingName=ehcacheGetQueue, topicConnectionFactoryBindingName=topicConnectionFactory, topicBindingName=ehcache"
propertySeparator="," />
<defaultCache
maxElementsInMemory="100"
eternal="false"
timeToIdleSeconds="300"
timeToLiveSeconds="600"
overflowToDisk="false">
<cacheEventListenerFactory class="net.sf.ehcache.distribution.jms.JMSCacheReplicatorFactory"
properties="replicateAsynchronously=true, replicatePuts=true, replicateUpdates=true,
replicateUpdatesViaCopy=true, replicateRemovals=true, asynchronousReplicationIntervalMillis=1000"
propertySeparator="," />
<cacheLoaderFactory class="net.sf.ehcache.distribution.jms.JMSCacheLoaderFactory"
properties="initialContextFactoryName=com.targa.fleetGateway.ExampleActiveMQInitialContextFactory,
providerURL=tcp://127.0.0.1:61616, replicationTopicConnectionFactoryBindingName=topicConnectionFactory,
getQueueConnectionFactoryBindingName=queueConnectionFactory, replicationTopicBindingName=ehcache,
getQueueBindingName=ehcacheGetQueue, timeoutMillis=10000" />
</defaultCache>
</ehcache>
If I comment out the cacheLoaderFactory section, things start working again.
The same configuration works in another application where I'm using hibernate 4.2, but I can't upgrade to 4.2 on this one.
Has anybody got any clue about this?
Below are the other relevant pieces of my configuration. Please tell me if anything is missing.
POM.xml
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
<version>3.6.10.Final</version>
</dependency>
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-core</artifactId>
<version>2.6.8</version>
</dependency>
<dependency>
<groupId>net.sf.ehcache</groupId>
<artifactId>ehcache-jmsreplication</artifactId>
<version>0.5</version>
</dependency>
<dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-core</artifactId>
<version>5.7.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>16.0.1</version>
</dependency>
Hibernate properties:
<property name="hibernate.cache.use_second_level_cache">true</property>
<property name="hibernate.cache.provider_class">
net.sf.ehcache.hibernate.SingletonEhCacheProvider</property>
ExampleActiveMQInitialContextFactory.java
public class ExampleActiveMQInitialContextFactory extends
ActiveMQInitialContextFactory {
/**
* {@inheritDoc}
*/
@Override
public Context getInitialContext(Hashtable environment) throws NamingException {
Map<String, Object> data = new ConcurrentHashMap<String, Object>();
String replicationTopicConnectionFactoryBindingName = (String) environment
.get(JMSUtil.TOPIC_CONNECTION_FACTORY_BINDING_NAME);
if (replicationTopicConnectionFactoryBindingName != null) {
try {
data.put(replicationTopicConnectionFactoryBindingName,
createConnectionFactory(environment));
} catch (URISyntaxException e) {
throw new NamingException(
"Error initialisating TopicConnectionFactory with message "
+ e.getMessage());
}
}
String getQueueConnectionfactoryBindingName = (String) environment
.get(JMSUtil.GET_QUEUE_CONNECTION_FACTORY_BINDING_NAME);
try {
data.put(getQueueConnectionfactoryBindingName,
createConnectionFactory(environment));
} catch (URISyntaxException e) {
throw new NamingException(
"Error initialisating TopicConnectionFactory with message "
+ e.getMessage());
}
String replicationTopicBindingName = (String) environment
.get(JMSUtil.REPLICATION_TOPIC_BINDING_NAME);
String getQueueBindingName = (String) environment
.get(JMSUtil.GET_QUEUE_BINDING_NAME);
if (replicationTopicBindingName != null) {
data.put(replicationTopicBindingName,
createTopic(replicationTopicBindingName));
}
data.put(getQueueBindingName, createQueue(getQueueBindingName));
return createContext(environment, data);
}
}
I faced this problem today: the reason is that defaultCache should not have a cacheLoaderFactory.
Strange that this is not mentioned in the documentation.
So I removed the cacheLoaderFactory from defaultCache, and everything works fine.
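In other words, a trimmed sketch of the defaultCache that works (the same block as above, just without the cacheLoaderFactory; a JMS cache loader can still be declared on explicitly named caches if needed):
<defaultCache
    maxElementsInMemory="100"
    eternal="false"
    timeToIdleSeconds="300"
    timeToLiveSeconds="600"
    overflowToDisk="false">
    <cacheEventListenerFactory class="net.sf.ehcache.distribution.jms.JMSCacheReplicatorFactory"
        properties="replicateAsynchronously=true, replicatePuts=true, replicateUpdates=true,
        replicateUpdatesViaCopy=true, replicateRemovals=true, asynchronousReplicationIntervalMillis=1000"
        propertySeparator="," />
    <!-- no cacheLoaderFactory here: Hibernate clones defaultCache for each region, and the JMS cache loader does not support clone() -->
</defaultCache>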
