I have a Storm bolt as follows:
package storm.bolt;
import java.util.HashMap;
import java.util.Map;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.MultiTableBatchWriter;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;
public class AccumuloBolt implements IRichBolt {
private static final long serialVersionUID = 1L;
private OutputCollector collector;
private ZooKeeperInstance instance;
private Connector connector;
private BatchWriter bw;
private Text colf;
private MultiTableBatchWriter mtbw;
private final String instanceName;
private final String zooServers;
private final String userName;
private final String password;
Map<String, Integer> counters;
/**
 * @param instanceName The Accumulo instance name, e.g. "myistance".
 * @param zooServers The host(s) on which ZooKeeper is running, e.g. "192.168.1.81:2181".
 * @param userName The Accumulo username, e.g. "root".
 * @param password The Accumulo password, e.g. "aryadevi".
 */
public AccumuloBolt(String instanceName, String zooServers, String userName,
String password) {
this.instanceName = instanceName;
this.zooServers = zooServers;
this.userName = userName;
this.password = password;
}
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
// counters must be initialized here, otherwise execute() throws a NullPointerException
this.counters = new HashMap<String, Integer>();
try {
//this.instance = new ZooKeeperInstance(instanceName, zooServers);
this.instance = new ZooKeeperInstance("myistance", "192.168.1.81:2181");
//this.connector = instance.getConnector(userName, password);
this.connector = instance.getConnector("root", "aryadevi");
this.mtbw = connector.createMultiTableBatchWriter(200000L, 300, 4);
this.bw = null;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void execute(Tuple input) {
if (shouldActOnInput(input)) {
try{
if (!this.connector.tableOperations().exists("new2"))
this.connector.tableOperations().create("new2");
this.bw = this.mtbw.getBatchWriter("new2");
this.colf=new Text("colfam");
System.out.println("writing ...");
String str = input.getString(0);
if(!counters.containsKey(str)){
counters.put(str, 1);
}else{
Integer c = counters.get(str) + 1;
counters.put(str, c);
}
}catch (Exception e) {
throw new RuntimeException(e);
}
//DBObject updateObj = getDBObjectForInput(input);
//this.bw.addMutation(m);
} else {
collector.ack(input);
}
}
public void cleanup() {
try{
for(Map.Entry<String, Integer> entry : counters.entrySet()){
// the key is a String, so the format specifier must be %s (%d would throw IllegalFormatConversionException)
Mutation m = new Mutation(new Text(String.format("row_%s", entry.getKey())));
m.put(this.colf, new Text(String.format("colqual_%s", entry.getKey())), new Value((String.format("value_%d", entry.getValue())).getBytes()));
System.out.println(entry.getKey()+": "+entry.getValue());
bw.addMutation(m);
}
this.mtbw.close();
}catch (Exception e) {
throw new RuntimeException(e);
}
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// TODO Auto-generated method stub
}
public boolean shouldActOnInput(Tuple input) {
return true;
}
public Map<String, Object> getComponentConfiguration() {
// TODO Auto-generated method stub
return null;
}
}
I compiled this topology using "mvn compile" and created a package using "mvn package".
Then I ran the topology with the following command:
storm jar target/storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar storm.TwitterStorm
After running this command I get an error like the following:
java.lang.NoClassDefFoundError: Could not initialize class org.apache.accumulo.core.client.ZooKeeperInstance
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
46217 [Thread-8-count] ERROR backtype.storm.util - Async loop died!
java.lang.ExceptionInInitializerError: null
at org.apache.log4j.Logger.getLogger(Logger.java:39) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
at org.apache.log4j.Logger.getLogger(Logger.java:43) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
at org.apache.accumulo.core.client.ZooKeeperInstance.<clinit>(ZooKeeperInstance.java:63) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
Caused by: java.lang.IllegalStateException: Detected both log4j-over-slf4j.jar AND slf4j-log4j12.jar on the class path, preempting StackOverflowError. See also http://www.slf4j.org/codes.html#log4jDelegationLoop for more details.
at org.apache.log4j.Log4jLoggerFactory.<clinit>(Log4jLoggerFactory.java:49) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
... 8 common frames omitted
46218 [Thread-10-count] ERROR backtype.storm.daemon.executor -
java.lang.NoClassDefFoundError: Could not initialize class org.apache.accumulo.core.client.ZooKeeperInstance
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
46218 [Thread-8-count] ERROR backtype.storm.daemon.executor -
java.lang.ExceptionInInitializerError: null
at org.apache.log4j.Logger.getLogger(Logger.java:39) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
at org.apache.log4j.Logger.getLogger(Logger.java:43) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
at org.apache.accumulo.core.client.ZooKeeperInstance.<clinit>(ZooKeeperInstance.java:63) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
Caused by: java.lang.IllegalStateException: Detected both log4j-over-slf4j.jar AND slf4j-log4j12.jar on the class path, preempting StackOverflowError. See also http://www.slf4j.org/codes.html#log4jDelegationLoop for more details.
at org.apache.log4j.Log4jLoggerFactory.<clinit>(Log4jLoggerFactory.java:49) ~[log4j-over-slf4j-1.6.6.jar:1.6.6]
... 8 common frames omitted
46218 [Thread-6] INFO backtype.storm.daemon.executor - Loading executor count:[4 4]
46219 [Thread-6] INFO backtype.storm.daemon.task - Emitting: count __system ["startup"]
46220 [Thread-6] INFO backtype.storm.daemon.executor - Loaded executor tasks count:[4 4]
46224 [Thread-6] INFO backtype.storm.daemon.executor - Finished loading executor count:[4 4]
46224 [Thread-12-count] INFO backtype.storm.daemon.executor - Preparing bolt count:(4)
46225 [Thread-12-count] ERROR backtype.storm.util - Async loop died!
java.lang.NoClassDefFoundError: Could not initialize class org.apache.accumulo.core.client.ZooKeeperInstance
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
46226 [Thread-12-count] ERROR backtype.storm.daemon.executor -
java.lang.NoClassDefFoundError: Could not initialize class org.apache.accumulo.core.client.ZooKeeperInstance
at storm.bolt.AccumuloBolt.prepare(AccumuloBolt.java:60) ~[storm-twitter-0.0.1-SNAPSHOT-jar-with-dependencies.jar:na]
at backtype.storm.daemon.executor$fn__5641$fn__5653.invoke(executor.clj:690) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__457.invoke(util.clj:429) ~[storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:744) [na:1.7.0_55]
46321 [Thread-10-count] INFO backtype.storm.util - Halting process: ("Worker died")
46321 [Thread-8-count] INFO backtype.storm.util - Halting process: ("Worker died")
It looks like this Storm ticket has a relevant discussion:
https://issues.apache.org/jira/browse/STORM-122
I think Accumulo has an slf4j-log4j12 dependency, and Storm uses log4j-over-slf4j which is incompatible. The discussion seems to suggest excluding logging dependencies like slf4j-log4j12 and log4j from your Accumulo dependency. I don't know if this will work, but it's worth a try.
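As a sketch of what that exclusion might look like in the pom.xml (the artifact coordinates and version are assumptions; adjust them to whatever Accumulo artifact your pom actually pulls in):
<dependency>
    <groupId>org.apache.accumulo</groupId>
    <artifactId>accumulo-core</artifactId>
    <version>1.5.1</version> <!-- placeholder: use your actual version -->
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>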
I am using spring-boot & spring-kafka (see pom.xml for the specific versions) with Apache Kafka v2.0.1, and I am facing a weird issue when using transactions via the listener container.
The issue arises when an error originates from Kafka while trying to publish a new message in a consume-process-produce cycle. To emulate it, I set the topic's min.insync.replicas setting (4) greater than the number of available brokers (3).
I was expecting that, due to factory.setAfterRollbackProcessor(new DefaultAfterRollbackProcessor<String, String>(-1));, there would be a transaction rollback on each unsuccessful attempt and infinite retries.
As you can see in the output.log, when processing fails on the 1st try (message receipt), the listener container initiates the transaction rollback and then receives the same message again (as expected).
However, while trying for the 2nd time to process & publish the new message, it blocks forever on kafkaTemplate.send(record).get(); and no transaction rollback is initiated... By the way, at that point, if I set my min.insync.replicas setting back to an acceptable value (<= brokers), the process continues normally and the transaction is committed.
To reproduce it, just publish a string message to the topic, increase the topic's min.insync.replicas to an unacceptable value, and run the application.
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.3.RELEASE</version>
</parent>
<groupId>com.project.test</groupId>
<artifactId>kafka-transactions</artifactId>
<version>0.0.1-SNAPSHOT</version>
<properties>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
Application.java
package com.project.test;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.listener.ContainerProperties.AckMode;
import org.springframework.kafka.listener.DefaultAfterRollbackProcessor;
import org.springframework.kafka.transaction.KafkaTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
@EnableKafka
@EnableTransactionManagement
@SpringBootApplication
public class Application {
private String kafkaBootstrapServers = "127.0.0.1:9092";
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Bean
public ProducerFactory<String, String> producerFactory() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
configProps.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1");
configProps.put(ProducerConfig.ACKS_CONFIG, "all");
configProps.put(ProducerConfig.RETRIES_CONFIG, "1");
DefaultKafkaProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(configProps);
producerFactory.setTransactionIdPrefix("tx.");
return producerFactory;
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<>(producerFactory());
}
@Bean
public KafkaTransactionManager<String, String> kafkaTransactionManager() {
KafkaTransactionManager<String, String> manager = new KafkaTransactionManager<String, String>(producerFactory());
return manager;
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBootstrapServers);
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, "test-kafka-tx-consumer");
configProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
configProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
return new DefaultKafkaConsumerFactory<>(configProps);
}
@Bean
public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(1);
factory.getContainerProperties().setTransactionManager(kafkaTransactionManager());
factory.getContainerProperties().setAckMode(AckMode.RECORD);
factory.getContainerProperties().setSyncCommits(true);
factory.getContainerProperties().setCommitLogLevel(org.springframework.kafka.support.LogIfLevelEnabled.Level.INFO);
factory.setAfterRollbackProcessor(new DefaultAfterRollbackProcessor<String, String>(-1));
return factory;
}
}
KafkaTransactions.java
package com.project.test;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
@Component
public class KafkaTransactions {
private static final Logger log = LoggerFactory.getLogger(KafkaTransactions.class);
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
@KafkaListener(topics = "test-kafka-transactions")
public void messageListener(String value) throws Exception {
log.info("Received message");
ProducerRecord<String, String> record = new ProducerRecord<>("test-kafka-transactions", null, value);
Thread.sleep(2000);
log.info("Adding new message on Kafka transaction (commit is handled by the Listener Container)");
kafkaTemplate.send(record).get();
log.info("Processed message");
}
}
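As a side note on the blocking call above: one defensive variant (a sketch, not part of the original code; the timeout value is arbitrary) is to bound the get() so a stuck send eventually throws instead of blocking forever, letting the container roll the transaction back:
// Hypothetical variant of the send in messageListener(): wait at most 30s;
// a TimeoutException then propagates to the container, which rolls back.
kafkaTemplate.send(record).get(30, java.util.concurrent.TimeUnit.SECONDS);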
output.log
2019-04-04 10:21:25.741 INFO 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] o.s.k.l.KafkaMessageListenerContainer : partitions assigned: [test-kafka-transactions-3, test-kafka-transactions-2, test-kafka-transactions-1, test-kafka-transactions-0]
2019-04-04 10:23:03.240 INFO 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] com.project.test.KafkaTransactions : Received message
2019-04-04 10:23:05.245 INFO 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] com.project.test.KafkaTransactions : Adding new message on Kafka transaction (commit is handled by the Listener Container)
2019-04-04 10:23:05.247 INFO 19316 --- [kafka-producer-network-thread | producer-1] org.apache.kafka.clients.Metadata : Cluster ID: gvDhAK6YRsWzh2FrxukHnA
2019-04-04 10:23:05.267 WARN 19316 --- [kafka-producer-network-thread | producer-1] o.a.k.clients.producer.internals.Sender : [Producer clientId=producer-1, transactionalId=tx.test-kafka-tx-consumer.test-kafka-transactions.3] Got error produce response with correlation id 12 on topic-partition test-kafka-transactions-3, retrying (0 attempts left). Error: NOT_ENOUGH_REPLICAS
2019-04-04 10:23:05.372 ERROR 19316 --- [kafka-producer-network-thread | producer-1] o.s.k.support.LoggingProducerListener : Exception thrown when sending a message with key='1' and payload='1' to topic test-kafka-transactions:
org.apache.kafka.common.errors.NotEnoughReplicasException: Messages are rejected since there are fewer in-sync replicas than required.
2019-04-04 10:23:05.372 ERROR 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] essageListenerContainer$ListenerConsumer : Transaction rolled back
org.springframework.kafka.listener.ListenerExecutionFailedException: Listener method 'public void com.project.test.KafkaTransactions.messageListener(java.lang.String) throws java.lang.Exception' threw exception; nested exception is java.util.concurrent.ExecutionException: org.springframework.kafka.core.KafkaProducerException: Failed to send; nested exception is org.apache.kafka.common.errors.NotEnoughReplicasException: Messages are rejected since there are fewer in-sync replicas than required.
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:302) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:79) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.adapter.RecordMessagingMessageListenerAdapter.onMessage(RecordMessagingMessageListenerAdapter.java:50) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeOnMessage(KafkaMessageListenerContainer.java:1224) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeOnMessage(KafkaMessageListenerContainer.java:1217) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:1178) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.access$1600(KafkaMessageListenerContainer.java:384) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer$3.doInTransactionWithoutResult(KafkaMessageListenerContainer.java:1128) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.transaction.support.TransactionCallbackWithoutResult.doInTransaction(TransactionCallbackWithoutResult.java:36) ~[spring-tx-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:140) ~[spring-tx-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListenerInTx(KafkaMessageListenerContainer.java:1118) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:1096) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:934) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:750) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:699) [spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_191]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_191]
at java.lang.Thread.run(Thread.java:748) [na:1.8.0_191]
Caused by: java.util.concurrent.ExecutionException: org.springframework.kafka.core.KafkaProducerException: Failed to send; nested exception is org.apache.kafka.common.errors.NotEnoughReplicasException: Messages are rejected since there are fewer in-sync replicas than required.
at java.util.concurrent.FutureTask.report(FutureTask.java:122) [na:1.8.0_191]
at java.util.concurrent.FutureTask.get(FutureTask.java:192) [na:1.8.0_191]
at org.springframework.util.concurrent.SettableListenableFuture.get(SettableListenableFuture.java:119) ~[spring-core-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at com.project.test.KafkaTransactions.messageListener(KafkaTransactions.java:29) ~[classes/:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_191]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_191]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_191]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_191]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:170) ~[spring-messaging-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:120) ~[spring-messaging-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:48) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:283) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
... 17 common frames omitted
Caused by: org.springframework.kafka.core.KafkaProducerException: Failed to send; nested exception is org.apache.kafka.common.errors.NotEnoughReplicasException: Messages are rejected since there are fewer in-sync replicas than required.
at org.springframework.kafka.core.KafkaTemplate.lambda$buildCallback$0(KafkaTemplate.java:396) ~[spring-kafka-2.2.4.RELEASE.jar:2.2.4.RELEASE]
at org.apache.kafka.clients.producer.KafkaProducer$InterceptorCallback.onCompletion(KafkaProducer.java:1235) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.ProducerBatch.completeFutureAndFireCallbacks(ProducerBatch.java:204) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.ProducerBatch.done(ProducerBatch.java:187) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.failBatch(Sender.java:635) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.failBatch(Sender.java:604) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.completeBatch(Sender.java:561) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.handleProduceResponse(Sender.java:485) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.access$100(Sender.java:74) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender$1.onComplete(Sender.java:700) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.ClientResponse.onComplete(ClientResponse.java:109) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.NetworkClient.completeResponses(NetworkClient.java:532) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.NetworkClient.poll(NetworkClient.java:524) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:239) ~[kafka-clients-2.0.1.jar:na]
at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:163) ~[kafka-clients-2.0.1.jar:na]
... 1 common frames omitted
Caused by: org.apache.kafka.common.errors.NotEnoughReplicasException: Messages are rejected since there are fewer in-sync replicas than required.
2019-04-04 10:23:05.372 INFO 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] com.project.test.KafkaTransactions : Received message
2019-04-04 10:23:07.391 INFO 19316 --- [org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] com.project.test.KafkaTransactions : Adding new message on Kafka transaction (commit is handled by the Listener Container)
I was able to reproduce it after taking Spring out of the picture.
I submitted a bug KAFKA-8195.
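For reference, a Spring-free reproduction along those lines might look roughly like this (a sketch under the same broker/topic assumptions as the question above; the class name and transactional id are illustrative):
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class PlainKafkaTxRepro {
    public static void main(String[] args) {
        // Same producer settings as the question's producerFactory()
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.RETRIES_CONFIG, "1");
        props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "tx.repro");
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.initTransactions();
            producer.beginTransaction();
            try {
                // With min.insync.replicas > available brokers this send fails
                // with NotEnoughReplicasException; per KAFKA-8195 the question
                // is whether the returned future ever completes on retry.
                producer.send(new ProducerRecord<>("test-kafka-transactions", "value")).get();
                producer.commitTransaction();
            } catch (Exception e) {
                producer.abortTransaction();
                throw new RuntimeException(e);
            }
        }
    }
}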
While running a Kafka -> Apache Apex -> HBase pipeline, the following exception appears in the YARN tasks:
com.datatorrent.stram.StreamingAppMasterService: Application master, appId=4, clustertimestamp=1479188884109, attemptId=2
2016-11-15 11:59:51,068 INFO org.apache.hadoop.service.AbstractService: Service com.datatorrent.stram.StreamingAppMasterService failed in state INITED; cause: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
at org.apache.hadoop.fs.AbstractFileSystem.newInstance(AbstractFileSystem.java:130)
at org.apache.hadoop.fs.AbstractFileSystem.createFileSystem(AbstractFileSystem.java:156)
at org.apache.hadoop.fs.AbstractFileSystem.get(AbstractFileSystem.java:241)
at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:333)
at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:330)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
at org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:330)
at org.apache.hadoop.fs.FileContext.getFileContext(FileContext.java:444)
And my DataTorrent log shows the following exception. I am running an app that implements a Kafka -> Apex -> HBase streaming pipeline.
Connecting to ResourceManager at hduser1/127.0.0.1:8032
16/11/15 17:47:38 WARN client.EventsAgent: Cannot read events for application_1479208737206_0008: java.io.FileNotFoundException: File does not exist: /user/hduser1/datatorrent/apps/application_1479208737206_0008/events/index.txt
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:66)
at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:56)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsUpdateTimes(FSNamesystem.java:1893)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1834)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1814)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1786)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:552)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:362)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2036)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1656)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2034)
Adding the code:
public void populateDAG(DAG dag, Configuration conf) {
KafkaSinglePortInputOperator in = dag.addOperator("kafkaIn", new KafkaSinglePortInputOperator());
in.setInitialOffset(AbstractKafkaInputOperator.InitialOffset.EARLIEST.name());
LineOutputOperator out = dag.addOperator("fileOut", new LineOutputOperator());
dag.addStream("data", in.outputPort, out.input);
}
LineOutputOperator extends AbstractFileOutputOperator
private static final String NL = System.lineSeparator();
private static final Charset CS = StandardCharsets.UTF_8;
@NotNull
private String baseName;
@Override
public byte[] getBytesForTuple(byte[] t) {
String result = new String(t, CS) + NL;
return result.getBytes(CS);
}
@Override
protected String getFileName(byte[] tuple) {
return baseName;
}
public String getBaseName() { return baseName; }
public void setBaseName(String v) { baseName = v; }
How to resolve this problem?
Thanks.
Can you share some details about your environment, like which versions of Hadoop and Apex you are using? Also, which log does this exception appear in?
Just as a simple sanity check, can you run the simple maven archetype generated application described at: http://docs.datatorrent.com/beginner/
If that works, try running the fileIO and kafka applications at:
https://github.com/DataTorrent/examples/tree/master/tutorials
If those work ok we can look at the details of your code.
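For instance, the beginner's guide linked above generates that sanity-check application from a Maven archetype; the invocation looks roughly like this (coordinates and version are quoted from memory, so verify them against the linked page):
mvn archetype:generate -DarchetypeGroupId=org.apache.apex \
 -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.4.0 \
 -DgroupId=com.example -Dpackage=com.example.myapexapp \
 -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT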
I got the solution.
The problem was related to the expiry of my license, so I reinstalled a new one and the actual code works fine.
I'm trying to get a hold of DistributedCache. I'm using Apache Hadoop 1.2.1 on two nodes.
I referred to the Cloudera post on this, which is simply extended in other posts explaining how to use third-party jars via -libjars.
Note:
In my jar, I haven't included any library jars - neither Hadoop core nor Commons Lang.
The code:
public class WordCounter extends Configured implements Tool {
@Override
public int run(String[] args) throws Exception {
// TODO Auto-generated method stub
// Job job = new Job(getConf(), args[0]);
Job job = new Job(super.getConf(), args[0]);
job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setJarByClass(WordCounter.class);
FileInputFormat.setInputPaths(job, new Path(args[1]));
FileOutputFormat.setOutputPath(job, new Path(args[2]));
job.setMapperClass(WCMapper.class);
job.setReducerClass(WCReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
int jobState = job.waitForCompletion(true) ? 0 : 1;
return jobState;
}
public static void main(String[] args) throws Exception {
// TODO Auto-generated method stub
if (args == null || args.length < 3) {
System.out.println("The below three arguments are expected");
System.out
.println("<job name> <hdfs path of the input file> <hdfs path of the output file>");
return;
}
WordCounter wordCounter = new WordCounter();
// System.exit(ToolRunner.run(wordCounter, args));
System.exit(ToolRunner.run(new Configuration(), wordCounter, args));
}
}
The Mapper class is naive; it's only attempting to use StringUtils from Apache Commons (and NOT Hadoop):
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
/**
* @author 298790
*
*/
public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
private static IntWritable one = new IntWritable(1);
@Override
protected void map(
LongWritable key,
Text value,
org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
StringTokenizer strTokenizer = new StringTokenizer(value.toString());
Text token = new Text();
while (strTokenizer.hasMoreTokens()) {
token.set(strTokenizer.nextToken());
context.write(token, one);
}
System.out.println("Converting " + value + " to upper case "
+ StringUtils.upperCase(value.toString()));
}
}
The commands that I use:
bigdata@slave3:~$ export HADOOP_CLASSPATH=dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$
bigdata@slave3:~$ echo $HADOOP_CLASSPATH
dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$
bigdata@slave3:~$ echo $LIBJARS
dumphere/lib/commons-lang3-3.1.jar
bigdata@slave3:~$ hadoop jar dumphere/code/jars/hdp_3rdparty.jar com.hadoop.basics.WordCounter "WordCount" "/input/dumphere/Childhood_days.txt" "/output/dumphere/wc" -libjars ${LIBJARS}
The exception I get:
Warning: $HADOOP_HOME is deprecated.
14/08/13 21:56:05 INFO input.FileInputFormat: Total input paths to process : 1
14/08/13 21:56:05 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/08/13 21:56:05 WARN snappy.LoadSnappy: Snappy native library not loaded
14/08/13 21:56:05 INFO mapred.JobClient: Running job: job_201408111719_0190
14/08/13 21:56:06 INFO mapred.JobClient: map 0% reduce 0%
14/08/13 21:56:37 INFO mapred.JobClient: Task Id : attempt_201408111719_0190_m_000000_0, Status : FAILED
Error: java.lang.ClassNotFoundException: org.apache.commons.lang3.StringUtils
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at com.hadoop.basics.WCMapper.map(WCMapper.java:40)
at com.hadoop.basics.WCMapper.map(WCMapper.java:1)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:364)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
14/08/13 21:56:42 INFO mapred.JobClient: Task Id : attempt_201408111719_0190_m_000000_1, Status : FAILED
Error: java.lang.ClassNotFoundException: org.apache.commons.lang3.StringUtils
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
at com.hadoop.basics.WCMapper.map(WCMapper.java:40)
at com.hadoop.basics.WCMapper.map(WCMapper.java:1)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:764)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:364)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
The Cloudera post mentions:
The jar will be placed in distributed cache and will be made available to all of the job’s task attempts. More specifically, you will find the JAR in one of the ${mapred.local.dir}/taskTracker/archive/${user.name}/distcache/… subdirectories on local nodes.
But on that path, I'm not able to find the commons-lang3-3.1.jar.
What am I missing?
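One thing worth checking (a common cause of this symptom, though I can't confirm it applies here): GenericOptionsParser, which ToolRunner delegates to, only picks up generic options such as -libjars when they appear before the application arguments. In the command above, -libjars comes after them, so it may be treated as a plain application argument and never processed. The reordered invocation would look like:
bigdata@slave3:~$ hadoop jar dumphere/code/jars/hdp_3rdparty.jar com.hadoop.basics.WordCounter -libjars ${LIBJARS} "WordCount" "/input/dumphere/Childhood_days.txt" "/output/dumphere/wc"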
I'm trying to convert the XML application configuration files to Java-based configuration.
I'm encountering a problem with the entityManager. It worked well with persistence.xml and web.xml:
<!-- Entity Manager -->
<jee:jndi-lookup id="entityManagerFactory" jndi-name="jdbc/derby"></jee:jndi-lookup>
I use:
Spring 3.2
maven 3
Java 7
glassfish embedded 3.1.1
Spring Data JPA 1.4.2
Could you help me? Thank you!
Stack trace
mai 27, 2014 11:19:57 PM org.apache.catalina.core.ContainerBase addChildInternal
Grave: ContainerBase.addChild: start:
org.apache.catalina.LifecycleException: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'entityManagerFactory' defined in class path resource [com/marc/embeddedglassfish/config/PersistenceContext.class]: Invocation of init method failed; nested exception is java.lang.reflect.UndeclaredThrowableException
at org.apache.catalina.core.StandardContext.start(StandardContext.java:5389)
at com.sun.enterprise.web.WebModule.start(WebModule.java:498)
at org.apache.catalina.core.ContainerBase.addChildInternal(ContainerBase.java:917)
at org.apache.catalina.core.ContainerBase.addChild(ContainerBase.java:901)
at org.apache.catalina.core.StandardHost.addChild(StandardHost.java:733)
at com.sun.enterprise.web.WebContainer.loadWebModule(WebContainer.java:2000)
at com.sun.enterprise.web.WebContainer.loadWebModule(WebContainer.java:1651)
at com.sun.enterprise.web.WebApplication.start(WebApplication.java:109)
at org.glassfish.internal.data.EngineRef.start(EngineRef.java:130)
at org.glassfish.internal.data.ModuleInfo.start(ModuleInfo.java:269)
at org.glassfish.internal.data.ApplicationInfo.start(ApplicationInfo.java:294)
at com.sun.enterprise.v3.server.ApplicationLifecycle.deploy(ApplicationLifecycle.java:462)
at com.sun.enterprise.v3.server.ApplicationLifecycle.deploy(ApplicationLifecycle.java:240)
at org.glassfish.deployment.admin.DeployCommand.execute(DeployCommand.java:382)
at com.sun.enterprise.v3.admin.CommandRunnerImpl$1.execute(CommandRunnerImpl.java:355)
at com.sun.enterprise.v3.admin.CommandRunnerImpl.doCommand(CommandRunnerImpl.java:370)
at com.sun.enterprise.v3.admin.CommandRunnerImpl.doCommand(CommandRunnerImpl.java:1064)
at com.sun.enterprise.v3.admin.CommandRunnerImpl.access$1200(CommandRunnerImpl.java:96)
at com.sun.enterprise.v3.admin.CommandRunnerImpl$ExecutionContext.execute(CommandRunnerImpl.java:1244)
at com.sun.enterprise.v3.admin.CommandRunnerImpl$ExecutionContext.execute(CommandRunnerImpl.java:1232)
at com.sun.enterprise.admin.cli.embeddable.DeployerImpl.deploy(DeployerImpl.java:129)
at com.sun.enterprise.admin.cli.embeddable.DeployerImpl.deploy(DeployerImpl.java:105)
at org.glassfish.maven.PluginUtil.doDeploy(PluginUtil.java:106)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.glassfish.maven.AbstractDeployMojo.doDeploy(AbstractDeployMojo.java:239)
at org.glassfish.maven.RunMojo.execute(RunMojo.java:68)
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:107)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:209)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:84)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:59)
at org.apache.maven.lifecycle.internal.LifecycleStarter.singleThreadedBuild(LifecycleStarter.java:183)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:161)
at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:319)
at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:156)
at org.apache.maven.cli.MavenCli.execute(MavenCli.java:534)
at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:196)
at org.apache.maven.cli.MavenCli.main(MavenCli.java:141)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:290)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:230)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:409)
at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:352)
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'entityManagerFactory' defined in class path resource [com/marc/embeddedglassfish/config/PersistenceContext.class]: Invocation of init method failed; nested exception is java.lang.reflect.UndeclaredThrowableException
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1486)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:524)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:461)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.context.support.AbstractApplicationContext.getBean(AbstractApplicationContext.java:1117)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:922)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.web.context.ContextLoader.configureAndRefreshWebApplicationContext(ContextLoader.java:383)
at org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:283)
at org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:112)
at org.apache.catalina.core.StandardContext.contextListenerStart(StandardContext.java:4750)
at com.sun.enterprise.web.WebModule.contextListenerStart(WebModule.java:550)
at org.apache.catalina.core.StandardContext.start(StandardContext.java:5366)
... 49 more
Caused by: java.lang.reflect.UndeclaredThrowableException
at com.sun.proxy.$Proxy147.addTransformer(Unknown Source)
at org.eclipse.persistence.jpa.PersistenceProvider.createContainerEntityManagerFactory(PersistenceProvider.java:231)
at org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean.createNativeEntityManagerFactory(LocalContainerEntityManagerFactoryBean.java:287)
at org.springframework.orm.jpa.AbstractEntityManagerFactoryBean.afterPropertiesSet(AbstractEntityManagerFactoryBean.java:310)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1545)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1483)
... 64 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.springframework.orm.jpa.persistenceunit.DefaultPersistenceUnitManager$Jpa2PersistenceUnitInfoDecorator.invoke(DefaultPersistenceUnitManager.java:617)
... 70 more
Caused by: java.lang.IllegalStateException: Cannot apply class transformer without LoadTimeWeaver specified
at org.springframework.orm.jpa.persistenceunit.SpringPersistenceUnitInfo.addTransformer(SpringPersistenceUnitInfo.java:109)
... 75 more
PersistenceContext.java
package com.marc.embeddedglassfish.config;
import java.util.Properties;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.jdbc.datasource.lookup.JndiDataSourceLookup;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.support.AbstractPlatformTransactionManager;
import com.marc.springmvc3.dao.PersonDAO;
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(basePackages = {"com.marc.springmvc3.repositories"})
public class PersistenceContext {
/* glassfish requires "java:app/"
Deployment Descriptor. An application-scoped resource is defined in the glassfish-resources.xml deployment descriptor file.
This file is placed in the META-INF directory of the module or application archive. For web applications or modules,
this file is placed in the WEB-INF directory. If any submodule archives of an enterprise application archive have their
own glassfish-resources.xml files, the resource definitions are scoped to those modules only. For more information about
the glassfish-resources.xml file, see Appendix B, GlassFish Server Deployment Descriptor Files and Appendix C, Elements of the
GlassFish Server Deployment Descriptors.
Naming. Application-scoped resource JNDI names begin with java:app or java:module. If one of these prefixes is not specified
in the JNDI name, it is added. For example, application-scoped databases have JNDI names in the following format:
java:app/jdbc/DataSourceName or java:module/jdbc/DataSourceName. This is in accordance with the naming scopes introduced
in the Java EE 6 Specification.
http://docs.oracle.com/cd/E18930_01/html/821-2417/giydj.html
oracle glassfish application deployment guide
*/
@Bean
public DataSource dataSource() {
final JndiDataSourceLookup dsLookup = new JndiDataSourceLookup();
dsLookup.setResourceRef(true);
DataSource dataSource = dsLookup.getDataSource("java:app/jdbc/TestDB");
return dataSource;
}
@Bean
public PersonDAO personDao() {
PersonDAO personDao = new PersonDAO();
personDao.setDataSource(dataSource());
return personDao;
}
@Bean
public AbstractPlatformTransactionManager transactionManager() {
return new JpaTransactionManager(entityManagerFactory());
}
@Bean
public DataSource dataSourceForEntityManager() {
final JndiDataSourceLookup dsLookup = new JndiDataSourceLookup();
dsLookup.setResourceRef(true);
// "jdbc/__TimerPool" is defined by default in embedded glassfish
DataSource dataSource = dsLookup.getDataSource("jdbc/__TimerPool");
return dataSource;
}
@Bean
public EntityManagerFactory entityManagerFactory() {
final LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
EclipseLinkJpaVendorAdapter vendor = new EclipseLinkJpaVendorAdapter();
vendor.setDatabase(Database.DERBY);
vendor.setGenerateDdl(true);
vendor.setShowSql(true);
entityManagerFactoryBean.setJpaVendorAdapter(vendor);
entityManagerFactoryBean.setJtaDataSource(dataSourceForEntityManager());
entityManagerFactoryBean.setPackagesToScan(new String[] { "com.marc.springmvc3.model" });
final Properties props = new Properties();
props.setProperty("javax.persistence.jdbc.driver", "org.apache.derby.jdbc.EmbeddedDriver");
props.setProperty("eclipselink.target-database", "DERBY");
props.setProperty("eclipselink.ddl-generation", "drop-and-create-table");
props.setProperty("eclipselink.logging.level.sql", "FINEST");
props.setProperty("eclipselink.logging.parameters", "true");
entityManagerFactoryBean.setJpaProperties(props);
entityManagerFactoryBean.afterPropertiesSet();
return entityManagerFactoryBean.getObject();
}
}
PersonService.java
package com.marc.springmvc3.service;
import java.util.List;
import javax.inject.Inject;
import org.springframework.stereotype.Service;
import com.marc.springmvc3.model.Person;
import com.marc.springmvc3.repositories.PersonRepository;
@Service
public class PersonService {
@Inject
private PersonRepository repository;
@Inject
private List<Person> persons;
public List<Person> getAllPersons() {
Person person = new Person();
person.setFirstName("Lucky");
person.setName("Luke");
repository.save(person);
person.setFirstName("");
person.setName("");
Person dbPerson = repository.findOne(person.getId());
persons.add(dbPerson);
return persons;
}
}
PersonRepository.java
package com.marc.springmvc3.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import com.marc.springmvc3.model.Person;
public interface PersonRepository extends JpaRepository<Person, Integer> {
}
Initiliazer
package com.marc.embeddedglassfish.initializer;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
import org.springframework.web.WebApplicationInitializer;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
public class EmbeddedGlassfishInitializer implements WebApplicationInitializer {
private static final String DISPATCHER_SERVLET_NAME = "dispatcher";
private static final String DISPATCHER_SERVLET_MAPPING = "/";
@Override
public void onStartup(ServletContext servletContext) throws ServletException {
WebApplicationContext context = getContext();
servletContext.addListener(new ContextLoaderListener(context));
ServletRegistration.Dynamic dispatcher = servletContext.addServlet("DispatcherServlet", new DispatcherServlet(context));
dispatcher.setLoadOnStartup(1);
dispatcher.addMapping("/");
}
private AnnotationConfigWebApplicationContext getContext() {
AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
context.setConfigLocation("com.marc.embeddedglassfish.config");
return context;
}
}
I solved it.
The interesting part of the stack trace is at the end:
Caused by: java.lang.IllegalStateException: Cannot apply class transformer without LoadTimeWeaver specified
The explanation here helped me: EclipseLinkJpaVendorAdapter instead of HibernateJpaVendorAdapter issue
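For reference, the fix boils down to either supplying a LoadTimeWeaver or disabling EclipseLink's dynamic weaving so that none is needed. A minimal sketch of the latter option, added to the JPA properties built in PersistenceContext above:
// Sketch (one possible fix): with dynamic weaving disabled, EclipseLink no
// longer calls addTransformer(), so no LoadTimeWeaver has to be specified.
props.setProperty("eclipselink.weaving", "false");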
I still don't know how to get an EntityManagerFactory from GlassFish via JNDI in the Java-based application context configuration. Any idea?
It should be:
@Bean
public FactoryBean<EntityManagerFactory> entityManagerFactory() {
...
entityManagerFactoryBean.setJpaProperties(props);
return entityManagerFactoryBean;
}
@Bean
public PlatformTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
return new JpaTransactionManager(entityManagerFactory);
}
It's not recommended to instantiate any FactoryBean directly from code: just rely on the Spring container.
To use a FactoryBean's result, you need to rely on parameter injection, as in the transactionManager method above.
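As for the open question above about fetching the EntityManagerFactory from GlassFish via JNDI: in Java config, the <jee:jndi-lookup> element corresponds to a plain JNDI lookup. A minimal sketch (untested, assuming the container still exposes the factory under the jndi-name used in the XML):
import javax.naming.NamingException;
import javax.persistence.EntityManagerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.jndi.JndiTemplate;

// Sketch: Java-config equivalent of
// <jee:jndi-lookup id="entityManagerFactory" jndi-name="jdbc/derby"/>
@Bean
public EntityManagerFactory entityManagerFactory() throws NamingException {
    return new JndiTemplate().lookup("jdbc/derby", EntityManagerFactory.class);
}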
I am trying to get the variables loaded automatically based on the environment in which the code is running. I wrote some code and ran into issues. Can you please point out what I am doing wrong?
WebAppInitializer.java
public class WebAppInitializer implements WebApplicationInitializer {
@Override
public void onStartup(ServletContext sc) throws ServletException {
// Create the 'root' Spring application context
AnnotationConfigWebApplicationContext root = new AnnotationConfigWebApplicationContext();
root.scan("com.configs");
root.getEnvironment().setActiveProfiles("dev");
root.refresh();
// Manages the lifecycle of the root application context
sc.addListener(new ContextLoaderListener(root));
// Handles requests into the application
ServletRegistration.Dynamic appServlet = sc.addServlet("appServlet",
new DispatcherServlet(new GenericWebApplicationContext()));
appServlet.setLoadOnStartup(1);
Set<String> mappingConflicts = appServlet.addMapping("/");
if (!mappingConflicts.isEmpty()) {
throw new IllegalStateException(
"'appServlet' could not be mapped to '/' due "
+ "to an existing mapping. This is a known issue under Tomcat versions "
+ "<= 7.0.14; see https://issues.apache.org/bugzilla/show_bug.cgi?id=51278");
}
}
}
DynamicConfig.java
@Configuration
@Profile("dev")
@PropertySource("classpath:/devel.properties")
public class DynamicConfig {
@Autowired
Environment env;
@Bean
public TestClass testClass(){
TestClass testClass = new TestClass();
testClass.setEnvironment(env.getProperty("environment"));
return testClass;
}
}
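For reference, the devel.properties file referenced by @PropertySource only needs to define the key read above; its content is not shown in the question, so this is an illustrative example:
# src/main/resources/devel.properties (illustrative)
environment=dev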
TestClass is a simple class with one instance variable whose value comes from the config based on the environment.
TestCase:
package com.tester;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import com.configs.DynamicConfig;
import com.configs.EnvironmentDetector;
import com.tester.TestClass;
public class TestClassTest {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(DynamicConfig.class);
@Test
public void test(){
ApplicationContext context = new AnnotationConfigApplicationContext(DynamicConfig.class);
context.getEnvironment().setActiveProfiles("dev");
context.scan("com.configs");
TestClass test = context.getBean(TestClass.class);
System.out.println(test.getEnvironment());
}
}
Now I am getting the error below:
Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 0.282 sec <<< FAILURE!
test(com.tester.TestClassTest) Time elapsed: 0.279 sec <<< ERROR!
org.springframework.beans.factory.NoSuchBeanDefinitionException: No unique bean of type [com.tester.TestClass] is defined: expected single bean but found 0:
at org.springframework.beans.factory.support.DefaultListableBeanFactory.getBean(DefaultListableBeanFactory.java:280)
at org.springframework.context.support.AbstractApplicationContext.getBean(AbstractApplicationContext.java:1106)
at com.tester.TestClassTest.test(TestClassTest.java:16)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:45)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:231)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:60)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:222)
at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
at org.apache.maven.surefire.junit4.JUnit4TestSet.execute(JUnit4TestSet.java:35)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:146)
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:97)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.maven.surefire.booter.ProviderFactory$ClassLoaderProxy.invoke(ProviderFactory.java:103)
at $Proxy0.invoke(Unknown Source)
at org.apache.maven.surefire.booter.SurefireStarter.invokeProvider(SurefireStarter.java:145)
at org.apache.maven.surefire.booter.SurefireStarter.runSuitesInProcess(SurefireStarter.java:87)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:69)
When I remove the line @Profile("dev") from DynamicConfig.java, the code runs fine. But I want that profile, and I want to create similar classes for prod.
Please help.
Thanks.
Update your test class to activate the profile. Currently you are activating it in the WebApplicationInitializer, which is not run in the context of your test.
One way to activate it is the following, though it is not the only way:
@RunWith(SpringJUnit4ClassRunner.class)
@ActiveProfiles({"dev"})
@ContextConfiguration(classes = {DynamicConfig.class})
public class TestClassTest {
@Autowired
TestClass testClass;
@Test
public void test(){
System.out.println(testClass.getEnvironment());
}
}