ClassNotFoundException after job submission - spring

I'm trying out Spring Data Hadoop for executing MR code on a remote cluster from my local machine's IDE.
Originally tried with: Hadoop 1.1.2, Spring 3.2.4, Spring-Data-Hadoop 1.0.0
Also tried with these versions: Hadoop 1.2.1, Spring 4.0.1, Spring-Data-Hadoop 2.0.2
applicationContext.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:hdp="http://www.springframework.org/schema/hadoop"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.2.xsd">
<context:property-placeholder location="resources/hadoop.properties" />
<hdp:configuration file-system-uri="${hd.fs}" job-tracker-uri="${hd.jobtracker.uri}">
</hdp:configuration>
<hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
reducer="com.hadoop.basics.WordCounter.WCReducer" input-path="${wordcount.input.path}"
output-path="${wordcount.output.path}" user="bigdata">
</hdp:job>
<hdp:job-runner id="myjobs-runner" job-ref="wc-job"
run-at-startup="true" />
<hdp:resource-loader id="resourceLoader" uri="${hd.fs}"
user="bigdata" />
</beans>
WordCounter.java:
package com.hadoop.basics;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class WordCounter {
private static IntWritable one = new IntWritable(1);
public static class WCMapper extends Mapper<Text, Text, Text, IntWritable> {
@Override
protected void map(
Text key,
Text value,
org.apache.hadoop.mapreduce.Mapper<Text, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
StringTokenizer strTokenizer = new StringTokenizer(value.toString());
Text token = new Text();
while (strTokenizer.hasMoreTokens()) {
token.set(strTokenizer.nextToken());
context.write(token, one);
}
}
}
public static class WCReducer extends
Reducer<Text, IntWritable, Text, IntWritable> {
@Override
protected void reduce(
Text key,
Iterable<IntWritable> values,
org.apache.hadoop.mapreduce.Reducer<Text, IntWritable, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
int sum = 0;
for (IntWritable value : values) {
sum += value.get();
}
context.write(key, new IntWritable(sum));
}
}
public static void main(String[] args) {
AbstractApplicationContext context = new ClassPathXmlApplicationContext(
"applicationContext.xml", WordCounter.class);
System.out.println("Word Count Application Running");
context.registerShutdownHook();
}
}
The output is:
Aug 23, 2013 11:07:48 AM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext#1815338: startup date [Fri Aug 23 11:07:48 IST 2013]; root of context hierarchy
Aug 23, 2013 11:07:48 AM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
Aug 23, 2013 11:07:48 AM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
INFO: Loading properties file from class path resource [resources/hadoop.properties]
Aug 23, 2013 11:07:48 AM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory#7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Aug 23, 2013 11:07:49 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Starting job [wc-job]
Aug 23, 2013 11:07:49 AM org.apache.hadoop.mapred.JobClient copyAndConfigureFiles
WARNING: No job jar file set. User classes may not be found. See JobConf(Class) or JobConf#setJar(String).
Aug 23, 2013 11:07:49 AM org.apache.hadoop.mapreduce.lib.input.FileInputFormat listStatus
INFO: Total input paths to process : 1
Aug 23, 2013 11:07:50 AM org.apache.hadoop.util.NativeCodeLoader <clinit>
WARNING: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Aug 23, 2013 11:07:50 AM org.apache.hadoop.io.compress.snappy.LoadSnappy <clinit>
WARNING: Snappy native library not loaded
Aug 23, 2013 11:07:52 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: Running job: job_201308231532_0002
Aug 23, 2013 11:07:53 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: map 0% reduce 0%
Aug 23, 2013 11:08:12 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: Task Id : attempt_201308231532_0002_m_000000_0, Status : FAILED
java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
... 8 more
Aug 23, 2013 11:08:33 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: Task Id : attempt_201308231532_0002_m_000000_1, Status : FAILED
java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
... 8 more
Aug 23, 2013 11:08:51 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: Task Id : attempt_201308231532_0002_m_000000_2, Status : FAILED
java.lang.RuntimeException: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:849)
at org.apache.hadoop.mapreduce.JobContext.getMapperClass(JobContext.java:199)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:719)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:370)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.ClassNotFoundException: com.hadoop.basics.WordCounter$WCMapper
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:802)
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:847)
... 8 more
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.JobClient monitorAndPrintJob
INFO: Job complete: job_201308231532_0002
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Counters: 7
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Job Counters
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: SLOTS_MILLIS_MAPS=86688
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Total time spent by all reduces waiting after reserving slots (ms)=0
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Total time spent by all maps waiting after reserving slots (ms)=0
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Launched map tasks=4
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Data-local map tasks=4
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: SLOTS_MILLIS_REDUCES=0
Aug 23, 2013 11:09:24 AM org.apache.hadoop.mapred.Counters log
INFO: Failed map tasks=1
Aug 23, 2013 11:09:24 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Completed job [wc-job]
Aug 23, 2013 11:09:24 AM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory#7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: Job [wc-job] failed to start; status=FAILED
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Caused by: java.lang.IllegalStateException: Job [wc-job] failed to start; status=FAILED
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:219)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
... 13 more
What configuration have I missed? Is it really possible to submit a Hadoop job remotely using Spring Data without creating a jar, etc.?

You can try the jar-by-class attribute to locate the jar and let Hadoop upload it to the TaskTracker.
<hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
reducer="com.hadoop.basics.WordCounter.WCReducer"
input-path="${wordcount.input.path}"
jar-by-class="com.hadoop.basics.WordCounter"
output-path="${wordcount.output.path}" user="bigdata">
</hdp:job>
Lastly, WCMapper and WCReducer should be static, or Hadoop cannot instantiate them.
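For reference, jar-by-class mirrors what a plain Hadoop driver does with Job.setJarByClass(); a minimal sketch using the Hadoop 1.x API from the question (assuming the WordCounter class from the question is on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

import com.hadoop.basics.WordCounter;

public class JarByClassExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "wc-job"); // Hadoop 1.x constructor
        // This is what jar-by-class configures: Hadoop locates the jar
        // containing WordCounter and ships it with the job, so the
        // TaskTrackers can load WCMapper and WCReducer.
        job.setJarByClass(WordCounter.class);
        job.setMapperClass(WordCounter.WCMapper.class);
        job.setReducerClass(WordCounter.WCReducer.class);
    }
}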

I had the same problem with my very first Hadoop job, and the solution was in the way the jar file is compiled in Eclipse.
When you want to export a Java project as a jar file in Eclipse, two options are available:
Extract required libraries into generated JAR
Package required libraries into generated JAR
The first option solved my problem and will probably solve yours.

I was getting the same issue; separating the mapper and reducer classes into their own files worked, together with the following changes in applicationContext.xml:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:util="http://www.springframework.org/schema/util"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:hdp="http://www.springframework.org/schema/hadoop" xmlns:batch="http://www.springframework.org/schema/batch"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/batch http://www.springframework.org/schema/batch/spring-batch.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.2.xsd">
<context:property-placeholder location="classpath:application.properties" />
<hdp:configuration namenode-principal="hdfs://xx.yy.com" rm-manager-uri="xx.yy.com"
security-method="kerb" user-keytab="location" rm-manager-principal="username"
user-principal="username">
fs.default.name=${fs.default.name}
mapred.job.tracker=${mapred.job.tracker}
</hdp:configuration>
<hdp:job id="wordCountJobId" input-path="${input.path}"
output-path="${output.path}" jar-by-class="com.xx.poc.Application"
mapper="com.xx.poc.Map" reducer="com.xx.poc.Reduce" />
<hdp:job-runner id="wordCountJobRunner" job-ref="wordCountJobId"
run-at-startup="true" />
</beans>
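For illustration, "separating" here means making each class top-level in its own file; a rough sketch of what the mapper referenced above as com.xx.poc.Map might look like (the package and class names are just the ones used in this config, and the LongWritable input key is an assumption based on the default TextInputFormat):

package com.xx.poc;

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// A top-level mapper class: no enclosing instance is needed, so the
// framework can instantiate it reflectively on the task nodes.
public class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer tokens = new StringTokenizer(value.toString());
        while (tokens.hasMoreTokens()) {
            word.set(tokens.nextToken());
            context.write(word, ONE);
        }
    }
}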

Right-click on your project, then Build Path -> Configure Build Path. Select the Libraries tab, click Add External JARs, and add all the required JAR files from your Hadoop directory. I guess this will solve your problem.

Related

CannotLoadBeanClassException: Cannot find class for bean with name defined in ServletContext resource [/WEB-INF/spring-servlet.xml]

In the spring-servlet.xml configuration file of my Maven Spring project, an exception occurs when I run the project after building with Maven.
INFO: Initializing Servlet 'spring'
Loading class `com.mysql.jdbc.Driver'. This is deprecated. The new driver class is `com.mysql.cj.jdbc.Driver'. The driver is automatically registered via the SPI and manual loading of the driver class is generally unnecessary.
Jun 19, 2019 4:05:10 PM org.springframework.context.support.AbstractApplicationContext refresh
WARNING: Exception encountered during context initialization - cancelling refresh attempt: org.springframework.beans.factory.CannotLoadBeanClassException: Cannot find class [daoImp.EmployeeCruzDaoImp] for bean with name 'dao' defined in ServletContext resource [/WEB-INF/spring-servlet.xml]; nested exception is java.lang.ClassNotFoundException: daoImp.EmployeeCruzDaoImp
Jun 19, 2019 4:05:10 PM org.springframework.web.servlet.FrameworkServlet initServletBean
SEVERE: Context initialization failed
org.springframework.beans.factory.CannotLoadBeanClassException: Cannot find class [daoImp.EmployeeCruzDaoImp] for bean with name 'dao' defined in ServletContext resource [/WEB-INF/spring-servlet.xml]; nested exception is java.lang.ClassNotFoundException: daoImp.EmployeeCruzDaoImp
at org.springframework.beans.factory.support.AbstractBeanFactory.resolveBeanClass(AbstractBeanFactory.java:1382)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.determineTargetType(AbstractAutowireCapableBeanFactory.java:663)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.predictBeanType(AbstractAutowireCapableBeanFactory.java:630)
at org.springframework.beans.factory.support.AbstractBeanFactory.isFactoryBean(AbstractBeanFactory.java:1491)
at org.springframework.beans.factory.support.AbstractBeanFactory.isFactoryBean(AbstractBeanFactory.java:1014)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:826)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:863)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:546)
at org.springframework.web.servlet.FrameworkServlet.configureAndRefreshWebApplicationContext(FrameworkServlet.java:696)
at org.springframework.web.servlet.FrameworkServlet.createWebApplicationContext(FrameworkServlet.java:662)
at org.springframework.web.servlet.FrameworkServlet.createWebApplicationContext(FrameworkServlet.java:710)
at org.springframework.web.servlet.FrameworkServlet.initWebApplicationContext(FrameworkServlet.java:587)
at org.springframework.web.servlet.FrameworkServlet.initServletBean(FrameworkServlet.java:526)
at org.springframework.web.servlet.HttpServletBean.init(HttpServletBean.java:169)
at javax.servlet.GenericServlet.init(GenericServlet.java:158)
at org.apache.catalina.core.StandardWrapper.initServlet(StandardWrapper.java:1142)
at org.apache.catalina.core.StandardWrapper.loadServlet(StandardWrapper.java:1089)
at org.apache.catalina.core.StandardWrapper.load(StandardWrapper.java:983)
at org.apache.catalina.core.StandardContext.loadOnStartup(StandardContext.java:4956)
at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5270)
at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:150)
at org.apache.catalina.core.StandardContext.reload(StandardContext.java:3828)
at org.apache.catalina.loader.WebappLoader.backgroundProcess(WebappLoader.java:291)
at org.apache.catalina.core.StandardContext.backgroundProcess(StandardContext.java:5608)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1392)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1396)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1396)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.run(ContainerBase.java:1364)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ClassNotFoundException: daoImp.EmployeeCruzDaoImp
at org.apache.catalina.loader.WebappClassLoaderBase.loadClass(WebappClassLoaderBase.java:1364)
at org.apache.catalina.loader.WebappClassLoaderBase.loadClass(WebappClassLoaderBase.java:1185)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:340)
at org.springframework.util.ClassUtils.forName(ClassUtils.java:275)
at org.springframework.beans.factory.support.AbstractBeanDefinition.resolveBeanClass(AbstractBeanDefinition.java:437)
at org.springframework.beans.factory.support.AbstractBeanFactory.doResolveBeanClass(AbstractBeanFactory.java:1430)
at org.springframework.beans.factory.support.AbstractBeanFactory.resolveBeanClass(AbstractBeanFactory.java:1374)
... 28 more

java.lang.NoClassDefFoundError: org/springframework/data/hadoop/configuration/ConfigurationFactoryBean

I am using Hadoop 2.5.2, Spring 4.1, Spring Data Hadoop 2.0.0.RELEASE.
Whenever I specify a configuration tag using the Spring Hadoop namespace in the Spring bean configuration file, I get the exception below.
Spring Data Hadoop XML config:
<?xml version="1.0" encoding="UTF-8"?>
<beans:beans xmlns="http://www.springframework.org/schema/hadoop"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:beans="http://www.springframework.org/schema/beans"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
<context:property-placeholder location="hadoop.properties"/>
<configuration>
fs.defaultFS=${hd.fs}
yarn.resourcemanager.address=${hd.rm}
mapreduce.framework.name=yarn
mapreduce.jobhistory.address=${hd.jh}
</configuration>
<job id="samplejob"
input-path="${input.path}"
output-path="${output.path}"
jar-by-class="com.ihub.BookXDriver"
mapper="com.ihub.BookXMapper"
reducer="com.ihub.BookXReducer"/>
<job-runner id="runner" run-at-startup="true"
job-ref="samplejob" />
</beans:beans>
stack trace:
2015-08-03 12:32:04,930 INFO [main] support.ClassPathXmlApplicationContext (AbstractApplicationContext.java:prepareRefresh(510)) - Refreshing org.springframework.context.support.ClassPathXmlApplicationContext#53533c55: startup date [Mon Aug 03 12:32:04 IST 2015]; root of context hierarchy
2015-08-03 12:32:05,067 INFO [main] xml.XmlBeanDefinitionReader (XmlBeanDefinitionReader.java:loadBeanDefinitions(317)) - Loading XML bean definitions from class path resource [com/ihub/config.xml]
Exception in thread "main" org.springframework.beans.factory.BeanDefinitionStoreException: Unexpected exception parsing XML document from class path resource [com/ihub/config.xml]; nested exception is java.lang.NoClassDefFoundError: org/springframework/data/hadoop/configuration/ConfigurationFactoryBean
at org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:414)
at org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:336)
at org.springframework.beans.factory.xml.XmlBeanDefinitionReader.loadBeanDefinitions(XmlBeanDefinitionReader.java:304)
at org.springframework.beans.factory.support.AbstractBeanDefinitionReader.loadBeanDefinitions(AbstractBeanDefinitionReader.java:180)
at org.springframework.beans.factory.support.AbstractBeanDefinitionReader.loadBeanDefinitions(AbstractBeanDefinitionReader.java:216)
at org.springframework.beans.factory.support.AbstractBeanDefinitionReader.loadBeanDefinitions(AbstractBeanDefinitionReader.java:187)
at org.springframework.beans.factory.support.AbstractBeanDefinitionReader.loadBeanDefinitions(AbstractBeanDefinitionReader.java:251)
at org.springframework.context.support.AbstractXmlApplicationContext.loadBeanDefinitions(AbstractXmlApplicationContext.java:127)
at org.springframework.context.support.AbstractXmlApplicationContext.loadBeanDefinitions(AbstractXmlApplicationContext.java:93)
at org.springframework.context.support.AbstractRefreshableApplicationContext.refreshBeanFactory(AbstractRefreshableApplicationContext.java:129)
at org.springframework.context.support.AbstractApplicationContext.obtainFreshBeanFactory(AbstractApplicationContext.java:537)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:452)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:139)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:83)
at com.ihub.BookXDriver.main(BookXDriver.java:50)
Caused by: java.lang.NoClassDefFoundError: org/springframework/data/hadoop/configuration/ConfigurationFactoryBean
at org.springframework.data.hadoop.config.HadoopConfigParser.getBeanClass(HadoopConfigParser.java:32)
at org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser.parseInternal(AbstractSingleBeanDefinitionParser.java:66)
at org.springframework.beans.factory.xml.AbstractBeanDefinitionParser.parse(AbstractBeanDefinitionParser.java:60)
at org.springframework.beans.factory.xml.NamespaceHandlerSupport.parse(NamespaceHandlerSupport.java:74)
at org.springframework.data.hadoop.config.HadoopNamespaceHandler.parse(HadoopNamespaceHandler.java:70)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseCustomElement(BeanDefinitionParserDelegate.java:1427)
at org.springframework.beans.factory.xml.BeanDefinitionParserDelegate.parseCustomElement(BeanDefinitionParserDelegate.java:1417)
at org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.parseBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:174)
at org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.doRegisterBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:144)
at org.springframework.beans.factory.xml.DefaultBeanDefinitionDocumentReader.registerBeanDefinitions(DefaultBeanDefinitionDocumentReader.java:100)
at org.springframework.beans.factory.xml.XmlBeanDefinitionReader.registerBeanDefinitions(XmlBeanDefinitionReader.java:510)
at org.springframework.beans.factory.xml.XmlBeanDefinitionReader.doLoadBeanDefinitions(XmlBeanDefinitionReader.java:392)
... 14 more
Caused by: java.lang.ClassNotFoundException: org.springframework.data.hadoop.configuration.ConfigurationFactoryBean
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 26 more
This error:
java.lang.ClassNotFoundException: org.springframework.data.hadoop.configuration.ConfigurationFactoryBean
points out that a jar is missing from your project classpath.
The ConfigurationFactoryBean class is shipped in spring-data-hadoop 2.0.0.RELEASE, so ensure this jar is present in your project classpath.
If the required jar is present but you are still receiving this error, then the path where the jar is placed is simply not being treated as part of your project's classpath.
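If in doubt, a plain reflective lookup is a quick diagnostic you can run from the same classpath as your application (just a sketch, not part of the fix itself):

public class ClasspathCheck {
    public static void main(String[] args) {
        try {
            // Throws ClassNotFoundException if spring-data-hadoop is not
            // on the runtime classpath.
            Class.forName("org.springframework.data.hadoop.configuration.ConfigurationFactoryBean");
            System.out.println("spring-data-hadoop found on the classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("spring-data-hadoop missing: " + e.getMessage());
        }
    }
}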

SQLException: ?? in Tomcat 7.0.42 Oracle

Trying to get my app to connect to Oracle. Using ojdbc6.jar (with Java 6), Maven 3.0.4, and Tomcat 7.0.42 (clean install). Here's my context.xml:
<Context reloadable="true" crossContext="false">
<Valve className="org.dcri.tomcat.auth.ExtendedFormAuthenticator" includePassword="false" />
<Realm className="org.dcri.tomcat.auth.DatabaseServerLoginModule"
connectionName="xxxxxx"
connectionPassword="xxxxxx"
connectionURL="jdbc:oracle:oci8:@xxxxxx"
driverName="oracle.jdbc.OracleDriver"
digest="SHA1"
.......
Using sqlplus, I'm able to log in to the DB no problem. When trying to connect with the app I get:
SEVERE: Exception opening database connection
java.sql.SQLException: ??
at oracle.jdbc.driver.SQLStateMapping.newSQLException(SQLStateMapping.java:70)
at oracle.jdbc.driver.DatabaseError.newSQLException(DatabaseError.java:112)
at oracle.jdbc.driver.DatabaseError.throwSqlException(DatabaseError.java:173)
at oracle.jdbc.driver.T2CConnection.checkError(T2CConnection.java:650)
at oracle.jdbc.driver.T2CConnection.logon(T2CConnection.java:338)
at oracle.jdbc.driver.PhysicalConnection.<init>(PhysicalConnection.java:490)
at oracle.jdbc.driver.T2CConnection.<init>(T2CConnection.java:133)
at oracle.jdbc.driver.T2CDriverExtension.getConnection(T2CDriverExtension.java:53)
at oracle.jdbc.driver.OracleDriver.connect(OracleDriver.java:465)
at org.apache.catalina.realm.JDBCRealm.open(JDBCRealm.java:710)
at org.apache.catalina.realm.JDBCRealm.startInternal(JDBCRealm.java:788)
at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:150)
at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5317)
at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:150)
at org.apache.catalina.core.ContainerBase.addChildInternal(ContainerBase.java:901)
at org.apache.catalina.core.ContainerBase.addChild(ContainerBase.java:877)
at org.apache.catalina.core.StandardHost.addChild(StandardHost.java:633)
at org.apache.catalina.startup.HostConfig.deployWAR(HostConfig.java:976)
at org.apache.catalina.startup.HostConfig$DeployWar.run(HostConfig.java:1653)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
at java.util.concurrent.FutureTask.run(FutureTask.java:138)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
at java.lang.Thread.run(Thread.java:619)
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#59bdbfec'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#4463a1ae'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#690bc995'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#5675b3ee'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#4977fa9a'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#58d51a54'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Configuring event listener class 'org.apache.catalina.deploy.ApplicationListener#5e54777e'
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.StandardContext listenerStart
FINE: Sending application start events
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.ApplicationContext log
INFO: Set web app root system property: 'webapp.root' = [C:\work\apache-tomcat-7.0.42\apache-tomcat-7.0.42\webapps\employee\]
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.ApplicationContext log
INFO: Initializing log4j from [C:\work\apache-tomcat-7.0.42\apache-tomcat-7.0.42\webapps\employee\WEB-INF\classes\log4j.xml]
Aug 20, 2013 2:05:49 PM org.apache.catalina.core.ApplicationContext log
INFO: Initializing Spring root WebApplicationContext
Aug 20, 2013 2:05:53 PM org.apache.catalina.core.StandardContext listenerStart
SEVERE: Exception sending context initialized event to listener instance of class org.springframework.web.context.ContextLoaderListener
org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'deploymentController' defined in ServletContext resource [/WEB-INF/applicationContext.xml]: Invocation of init method failed; nested exception is org.springframework.transaction.CannotCreateTransactionException: Could not open Hibernate Session for transaction; nested exception is org.hibernate.exception.GenericJDBCException: Cannot open connection
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1338)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:473)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory$1.run(AbstractAutowireCapableBeanFactory.java:409)
at java.security.AccessController.doPrivileged(Native Method)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:380)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:264)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:222)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:261)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:185)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:164)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:429)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:728)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:380)
at org.springframework.web.context.ContextLoader.createWebApplicationContext(ContextLoader.java:255)
at org.springframework.web.context.ContextLoader.initWebApplicationContext(ContextLoader.java:199)
at org.springframework.web.context.ContextLoaderListener.contextInitialized(ContextLoaderListener.java:45)
at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:4939)
at org.apache.catalina.core.StandardContext.startInternal(StandardContext.java:5434)
at org.apache.catalina.util.LifecycleBase.start(LifecycleBase.java:150)
at org.apache.catalina.core.ContainerBase.addChildInternal(ContainerBase.java:901)
at org.apache.catalina.core.ContainerBase.addChild(ContainerBase.java:877)
at org.apache.catalina.core.StandardHost.addChild(StandardHost.java:633)
at org.apache.catalina.startup.HostConfig.deployWAR(HostConfig.java:976)
at org.apache.catalina.startup.HostConfig$DeployWar.run(HostConfig.java:1653)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
at java.util.concurrent.FutureTask.run(FutureTask.java:138)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
at java.lang.Thread.run(Thread.java:619)
Caused by: org.springframework.transaction.CannotCreateTransactionException: Could not open Hibernate Session for transaction; nested exception is org.hibernate.exception.GenericJDBCException: Cannot open connection
at org.springframework.orm.hibernate3.HibernateTransactionManager.doBegin(HibernateTransactionManager.java:599)
at org.springframework.transaction.support.AbstractPlatformTransactionManager.getTransaction(AbstractPlatformTransactionManager.java:374)
at org.springframework.transaction.interceptor.TransactionAspectSupport.createTransactionIfNecessary(TransactionAspectSupport.java:263)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:101)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:171)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:204)
at $Proxy56.findAll(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:307)
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:182)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:149)
at org.springframework.aop.framework.adapter.ThrowsAdviceInterceptor.invoke(ThrowsAdviceInterceptor.java:126)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:171)
at edu.services.aop.TimingAdvice.invoke(TimingAdvice.java:32)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:171)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:204)
at $Proxy57.findAll(Unknown Source)
at edu.services.actions.DeploymentControllerImpl.init(DeploymentControllerImpl.java:232)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeCustomInitMethod(AbstractAutowireCapableBeanFactory.java:1414)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1375)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1335)
... 29 more
Caused by: org.hibernate.exception.GenericJDBCException: Cannot open connection
at org.hibernate.exception.SQLStateConverter.handledNonSpecificException(SQLStateConverter.java:140)
at org.hibernate.exception.SQLStateConverter.convert(SQLStateConverter.java:128)
at org.hibernate.exception.JDBCExceptionHelper.convert(JDBCExceptionHelper.java:66)
at org.hibernate.exception.JDBCExceptionHelper.convert(JDBCExceptionHelper.java:52)
at org.hibernate.jdbc.ConnectionManager.openConnection(ConnectionManager.java:449)
at org.hibernate.jdbc.ConnectionManager.getConnection(ConnectionManager.java:167)
at org.hibernate.jdbc.JDBCContext.connection(JDBCContext.java:142)
at org.hibernate.transaction.JDBCTransaction.begin(JDBCTransaction.java:85)
at org.hibernate.impl.SessionImpl.beginTransaction(SessionImpl.java:1463)
at org.springframework.orm.hibernate3.HibernateTransactionManager.doBegin(HibernateTransactionManager.java:558)
... 56 more
Caused by: org.apache.tomcat.dbcp.dbcp.SQLNestedException: Cannot create PoolableConnectionFactory (??)
at org.apache.tomcat.dbcp.dbcp.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:1549)
at org.apache.tomcat.dbcp.dbcp.BasicDataSource.createDataSource(BasicDataSource.java:1388)
at org.apache.tomcat.dbcp.dbcp.BasicDataSource.getConnection(BasicDataSource.java:1044)
at org.springframework.orm.hibernate3.LocalDataSourceConnectionProvider.getConnection(LocalDataSourceConnectionProvider.java:82)
at org.hibernate.jdbc.ConnectionManager.openConnection(ConnectionManager.java:446)
... 61 more
Not sure what the issue could be. The ?? doesn't really provide much guidance... Ideas?
Thanks

Spring data - hadoop connectivity

I'm trying out Spring Data Hadoop for executing MR code on a remote cluster from my local machine's IDE.
Hadoop 1.1.2, Spring 3.2.4, Spring-Data-Hadoop 1.0.0
My bean configuration file, viz. applicationContext.xml, is as follows:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:hdp="http://www.springframework.org/schema/hadoop"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.2.xsd">
<context:property-placeholder location="resources/hadoop.properties" />
<hdp:configuration file-system-uri="${hd.fs}" job-tracker-uri="${hd.jobtracker.uri}">
</hdp:configuration>
<hdp:job id="wc-job" mapper="com.hadoop.basics.WordCounter.WCMapper"
reducer="com.hadoop.basics.WordCounter.WCReducer" input-path="${wordcount.input.path}"
output-path="${wordcount.output.path}" user="bigdata">
</hdp:job>
<hdp:job-runner id="myjobs-runner" job-ref="wc-job"
run-at-startup="true" />
<hdp:resource-loader id="resourceLoader" uri="${hd.fs}"
user="bigdata" />
</beans>
hadoop.properties
hd.fs=hdfs://cloudx-843-770:9000
hd.jobtracker.uri=cloudx-843-770:9001
wordcount.input.path=/scratchpad/input/Childhood_days.txt
wordcount.output.path=/scratchpad/output
The Java class which I'm running via 'Run As...':
package com.hadoop.basics;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class WordCounter {
private static IntWritable one = new IntWritable(1);
public class WCMapper extends Mapper<Text, Text, Text, IntWritable> {
@Override
protected void map(
Text key,
Text value,
org.apache.hadoop.mapreduce.Mapper<Text, Text, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
StringTokenizer strTokenizer = new StringTokenizer(value.toString());
Text token = new Text();
while (strTokenizer.hasMoreTokens()) {
token.set(strTokenizer.nextToken());
context.write(token, one);
}
}
}
public class WCReducer extends
Reducer<Text, IntWritable, Text, IntWritable> {
@Override
protected void reduce(
Text key,
Iterable<IntWritable> values,
org.apache.hadoop.mapreduce.Reducer<Text, IntWritable, Text, IntWritable>.Context context)
throws IOException, InterruptedException {
// TODO Auto-generated method stub
int sum = 0;
for (IntWritable value : values) {
sum += value.get();
}
context.write(key, new IntWritable(sum));
}
}
public static void main(String[] args) {
AbstractApplicationContext context = new ClassPathXmlApplicationContext(
"applicationContext.xml", WordCounter.class);
System.out.println("Word Count Application Running");
context.registerShutdownHook();
}
}
The output is:
Aug 22, 2013 9:59:02 AM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext#1815338: startup date [Thu Aug 22 09:59:02 IST 2013]; root of context hierarchy
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
Aug 22, 2013 9:59:03 AM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
INFO: Loading properties file from class path resource [resources/hadoop.properties]
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory#7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Aug 22, 2013 9:59:03 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Starting job [wc-job]
Aug 22, 2013 9:59:03 AM org.apache.hadoop.security.UserGroupInformation doAs
SEVERE: PriviledgedActionException as:bigdata via 298790 cause:org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
Aug 22, 2013 9:59:03 AM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
WARNING: Cannot start job [wc-job]
org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Aug 22, 2013 9:59:03 AM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory#7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Caused by: java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:209)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
... 13 more
Caused by: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
... 20 more
As is obvious, the user 298790 (my local Windows machine user) is not recognized on the cluster - that's why, in the config file,
I specified user="bigdata" in the job's configuration, as mentioned in the docs.
The docs also mention:
SHDP obeys the HDFS permissions, using the identity of the current
user (by default) for interacting with the file system. In particular,
the HdfsResourceLoader considers when doing pattern matching, only the
files that it is supposed to see and does not perform any privileged
action. It is possible however to specify a different user, meaning
the ResourceLoader interacts with HDFS using that user's rights
- however this obeys the user impersonation rules
As per the API, I decided to use HdfsResourceLoader but couldn't find any example or even configuration in the documentation - can anyone provide any pointers?
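For reference, a minimal sketch of how HdfsResourceLoader might be used programmatically (assuming SHDP 1.0's HdfsResourceLoader(Configuration, URI, String) constructor and the URI/user values from hadoop.properties above; this covers only resource loading, not job submission):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.data.hadoop.fs.HdfsResourceLoader;

public class HdfsResourceLoaderSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pattern matching runs with the given user's rights, subject to
        // the same impersonation rules as job submission.
        HdfsResourceLoader loader = new HdfsResourceLoader(
                conf, new URI("hdfs://cloudx-843-770:9000"), "bigdata");
        for (Resource resource : loader.getResources("/scratchpad/input/*.txt")) {
            System.out.println(resource.getURI());
        }
    }
}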
As per Hadoop Secure Impersonation, I believe that I need to add my Windows user 298790 to the remote cluster machine's (Ubuntu) user group and also my Windows host name, which I find infeasible in case of a large number of users and changing Windows client machines. In case my assumption is correct, what can be done to avoid adding and configuring all these users?
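For context, impersonation on the client side roughly boils down to the following pattern (a hedged sketch using Hadoop's UserGroupInformation API; the cluster side is what the hadoop.proxyuser.* properties below control):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

public class ImpersonationSketch {
    public static void main(String[] args) throws Exception {
        // The real (local) user, e.g. 298790, asks to act as "bigdata".
        // The JobTracker/NameNode permit this only if core-site.xml has
        // matching hadoop.proxyuser.<realUser>.{groups,hosts} entries.
        UserGroupInformation proxy = UserGroupInformation.createProxyUser(
                "bigdata", UserGroupInformation.getCurrentUser());
        proxy.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                // submit the job / access HDFS here as "bigdata"
                return null;
            }
        });
    }
}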
Adding the changes to core-site.xml:
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>
<property>
<name>fs.default.name</name>
<value>hdfs://cloudx-843-770:9000</value>
</property>
<property>
<name>hadoop.proxyuser.298790.groups</name>
<value>bigdata</value>
<description>Allow the superuser bigdata to impersonate any members of the group bigdata</description>
</property>
<property>
<name>hadoop.proxyuser.298790.hosts</name>
<value>*</value>
<description>The superuser can connect only from INFVA03351 to impersonate a user</description>
</property>
</configuration>
I restarted all the Hadoop processes but the error persists.
Then, I decided to create a new user, viz. 298790, on the remote Ubuntu machine and add it to the group bigdata for impersonation purposes:
root@cloudx-843-770:/home/bigdata# useradd -G bigdata 298790
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# usermod -G bigdata 298790
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# su 298790
$ groups
298790 bigdata
root@cloudx-843-770:/home/bigdata#
root@cloudx-843-770:/home/bigdata# cat /etc/passwd
root:x:0:0:root:/root:/bin/bash
daemon:x:1:1:daemon:/usr/sbin:/bin/sh
bin:x:2:2:bin:/bin:/bin/sh
sys:x:3:3:sys:/dev:/bin/sh
sync:x:4:65534:sync:/bin:/bin/sync
games:x:5:60:games:/usr/games:/bin/sh
man:x:6:12:man:/var/cache/man:/bin/sh
lp:x:7:7:lp:/var/spool/lpd:/bin/sh
mail:x:8:8:mail:/var/mail:/bin/sh
news:x:9:9:news:/var/spool/news:/bin/sh
uucp:x:10:10:uucp:/var/spool/uucp:/bin/sh
proxy:x:13:13:proxy:/bin:/bin/sh
www-data:x:33:33:www-data:/var/www:/bin/sh
backup:x:34:34:backup:/var/backups:/bin/sh
list:x:38:38:Mailing List Manager:/var/list:/bin/sh
irc:x:39:39:ircd:/var/run/ircd:/bin/sh
gnats:x:41:41:Gnats Bug-Reporting System (admin):/var/lib/gnats:/bin/sh
nobody:x:65534:65534:nobody:/nonexistent:/bin/sh
libuuid:x:100:101::/var/lib/libuuid:/bin/sh
syslog:x:101:103::/home/syslog:/bin/false
mysql:x:102:105:MySQL Server,,,:/nonexistent:/bin/false
messagebus:x:103:106::/var/run/dbus:/bin/false
whoopsie:x:104:107::/nonexistent:/bin/false
landscape:x:105:110::/var/lib/landscape:/bin/false
sshd:x:106:65534::/var/run/sshd:/usr/sbin/nologin
tomcat6:x:107:113::/usr/share/tomcat6:/bin/false
coesystem:x:1000:1000:coesystem,,,:/home/coesystem:/bin/bash
hpcc:x:999:1001:hpcc Runtime User:/home/hpcc:/bin/sh
hduser:x:1001:1002:hduser,1,1,1,1:/home/hduser:/bin/bash
bigdata:x:1002:1003:Big Data,1,1,1,1:/home/bigdata:/bin/bash
298790:x:1003:1004::/home/298790:/bin/sh
But now when I attempt to stop (and then start) the cluster, it asks for the password for all the processes:
bigdata@cloudx-843-770:~/hadoop_ecosystem/apache_hadoop/hadoop-1.1.2/bin$ stop-all.sh
Warning: $HADOOP_HOME is deprecated.
stopping jobtracker
bigdata@localhost's password:
localhost: stopping tasktracker
stopping namenode
bigdata@localhost's password:
localhost: stopping datanode
bigdata@localhost's password:
localhost: stopping secondarynamenode
And now the error is slightly different - it first fails to connect and then fails to impersonate:
Aug 22, 2013 5:14:17 PM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing org.springframework.context.support.ClassPathXmlApplicationContext@922804: startup date [Thu Aug 22 17:14:17 IST 2013]; root of context hierarchy
Aug 22, 2013 5:14:17 PM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from class path resource [com/hadoop/basics/applicationContext.xml]
Aug 22, 2013 5:14:17 PM org.springframework.core.io.support.PropertiesLoaderSupport loadProperties
INFO: Loading properties file from class path resource [resources/hadoop.properties]
Aug 22, 2013 5:14:17 PM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Aug 22, 2013 5:14:18 PM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
INFO: Starting job [wc-job]
Aug 22, 2013 5:14:20 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:22 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:24 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:26 PM org.apache.hadoop.ipc.Client$Connection handleConnectionFailure
INFO: Retrying connect to server: cloudx-843-770/172.25.37.135:9001. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
Aug 22, 2013 5:14:36 PM org.apache.hadoop.security.UserGroupInformation doAs
SEVERE: PriviledgedActionException as:bigdata via 298790 cause:org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
Aug 22, 2013 5:14:36 PM org.springframework.data.hadoop.mapreduce.JobExecutor$2 run
WARNING: Cannot start job [wc-job]
org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Aug 22, 2013 5:14:36 PM org.springframework.beans.factory.support.DefaultSingletonBeanRegistry destroySingletons
INFO: Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@7c197e: defining beans [org.springframework.context.support.PropertySourcesPlaceholderConfigurer#0,hadoopConfiguration,wc-job,myjobs-runner,resourceLoader]; root of factory hierarchy
Exception in thread "main" org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'myjobs-runner': Invocation of init method failed; nested exception is java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1482)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:521)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:458)
at org.springframework.beans.factory.support.AbstractBeanFactory$1.getObject(AbstractBeanFactory.java:295)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:223)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:292)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:194)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:628)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:932)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:479)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:197)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:172)
at org.springframework.context.support.ClassPathXmlApplicationContext.<init>(ClassPathXmlApplicationContext.java:158)
at com.hadoop.basics.WordCounter.main(WordCounter.java:58)
Caused by: java.lang.IllegalStateException: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:209)
at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:49)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:168)
at org.springframework.data.hadoop.mapreduce.JobExecutor.startJobs(JobExecutor.java:160)
at org.springframework.data.hadoop.mapreduce.JobRunner.call(JobRunner.java:52)
at org.springframework.data.hadoop.mapreduce.JobRunner.afterPropertiesSet(JobRunner.java:44)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.invokeInitMethods(AbstractAutowireCapableBeanFactory.java:1541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.initializeBean(AbstractAutowireCapableBeanFactory.java:1479)
... 13 more
Caused by: org.apache.hadoop.ipc.RemoteException: User: 298790 is not allowed to impersonate bigdata
at org.apache.hadoop.ipc.Client.call(Client.java:1107)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at org.apache.hadoop.mapred.$Proxy2.getProtocolVersion(Unknown Source)
at org.apache.hadoop.ipc.RPC.getProxy(RPC.java:411)
at org.apache.hadoop.mapred.JobClient.createRPCProxy(JobClient.java:499)
at org.apache.hadoop.mapred.JobClient.init(JobClient.java:490)
at org.apache.hadoop.mapred.JobClient.<init>(JobClient.java:473)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:513)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Unknown Source)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
at org.apache.hadoop.mapreduce.Job.connect(Job.java:511)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:499)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:530)
at org.springframework.data.hadoop.mapreduce.JobExecutor$2.run(JobExecutor.java:197)
... 20 more
If you ever wish to impersonate one user as another, Apache Hadoop requires that your service-side configuration allow it.
This means that if you're running as "foo" and want to actually submit the job as "bar", then your NameNode/JobTracker must have a core-site.xml loaded that allows "foo" to proxy other users; typically, something like the following must be present in the NameNode's/JobTracker's core-site.xml:
<property>
  <name>hadoop.proxyuser.foo.groups</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.foo.hosts</name>
  <value>*</value>
</property>
This would allow user foo to impersonate any other user (* for groups), and do so when submitting from any host (* for hosts).
While it isn't strictly necessary, it is definitely recommended that the users and groups involved also exist on the NameNode, so that permissions, group resolution, etc. work properly. More on that here: http://www.cloudera.com/blog/2012/03/authorization-and-authentication-in-hadoop/
The documentation at http://static.springsource.org/spring-hadoop/docs/1.0.x/reference/html/security.html#security:kerberos could be clearer in addressing this.
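For reference, below is a rough sketch of what proxy-user submission looks like at the plain Hadoop API level - judging by the "as:bigdata via 298790" line in your trace, this doAs path appears to be what SHDP's user attribute uses under the covers (the job-submission body is elided):
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
    public static void main(String[] args) throws Exception {
        // The real client-side user, e.g. 298790 on the Windows machine
        UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
        // Impersonate bigdata - the JobTracker/NameNode must whitelist the real
        // user via hadoop.proxyuser.<realUser>.groups/.hosts in its core-site.xml
        UserGroupInformation proxyUser =
                UserGroupInformation.createProxyUser("bigdata", realUser);
        proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                // configure and submit the MapReduce job here, as bigdata (elided)
                return null;
            }
        });
    }
}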

NullPointerException in Spring Dependency Injection with JSF2

I am using JSF2 and Spring, with Spring just for DI. When I make a request from the page, I get a NullPointerException - meaning the autowiring is not working.
I am getting the following error; below is my whole server log.
May 19, 2013 8:10:44 PM org.apache.catalina.startup.HostConfig deployDescriptor
INFO: Deploying configuration descriptor C:\Users\kshitij\AppData\Roaming\NetBeans\7.3\apache-tomcat-7.0.34.0_base\conf\Catalina\localhost\Spirng_jsf.xml
May 19, 2013 8:10:47 PM org.springframework.web.context.ContextLoader initWebApplicationContext
INFO: Root WebApplicationContext: initialization started
May 19, 2013 8:10:47 PM org.springframework.context.support.AbstractApplicationContext prepareRefresh
INFO: Refreshing Root WebApplicationContext: startup date [Sun May 19 20:10:47 IST 2013]; root of context hierarchy
May 19, 2013 8:10:47 PM org.springframework.beans.factory.xml.XmlBeanDefinitionReader loadBeanDefinitions
INFO: Loading XML bean definitions from ServletContext resource [/WEB-INF/beans.xml]
May 19, 2013 8:10:47 PM org.springframework.beans.factory.support.DefaultListableBeanFactory preInstantiateSingletons
INFO: Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@34ab7a: defining beans [myBean,userdaoImpl,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor,org.springframework.context.annotation.ConfigurationClassPostProcessor$ImportAwareBeanPostProcessor#0]; root of factory hierarchy
May 19, 2013 8:10:47 PM org.springframework.web.context.ContextLoader initWebApplicationContext
INFO: Root WebApplicationContext: initialization completed in 714 ms
May 19, 2013 8:10:47 PM com.sun.faces.config.ConfigureListener contextInitialized
INFO: Initializing Mojarra 2.1.13 ( 20120907-1514) for context '/Spirng_jsf'
May 19, 2013 8:10:47 PM com.sun.faces.spi.InjectionProviderFactory createInstance
INFO: JSF1048: PostConstruct/PreDestroy annotations present. ManagedBeans methods marked with these annotations will have said annotations processed.
May 19, 2013 8:10:48 PM org.apache.catalina.util.LifecycleBase start
INFO: The start() method was called on component [StandardEngine[Catalina].StandardHost[localhost].StandardContext[/Spirng_jsf]] after start() had already been called. The second call will be ignored.
May 19, 2013 8:11:34 PM javax.faces.component.UIViewRoot$ViewMap put
WARNING: Setting non-serializable attribute value into ViewMap: (key: myBean, value class: com.ksh.excel.MyBean)
May 19, 2013 8:11:34 PM com.sun.faces.lifecycle.InvokeApplicationPhase execute
WARNING: #{myBean.kshitij}: java.lang.NullPointerException
javax.faces.FacesException: #{myBean.kshitij}: java.lang.NullPointerException
at com.sun.faces.application.ActionListenerImpl.processAction(ActionListenerImpl.java:117)
at javax.faces.component.UICommand.broadcast(UICommand.java:315)
at javax.faces.component.UIViewRoot.broadcastEvents(UIViewRoot.java:794)
at javax.faces.component.UIViewRoot.processApplication(UIViewRoot.java:1259)
at com.sun.faces.lifecycle.InvokeApplicationPhase.execute(InvokeApplicationPhase.java:81)
at com.sun.faces.lifecycle.Phase.doPhase(Phase.java:101)
at com.sun.faces.lifecycle.LifecycleImpl.execute(LifecycleImpl.java:118)
at javax.faces.webapp.FacesServlet.service(FacesServlet.java:593)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:305)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:210)
at org.netbeans.modules.web.monitor.server.MonitorFilter.doFilter(MonitorFilter.java:393)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:243)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:210)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:222)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:123)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:472)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:171)
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:99)
at org.apache.catalina.valves.AccessLogValve.invoke(AccessLogValve.java:936)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:118)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:407)
at org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1004)
at org.apache.coyote.AbstractProtocol$AbstractConnectionHandler.process(AbstractProtocol.java:589)
at org.apache.tomcat.util.net.JIoEndpoint$SocketProcessor.run(JIoEndpoint.java:310)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:885)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:907)
at java.lang.Thread.run(Thread.java:619)
Caused by: javax.faces.el.EvaluationException: java.lang.NullPointerException
at javax.faces.component.MethodBindingMethodExpressionAdapter.invoke(MethodBindingMethodExpressionAdapter.java:102)
at com.sun.faces.application.ActionListenerImpl.processAction(ActionListenerImpl.java:101)
... 26 more
Caused by: java.lang.NullPointerException
at com.ksh.excel.MyBean.kshitij(MyBean.java:26)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.el.parser.AstValue.invoke(AstValue.java:278)
at org.apache.el.MethodExpressionImpl.invoke(MethodExpressionImpl.java:274)
at com.sun.faces.facelets.el.TagMethodExpression.invoke(TagMethodExpression.java:105)
at javax.faces.component.MethodBindingMethodExpressionAdapter.invoke(MethodBindingMethodExpressionAdapter.java:88)
... 27 more
My interface:
package com.ksh.excel;

import org.springframework.stereotype.Component;

@Component
public interface Userdao {
    public void print();
}
The interface implementation:
package com.ksh.excel;

import org.springframework.stereotype.Component;

@Component
public class UserdaoImpl implements Userdao {
    @Override
    public void print() {
        System.out.println("Kshitij ***********************************");
    }
}
And the JSF managed bean:
package com.ksh.excel;

import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@ManagedBean
@ViewScoped
@Component
public class MyBean {
    @Autowired
    private Userdao userdao;

    public String kshitij() {
        userdao.print();
        return null;
    }
}
Remove @Component from your Userdao interface. Also add
<context:component-scan base-package="com.ksh.excel" />
to your applicationContext.xml, if you haven't added it already. For further information you can look here.
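For completeness, here is a minimal sketch of what that context file (your log shows it loaded as /WEB-INF/beans.xml) could look like with scanning enabled - adjust the schema versions to your Spring version:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns:context="http://www.springframework.org/schema/context"
  xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
    http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd">
  <!-- Picks up @Component classes such as UserdaoImpl in com.ksh.excel -->
  <context:component-scan base-package="com.ksh.excel" />
</beans>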
Cheers!

Resources