I am using Oozie 4.1.0 with CDH 5.5.1. I was enabling SLA as per the answer to this question. I removed all the FQCNs from the oozie.services.ext property. Oozie uses PostgreSQL as its back-end database, and I don't have its credentials.
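For reference, the Oozie SLA documentation has oozie.services.ext carry the SLA-related services; it looks roughly like this (a sketch; verify the exact class list against your Oozie version):

<!-- sketch based on the Oozie SLA docs; confirm class names for your release -->
<property>
    <name>oozie.services.ext</name>
    <value>
        org.apache.oozie.service.EventHandlerService,
        org.apache.oozie.sla.service.SLAService
    </value>
</property>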
It then started throwing the following exception:
2016-05-03 09:31:28,312 FATAL org.apache.oozie.service.Services: SERVER[data4] Runtime Exception during Services Load. Check your list of 'oozie.services' or 'oozie.services.ext'
2016-05-03 09:31:28,321 FATAL org.apache.oozie.service.Services: SERVER[data4] E0103: Could not load service classes, Cannot create PoolableConnectionFactory (FATAL: password authentication failed for user "oozie_oozie_server")
org.apache.oozie.service.ServiceException: E0103: Could not load service classes, Cannot create PoolableConnectionFactory (FATAL: password authentication failed for user "oozie_oozie_server")
at org.apache.oozie.service.Services.loadServices(Services.java:309)
at org.apache.oozie.service.Services.init(Services.java:213)
at org.apache.oozie.servlet.ServicesLoader.contextInitialized(ServicesLoader.java:46)
at org.apache.catalina.core.StandardContext.listenerStart(StandardContext.java:4210)
at org.apache.catalina.core.StandardContext.start(StandardContext.java:4709)
at org.apache.catalina.core.ContainerBase.addChildInternal(ContainerBase.java:802)
at org.apache.catalina.core.ContainerBase.addChild(ContainerBase.java:779)
at org.apache.catalina.core.StandardHost.addChild(StandardHost.java:583)
at org.apache.catalina.startup.HostConfig.deployWAR(HostConfig.java:944)
at org.apache.catalina.startup.HostConfig.deployWARs(HostConfig.java:779)
at org.apache.catalina.startup.HostConfig.deployApps(HostConfig.java:505)
at org.apache.catalina.startup.HostConfig.start(HostConfig.java:1322)
at org.apache.catalina.startup.HostConfig.lifecycleEvent(HostConfig.java:325)
at org.apache.catalina.util.LifecycleSupport.fireLifecycleEvent(LifecycleSupport.java:142)
at org.apache.catalina.core.ContainerBase.start(ContainerBase.java:1068)
at org.apache.catalina.core.StandardHost.start(StandardHost.java:822)
at org.apache.catalina.core.ContainerBase.start(ContainerBase.java:1060)
at org.apache.catalina.core.StandardEngine.start(StandardEngine.java:463)
at org.apache.catalina.core.StandardService.start(StandardService.java:525)
at org.apache.catalina.core.StandardServer.start(StandardServer.java:759)
at org.apache.catalina.startup.Catalina.start(Catalina.java:595)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.catalina.startup.Bootstrap.start(Bootstrap.java:289)
at org.apache.catalina.startup.Bootstrap.main(Bootstrap.java:414)
Caused by: <openjpa-2.2.2-r422266:1468616 fatal general error> org.apache.openjpa.persistence.PersistenceException: Cannot create PoolableConnectionFactory (FATAL: password authentication failed for user "oozie_oozie_server")
at org.apache.openjpa.jdbc.sql.DBDictionaryFactory.newDBDictionary(DBDictionaryFactory.java:102)
at org.apache.openjpa.jdbc.conf.JDBCConfigurationImpl.getDBDictionaryInstance(JDBCConfigurationImpl.java:603)
at org.apache.openjpa.jdbc.meta.MappingRepository.endConfiguration(MappingRepository.java:1518)
at org.apache.openjpa.lib.conf.Configurations.configureInstance(Configurations.java:531)
at org.apache.openjpa.lib.conf.Configurations.configureInstance(Configurations.java:456)
at org.apache.openjpa.lib.conf.PluginValue.instantiate(PluginValue.java:120)
at org.apache.openjpa.conf.MetaDataRepositoryValue.instantiate(MetaDataRepositoryValue.java:68)
at org.apache.openjpa.lib.conf.ObjectValue.instantiate(ObjectValue.java:83)
at org.apache.openjpa.conf.OpenJPAConfigurationImpl.newMetaDataRepositoryInstance(OpenJPAConfigurationImpl.java:967)
at org.apache.openjpa.conf.OpenJPAConfigurationImpl.getMetaDataRepositoryInstance(OpenJPAConfigurationImpl.java:958)
at org.apache.openjpa.kernel.AbstractBrokerFactory.makeReadOnly(AbstractBrokerFactory.java:644)
at org.apache.openjpa.kernel.AbstractBrokerFactory.newBroker(AbstractBrokerFactory.java:203)
at org.apache.openjpa.kernel.DelegatingBrokerFactory.newBroker(DelegatingBrokerFactory.java:156)
at org.apache.openjpa.persistence.EntityManagerFactoryImpl.createEntityManager(EntityManagerFactoryImpl.java:227)
at org.apache.openjpa.persistence.EntityManagerFactoryImpl.createEntityManager(EntityManagerFactoryImpl.java:154)
at org.apache.openjpa.persistence.EntityManagerFactoryImpl.createEntityManager(EntityManagerFactoryImpl.java:60)
at org.apache.oozie.service.JPAService.getEntityManager(JPAService.java:500)
at org.apache.oozie.service.JPAService.init(JPAService.java:201)
at org.apache.oozie.service.Services.setServiceInternal(Services.java:386)
at org.apache.oozie.service.Services.setService(Services.java:372)
at org.apache.oozie.service.Services.loadServices(Services.java:305)
... 26 more
Caused by: org.apache.commons.dbcp.SQLNestedException: Cannot create PoolableConnectionFactory (FATAL: password authentication failed for user "oozie_oozie_server")
at org.apache.commons.dbcp.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:1549)
at org.apache.commons.dbcp.BasicDataSource.createDataSource(BasicDataSource.java:1388)
at org.apache.commons.dbcp.BasicDataSource.getConnection(BasicDataSource.java:1044)
at org.apache.openjpa.lib.jdbc.DelegatingDataSource.getConnection(DelegatingDataSource.java:110)
at org.apache.openjpa.lib.jdbc.DecoratingDataSource.getConnection(DecoratingDataSource.java:87)
at org.apache.openjpa.jdbc.sql.DBDictionaryFactory.newDBDictionary(DBDictionaryFactory.java:91)
... 46 more
Caused by: org.postgresql.util.PSQLException: FATAL: password authentication failed for user "oozie_oozie_server"
at org.postgresql.core.v3.ConnectionFactoryImpl.doAuthentication(ConnectionFactoryImpl.java:291)
at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:108)
at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:66)
at org.postgresql.jdbc2.AbstractJdbc2Connection.<init>(AbstractJdbc2Connection.java:125)
at org.postgresql.jdbc3.AbstractJdbc3Connection.<init>(AbstractJdbc3Connection.java:30)
at org.postgresql.jdbc3g.AbstractJdbc3gConnection.<init>(AbstractJdbc3gConnection.java:22)
at org.postgresql.jdbc4.AbstractJdbc4Connection.<init>(AbstractJdbc4Connection.java:30)
at org.postgresql.jdbc4.Jdbc4Connection.<init>(Jdbc4Connection.java:24)
at org.postgresql.Driver.makeConnection(Driver.java:393)
at org.postgresql.Driver.connect(Driver.java:267)
at org.apache.commons.dbcp.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:38)
at org.apache.commons.dbcp.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:582)
at org.apache.commons.dbcp.BasicDataSource.validateConnectionFactory(BasicDataSource.java:1556)
at org.apache.commons.dbcp.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:1545)
... 51 more
Check catalina.log in the /var/log/oozie/ directory for any errors.
Apart from that, make sure you didn't change the DB settings for Oozie manually; you can verify them against the Oozie default configuration.
Otherwise, re-create the Oozie service and launch it again.
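You can also rule the credentials in or out by connecting to PostgreSQL directly with the same user Oozie uses. Host, port, and database name below are assumptions; take the real values from oozie.service.JPAService.jdbc.url in oozie-site.xml:

# prompts for the password set in oozie.service.JPAService.jdbc.password
psql -h localhost -p 5432 -U oozie_oozie_server -d oozie

If this fails with the same "password authentication failed" message, the problem is the stored credentials (or pg_hba.conf), not the service list.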
When I run the Sqoop job from the edge node, it works fine and extracts data from Oracle, but when I schedule the same job in crontab, it throws a Kerberos security error.
I found the same problem reported on the Hortonworks site (see the link below), but it has no valid answer.
https://community.hortonworks.com/questions/61856/kerberos-ticket-error-in-a-cron-job.html
I have a valid Kerberos ticket before the cron job starts, valid for 10 hours, but when I schedule the cron job to run 5 minutes later, it still throws the Kerberos error.
Please suggest what steps need to be followed before starting the Sqoop import in a cron job.
Below is the detailed error message:
17/11/23 11:24:17 ERROR tool.ImportTool: Encountered IOException running import job: java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException:
GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)];
Host Details : local host is: "ps2pr028380.express-scripts.com/10.221.171.22"; destination host is: "ps2pr028377.express-scripts.com":8020;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:782)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1558)
at org.apache.hadoop.ipc.Client.call(Client.java:1498)
at org.apache.hadoop.ipc.Client.call(Client.java:1398)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
at com.sun.proxy.$Proxy11.getDelegationToken(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDelegationToken(ClientNamenodeProtocolTranslatorPB.java:980)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185)
at com.sun.proxy.$Proxy12.getDelegationToken(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getDelegationToken(DFSClient.java:1041)
at org.apache.hadoop.hdfs.DistributedFileSystem.getDelegationToken(DistributedFileSystem.java:1688)
at org.apache.hadoop.fs.FileSystem.collectDelegationTokens(FileSystem.java:549)
at org.apache.hadoop.fs.FileSystem.addDelegationTokens(FileSystem.java:527)
at org.apache.hadoop.hdfs.DistributedFileSystem.addDelegationTokens(DistributedFileSystem.java:2400)
at org.apache.hadoop.mapreduce.security.TokenCache.obtainTokensForNamenodesInternal(TokenCache.java:140)
at org.apache.hadoop.mapreduce.security.TokenCache.obtainTokensForNamenodesInternal(TokenCache.java:100)
at org.apache.hadoop.mapreduce.security.TokenCache.obtainTokensForNamenodes(TokenCache.java:80)
at org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.checkOutputSpecs(FileOutputFormat.java:142)
at org.apache.hadoop.mapreduce.JobSubmitter.checkSpecs(JobSubmitter.java:266)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:139)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
at org.apache.sqoop.mapreduce.ImportJobBase.doSubmitJob(ImportJobBase.java:200)
at org.apache.sqoop.mapreduce.ImportJobBase.runJob(ImportJobBase.java:173)
at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:270)
at org.apache.sqoop.manager.SqlManager.importQuery(SqlManager.java:748)
at org.apache.sqoop.manager.OracleManager.importQuery(OracleManager.java:454)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:509)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:615)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:225)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.main(Sqoop.java:243)
Caused by: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
at org.apache.hadoop.ipc.Client$Connection$1.run(Client.java:720)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
at org.apache.hadoop.ipc.Client$Connection.handleSaslConnectionFailure(Client.java:683)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:770)
at org.apache.hadoop.ipc.Client$Connection.access$3200(Client.java:397)
at org.apache.hadoop.ipc.Client.getConnection(Client.java:1620)
at org.apache.hadoop.ipc.Client.call(Client.java:1451)
... 43 more
Caused by: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:413)
at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:595)
at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:397)
at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:762)
at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:758)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:757)
... 46 more
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
... 55 more
I solved my own question: I added the command below to my script and call it before Sqoop starts.
kinit -kt /home/userName/userName.keytab userName@PROD.DATAHUB.LOCAL
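For context, the whole wrapper script that cron invokes looks roughly like this; the paths, principal, and Sqoop arguments are placeholders:

#!/bin/bash
# cron starts with a minimal environment, so make sure kinit and sqoop are on PATH
export PATH=/usr/bin:/bin:$PATH
# acquire a fresh TGT from the keytab; no interactive password, so it works unattended
kinit -kt /home/userName/userName.keytab userName@PROD.DATAHUB.LOCAL
# with the ticket in place, the import can authenticate to HDFS and YARN
sqoop import --connect jdbc:oracle:thin:@//dbhost:1521/SERVICE --table MY_TABLE --target-dir /tmp/out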
We are using a Hortonworks cluster, and they have another solution for this problem; please find the link to it below:
https://community.hortonworks.com/questions/149168/sqoop-with-kerberos-security-not-working-in-cron-t.html
I'm currently trying to connect via beeline to a kerberized cluster.
I can do the kinit with my principal (klist confirms it), but when I try to connect to my Hive server I get this error.
17/11/03 15:52:02 [main]: ERROR transport.TSaslTransport: SASL negotiation failure
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:212)
at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
at org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:203)
at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:168)
at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:105)
at java.sql.DriverManager.getConnection(DriverManager.java:571)
at java.sql.DriverManager.getConnection(DriverManager.java:187)
at org.apache.hive.beeline.DatabaseConnection.connect(DatabaseConnection.java:142)
at org.apache.hive.beeline.DatabaseConnection.getConnection(DatabaseConnection.java:207)
at org.apache.hive.beeline.Commands.connect(Commands.java:1457)
at org.apache.hive.beeline.Commands.connect(Commands.java:1352)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hive.beeline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:52)
at org.apache.hive.beeline.BeeLine.execCommandWithPrefix(BeeLine.java:1130)
at org.apache.hive.beeline.BeeLine.dispatch(BeeLine.java:1169)
at org.apache.hive.beeline.BeeLine.execute(BeeLine.java:1002)
at org.apache.hive.beeline.BeeLine.begin(BeeLine.java:914)
at org.apache.hive.beeline.BeeLine.mainWithInputRedirection(BeeLine.java:510)
at org.apache.hive.beeline.BeeLine.main(BeeLine.java:493)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:121)
at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:223)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
... 35 more
HS2 may be unavailable, check server status
Error: Could not open client transport with JDBC Uri: (.connection String deleted by me :P.) : GSS initiate failed (state=08S01,code=0)
This works like a charm inside the cluster nodes: I can do exactly the same thing on one of the cluster machines and it works perfectly, but not on an outside computer, even with a valid KDC ticket.
What am I missing here? We had a similar situation with Oracle ODI, but I think we solved that by creating environment variables on the ODI Agent node (which had the jars, libraries, and all the Hive and Hadoop configuration).
The error seems to be related to the KDC... but I have a valid ticket... so I'm confused.
(This outside machine is not administered by me.)
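For reference, a kerberized Beeline connection normally looks like this (host, port, and realm here are placeholders; note that the principal in the JDBC URL is the HiveServer2 service principal, not the client's):

# get a TGT for your own user principal first
kinit myuser@EXAMPLE.REALM
# the principal= part names the HS2 service principal
beeline -u "jdbc:hive2://hs2-host.example.com:10000/default;principal=hive/hs2-host.example.com@EXAMPLE.REALM"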
I have configured Hive over Hadoop and HBase using the tutorialspoint guide:
http://www.tutorialspoint.com/hive/hive_installation.htm
I am facing this issue with Hive. Please help.
Logging initialized using configuration in jar:file:/usr/local/hive/apache-hive-1.2.1-bin/lib/hive-common-1.2.1.jar!/hive-log4j.properties
Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
Caused by: javax.jdo.JDOFatalDataStoreException: Unable to open a test connection to the given database. JDBC url = jdbc:derby://localhost:1527/metastore_db;create=true, username = APP. Terminating connection pool (set lazyInit to true if you expect to start your database after your app). Original Exception: ------
java.sql.SQLNonTransientConnectionException: java.net.ConnectException : Error connecting to server localhost on port 1527 with message Connection refused.
at org.apache.derby.client.am.SQLExceptionFactory40.getSQLException(Unknown Source)
at org.apache.derby.client.am.SqlException.getSQLException(Unknown Source)
at org.apache.derby.jdbc.ClientDriver.connect(Unknown Source)
at java.sql.DriverManager.getConnection(DriverManager.java:571)
at java.sql.DriverManager.getConnection(DriverManager.java:187)
at com.jolbox.bonecp.BoneCP.obtainRawInternalConnection(BoneCP.java:361)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:416)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:501)
at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:298)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
It looks like you haven't started the Derby database. Take a look at this Hive doc: https://cwiki.apache.org/confluence/display/Hive/HiveDerbyServerMode. There is a short paragraph on how to start Derby.
The Derby server isn't running, so Hive throws the connection-refused exception.
Go to %DERBY_HOME%\bin and run StartNetworkServer.cmd.
Then restart the Hive services.
Hope this helps.
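On Linux the equivalent is roughly the following; this sketch assumes DERBY_HOME is set and the default port 1527:

# start the Derby network server in the background (listens on 1527 by default)
$DERBY_HOME/bin/startNetworkServer &
# confirm something is listening on the port the metastore JDBC URL points at
netstat -an | grep 1527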
I followed the instructions on this page to install a single-machine YARN cluster:
http://hadoop.apache.org/docs/r2.0.5-alpha/hadoop-project-dist/hadoop-common/SingleCluster.html
But when I run the example jar, the job hangs, and when I check the logs I find the following errors (the first is the client-side log, the second is the ResourceManager log).
(Client side)
13/10/18 17:30:36 ERROR security.UserGroupInformation: PriviledgedActionException as:zhangj82 (auth:SIMPLE) cause:java.io.IOException
java.io.IOException
at org.apache.hadoop.mapred.ClientServiceDelegate.invoke(ClientServiceDelegate.java:326)
at org.apache.hadoop.mapred.ClientServiceDelegate.getJobStatus(ClientServiceDelegate.java:385)
at org.apache.hadoop.mapred.YARNRunner.getJobStatus(YARNRunner.java:526)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:313)
at org.apache.hadoop.mapreduce.Job$1.run(Job.java:310)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1478)
at org.apache.hadoop.mapreduce.Job.updateStatus(Job.java:310)
at org.apache.hadoop.mapreduce.Job.isComplete(Job.java:594)
at org.apache.hadoop.mapreduce.Job.monitorAndPrintJob(Job.java:1277)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1239)
at org.apache.hadoop.examples.RandomWriter.run(RandomWriter.java:283)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.hadoop.examples.RandomWriter.main(RandomWriter.java:294)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:72)
at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:144)
at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
(Resource Manager)
2013-10-18 17:35:26,128 INFO org.apache.hadoop.ipc.Server: IPC Server listener on 8040: readAndProcess threw exception javax.security.sasl.SaslException: DIGEST-MD5: IO error acquiring password [Caused by org.apache.hadoop.security.token.SecretManager$InvalidToken: Can't de-serialize tokenIdentifier] from client 127.0.0.1. Count of bytes read: 0
javax.security.sasl.SaslException: DIGEST-MD5: IO error acquiring password [Caused by org.apache.hadoop.security.token.SecretManager$InvalidToken: Can't de-serialize tokenIdentifier]
at com.sun.security.sasl.digest.DigestMD5Server.validateClientResponse(DigestMD5Server.java:594)
at com.sun.security.sasl.digest.DigestMD5Server.evaluateResponse(DigestMD5Server.java:244)
at org.apache.hadoop.ipc.Server$Connection.saslReadAndProcess(Server.java:1173)
at org.apache.hadoop.ipc.Server$Connection.readAndProcess(Server.java:1350)
at org.apache.hadoop.ipc.Server$Listener.doRead(Server.java:726)
at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:525)
at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:500)
Caused by: org.apache.hadoop.security.token.SecretManager$InvalidToken: Can't de-serialize tokenIdentifier
at org.apache.hadoop.security.SaslRpcServer.getIdentifier(SaslRpcServer.java:112)
at org.apache.hadoop.security.SaslRpcServer$SaslDigestCallbackHandler.handle(SaslRpcServer.java:217)
at com.sun.security.sasl.digest.DigestMD5Server.validateClientResponse(DigestMD5Server.java:585)
... 6 more
Caused by: java.io.EOFException
at java.io.DataInputStream.readFully(DataInputStream.java:197)
at org.apache.hadoop.io.Text.readFields(Text.java:306)
at org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier.readFields(AbstractDelegationTokenIdentifier.java:186)
at org.apache.hadoop.security.SaslRpcServer.getIdentifier(SaslRpcServer.java:109)
... 8 more
2013-10-18 17:35:26,308 INFO org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl: container_1382088798449_0001_01_000001 Container Transitioned from ACQUIRED to RUNNING
This bug has been raised in the Hadoop issue tracker. To overcome it, you may apply a source-level patch as described in BlockTokenSecretManager, or update to the latest version of Hadoop.
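If you decide to upgrade, you can first confirm which release you are currently on:

# prints the Hadoop release and build information
hadoop version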
I am building a distributed application using Distributed OSGi and Zookeeper. The application should run on Felix and Equinox. I register my services as JAX-WS webservices using BundleContext.registerService(...) and query them with a ServiceTracker<S,T>. So all service handling is done by the OSGi framework. Each service call does some calculations so it takes some time before responding to the caller.
The problem is that I get java.net.SocketTimeoutException: Read timed out exceptions when the service call takes more than 60 seconds. So it seems that there is a default timeout of 60 seconds. Does anybody know how I can configure the timeout that is used by the OSGi framework?
This is the full stack trace:
Feb 06, 2012 11:27:02 AM org.apache.cxf.phase.PhaseInterceptorChain doDefaultLogging
WARNING: Interceptor for {http://test.com/}LB#{http://test.com/}createSession has thrown exception, unwinding now
org.apache.cxf.interceptor.Fault: Could not send Message.
at org.apache.cxf.interceptor.MessageSenderInterceptor$MessageSenderEndingInterceptor.handleMessage(MessageSenderInterceptor.java:64)
at org.apache.cxf.phase.PhaseInterceptorChain.doIntercept(PhaseInterceptorChain.java:243)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:487)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:313)
at org.apache.cxf.endpoint.ClientImpl.invoke(ClientImpl.java:265)
at org.apache.cxf.frontend.ClientProxy.invokeSync(ClientProxy.java:73)
at org.apache.cxf.frontend.ClientProxy.invoke(ClientProxy.java:68)
at $Proxy34.createSession(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.apache.cxf.dosgi.dsw.handlers.ServiceInvocationHandler$1.run(ServiceInvocationHandler.java:65)
at java.security.AccessController.doPrivileged(Native Method)
at org.apache.cxf.dosgi.dsw.handlers.ServiceInvocationHandler.invoke(ServiceInvocationHandler.java:63)
at $Proxy34.createSession(Unknown Source)
at test.Consumer$LoadGeneratingThread.run(Consumer.java:122)
Caused by: java.net.SocketTimeoutException: SocketTimeoutException invoking http://192.168.0.15:9090/LB: Read timed out
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:525)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.mapException(HTTPConduit.java:2058)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.close(HTTPConduit.java:2043)
at org.apache.cxf.transport.AbstractConduit.close(AbstractConduit.java:66)
at org.apache.cxf.transport.http.HTTPConduit.close(HTTPConduit.java:639)
at org.apache.cxf.interceptor.MessageSenderInterceptor$MessageSenderEndingInterceptor.handleMessage(MessageSenderInterceptor.java:62)
... 16 more
Caused by: java.net.SocketTimeoutException: Read timed out
at java.net.SocketInputStream.socketRead0(Native Method)
at java.net.SocketInputStream.read(SocketInputStream.java:150)
at java.net.SocketInputStream.read(SocketInputStream.java:121)
at java.io.BufferedInputStream.fill(BufferedInputStream.java:235)
at java.io.BufferedInputStream.read1(BufferedInputStream.java:275)
at java.io.BufferedInputStream.read(BufferedInputStream.java:334)
at sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:641)
at sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:589)
at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1319)
at java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:468)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.handleResponseInternal(HTTPConduit.java:2165)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.handleResponse(HTTPConduit.java:2134)
at org.apache.cxf.transport.http.HTTPConduit$WrappedOutputStream.close(HTTPConduit.java:1988)
... 19 more
I solved this problem. I'm using cxf-dosgi-ri-singlebundle-distribution-1.2.jar.
Open the file /META-INF/cxf/cxf.xml and add the following content.
Add the http-conf namespace and schema location to the root element:

xmlns:http-conf="http://cxf.apache.org/transports/http/configuration"
xsi:schemaLocation="......
    http://cxf.apache.org/transports/http/configuration
    http://cxf.apache.org/schemas/configuration/http-conf.xsd"

Then add a conduit configuration:

<http-conf:conduit name="*.conduit">
    <http-conf:client ConnectionTimeout="10000" ReceiveTimeout="20000"/>
</http-conf:conduit>
That fixed it. Both timeout values are in milliseconds; CXF's default ReceiveTimeout is 60000 (60 seconds), which explains the cutoff I was seeing, and setting ReceiveTimeout to 0 disables the read timeout entirely.