Hadoop Job: Error injecting constructor, JAXBException

A MapReduce job implemented in an Apache Crunch pipeline is failing with the error message Error injecting constructor, javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported.
The Crunch pipeline is very similar to other functioning pipelines; does anyone have any theories or intuitions about the errors seen below?
Thank you for any help.
An excerpt of the application log can be found below:
2016-02-16 13:34:31,925 INFO [main] org.mortbay.log: Extract jar:file:/data7/hadoop/yarn/local/filecache/96/mapreduce.tar.gz/hadoop/share/hadoop/yarn/hadoop-yarn-common-2.7.1.2.3.4.0-3485.jar!/webapps/mapreduce to /data6/hadoop/yarn/local/usercache/anonymizedusername/appcache/application_1455519038503_6094/container_e26_1455519038503_6094_02_000001/tmp/Jetty_0_0_0_0_52016_mapreduce____.kjcl3z/webapp
2016-02-16 13:34:32,627 WARN [main] org.mortbay.log: failed guice: com.google.inject.ProvisionException: Guice provision errors:
1) Error injecting constructor, javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:70)
at org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.setup(AMWebApp.java:33)
while locating org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver
1 error
2016-02-16 13:34:32,628 ERROR [main] org.mortbay.log: Failed startup of context org.mortbay.jetty.webapp.WebAppContext@1981d861{/,jar:file:/data7/hadoop/yarn/local/filecache/96/mapreduce.tar.gz/hadoop/share/hadoop/yarn/hadoop-yarn-common-2.7.1.2.3.4.0-3485.jar!/webapps/mapreduce}
com.google.inject.ProvisionException: Guice provision errors:
1) Error injecting constructor, javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:70)
at org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.setup(AMWebApp.java:33)
while locating org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver
1 error
at com.google.inject.internal.InjectorImpl$4.get(InjectorImpl.java:987)
at com.google.inject.internal.InjectorImpl.getInstance(InjectorImpl.java:1013)
at com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory$GuiceInstantiatedComponentProvider.getInstance(GuiceComponentProviderFactory.java:332)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory$ManagedSingleton.<init>(IoCProviderFactory.java:179)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory.wrap(IoCProviderFactory.java:100)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory._getComponentProvider(IoCProviderFactory.java:93)
at com.sun.jersey.core.spi.component.ProviderFactory.getComponentProvider(ProviderFactory.java:153)
at com.sun.jersey.core.spi.component.ProviderServices.getComponent(ProviderServices.java:251)
at com.sun.jersey.core.spi.component.ProviderServices.getProviders(ProviderServices.java:148)
at com.sun.jersey.core.spi.factory.ContextResolverFactory.init(ContextResolverFactory.java:83)
at com.sun.jersey.server.impl.application.WebApplicationImpl._initiate(WebApplicationImpl.java:1271)
at com.sun.jersey.server.impl.application.WebApplicationImpl.access$700(WebApplicationImpl.java:169)
at com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:775)
at com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:771)
at com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193)
at com.sun.jersey.server.impl.application.WebApplicationImpl.initiate(WebApplicationImpl.java:771)
at com.sun.jersey.guice.spi.container.servlet.GuiceContainer.initiate(GuiceContainer.java:121)
at com.sun.jersey.spi.container.servlet.ServletContainer$InternalWebComponent.initiate(ServletContainer.java:318)
at com.sun.jersey.spi.container.servlet.WebComponent.load(WebComponent.java:609)
at com.sun.jersey.spi.container.servlet.WebComponent.init(WebComponent.java:210)
at com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:373)
at com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:710)
at com.google.inject.servlet.FilterDefinition.init(FilterDefinition.java:114)
at com.google.inject.servlet.ManagedFilterPipeline.initPipeline(ManagedFilterPipeline.java:98)
at com.google.inject.servlet.GuiceFilter.init(GuiceFilter.java:172)
at org.mortbay.jetty.servlet.FilterHolder.doStart(FilterHolder.java:97)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.servlet.ServletHandler.initialize(ServletHandler.java:713)
at org.mortbay.jetty.servlet.Context.startContext(Context.java:140)
at org.mortbay.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1282)
at org.mortbay.jetty.handler.ContextHandler.doStart(ContextHandler.java:518)
at org.mortbay.jetty.webapp.WebAppContext.doStart(WebAppContext.java:499)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerCollection.doStart(HandlerCollection.java:152)
at org.mortbay.jetty.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:156)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerWrapper.doStart(HandlerWrapper.java:130)
at org.mortbay.jetty.Server.doStart(Server.java:224)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:857)
at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:306)
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.serviceStart(MRClientService.java:142)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1148)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
Caused by: javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported
at com.sun.xml.bind.v2.ContextFactory.createContext(ContextFactory.java:63)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:247)
at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:234)
at javax.xml.bind.ContextFinder.find(ContextFinder.java:441)
at javax.xml.bind.JAXBContext.newInstance(JAXBContext.java:641)
at com.sun.jersey.api.json.JSONJAXBContext.<init>(JSONJAXBContext.java:246)
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:72)
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver$$FastClassByGuice$$d18e5b53.newInstance(<generated>)
at com.google.inject.internal.cglib.reflect.$FastConstructor.newInstance(FastConstructor.java:40)
at com.google.inject.internal.DefaultConstructionProxyFactory$1.newInstance(DefaultConstructionProxyFactory.java:60)
at com.google.inject.internal.ConstructorInjector.construct(ConstructorInjector.java:85)
at com.google.inject.internal.ConstructorBindingImpl$Factory.get(ConstructorBindingImpl.java:254)
at com.google.inject.internal.ProviderToInternalFactoryAdapter$1.call(ProviderToInternalFactoryAdapter.java:46)
at com.google.inject.internal.InjectorImpl.callInContext(InjectorImpl.java:1031)
at com.google.inject.internal.ProviderToInternalFactoryAdapter.get(ProviderToInternalFactoryAdapter.java:40)
at com.google.inject.Scopes$1$1.get(Scopes.java:65)
at com.google.inject.internal.InternalFactoryToProviderAdapter.get(InternalFactoryToProviderAdapter.java:40)
at com.google.inject.internal.InjectorImpl$4$1.call(InjectorImpl.java:978)
at com.google.inject.internal.InjectorImpl.callInContext(InjectorImpl.java:1024)
at com.google.inject.internal.InjectorImpl$4.get(InjectorImpl.java:974)
... 50 more
2016-02-16 13:34:32,641 INFO [main] org.mortbay.log: Started HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:52016
2016-02-16 13:34:32,642 INFO [main] org.mortbay.log: Stopped HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:52016
2016-02-16 13:34:32,642 WARN [2076864428@qtp-1188623367-1 - Acceptor0 HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:52016] org.apache.hadoop.http.HttpServer2: HttpServer Acceptor: isRunning is false. Rechecking.
2016-02-16 13:34:32,643 WARN [2076864428@qtp-1188623367-1 - Acceptor0 HttpServer2$SelectChannelConnectorWithSafeStartup@0.0.0.0:52016] org.apache.hadoop.http.HttpServer2: HttpServer Acceptor: isRunning is false
2016-02-16 13:34:32,746 ERROR [main] org.apache.hadoop.mapreduce.v2.app.client.MRClientService: Webapps failed to start. Ignoring for now:
org.apache.hadoop.yarn.webapp.WebAppException: Error starting http server
at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:310)
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.serviceStart(MRClientService.java:142)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1148)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
Caused by: java.io.IOException: Unable to initialize WebAppContext
at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:879)
at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:306)
... 10 more
Caused by: com.google.inject.ProvisionException: Guice provision errors:
1) Error injecting constructor, javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:70)
at org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.setup(AMWebApp.java:33)
while locating org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver
1 error
at com.google.inject.internal.InjectorImpl$4.get(InjectorImpl.java:987)
at com.google.inject.internal.InjectorImpl.getInstance(InjectorImpl.java:1013)
at com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory$GuiceInstantiatedComponentProvider.getInstance(GuiceComponentProviderFactory.java:332)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory$ManagedSingleton.<init>(IoCProviderFactory.java:179)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory.wrap(IoCProviderFactory.java:100)
at com.sun.jersey.core.spi.component.ioc.IoCProviderFactory._getComponentProvider(IoCProviderFactory.java:93)
at com.sun.jersey.core.spi.component.ProviderFactory.getComponentProvider(ProviderFactory.java:153)
at com.sun.jersey.core.spi.component.ProviderServices.getComponent(ProviderServices.java:251)
at com.sun.jersey.core.spi.component.ProviderServices.getProviders(ProviderServices.java:148)
at com.sun.jersey.core.spi.factory.ContextResolverFactory.init(ContextResolverFactory.java:83)
at com.sun.jersey.server.impl.application.WebApplicationImpl._initiate(WebApplicationImpl.java:1271)
at com.sun.jersey.server.impl.application.WebApplicationImpl.access$700(WebApplicationImpl.java:169)
at com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:775)
at com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:771)
at com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193)
at com.sun.jersey.server.impl.application.WebApplicationImpl.initiate(WebApplicationImpl.java:771)
at com.sun.jersey.guice.spi.container.servlet.GuiceContainer.initiate(GuiceContainer.java:121)
at com.sun.jersey.spi.container.servlet.ServletContainer$InternalWebComponent.initiate(ServletContainer.java:318)
at com.sun.jersey.spi.container.servlet.WebComponent.load(WebComponent.java:609)
at com.sun.jersey.spi.container.servlet.WebComponent.init(WebComponent.java:210)
at com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:373)
at com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:710)
at com.google.inject.servlet.FilterDefinition.init(FilterDefinition.java:114)
at com.google.inject.servlet.ManagedFilterPipeline.initPipeline(ManagedFilterPipeline.java:98)
at com.google.inject.servlet.GuiceFilter.init(GuiceFilter.java:172)
at org.mortbay.jetty.servlet.FilterHolder.doStart(FilterHolder.java:97)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.servlet.ServletHandler.initialize(ServletHandler.java:713)
at org.mortbay.jetty.servlet.Context.startContext(Context.java:140)
at org.mortbay.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1282)
at org.mortbay.jetty.handler.ContextHandler.doStart(ContextHandler.java:518)
at org.mortbay.jetty.webapp.WebAppContext.doStart(WebAppContext.java:499)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerCollection.doStart(HandlerCollection.java:152)
at org.mortbay.jetty.handler.ContextHandlerCollection.doStart(ContextHandlerCollection.java:156)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.mortbay.jetty.handler.HandlerWrapper.doStart(HandlerWrapper.java:130)
at org.mortbay.jetty.Server.doStart(Server.java:224)
at org.mortbay.component.AbstractLifeCycle.start(AbstractLifeCycle.java:50)
at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:857)
... 11 more
Caused by: javax.xml.bind.JAXBException: property "retainReferenceToInfo" is not supported
at com.sun.xml.bind.v2.ContextFactory.createContext(ContextFactory.java:63)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:247)
at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:234)
at javax.xml.bind.ContextFinder.find(ContextFinder.java:441)
at javax.xml.bind.JAXBContext.newInstance(JAXBContext.java:641)
at com.sun.jersey.api.json.JSONJAXBContext.<init>(JSONJAXBContext.java:246)
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:72)
at org.apache.hadoop.mapreduce.v2.app.webapp.JAXBContextResolver$$FastClassByGuice$$d18e5b53.newInstance(<generated>)
at com.google.inject.internal.cglib.reflect.$FastConstructor.newInstance(FastConstructor.java:40)
at com.google.inject.internal.DefaultConstructionProxyFactory$1.newInstance(DefaultConstructionProxyFactory.java:60)
at com.google.inject.internal.ConstructorInjector.construct(ConstructorInjector.java:85)
at com.google.inject.internal.ConstructorBindingImpl$Factory.get(ConstructorBindingImpl.java:254)
at com.google.inject.internal.ProviderToInternalFactoryAdapter$1.call(ProviderToInternalFactoryAdapter.java:46)
at com.google.inject.internal.InjectorImpl.callInContext(InjectorImpl.java:1031)
at com.google.inject.internal.ProviderToInternalFactoryAdapter.get(ProviderToInternalFactoryAdapter.java:40)
at com.google.inject.Scopes$1$1.get(Scopes.java:65)
at com.google.inject.internal.InternalFactoryToProviderAdapter.get(InternalFactoryToProviderAdapter.java:40)
at com.google.inject.internal.InjectorImpl$4$1.call(InjectorImpl.java:978)
at com.google.inject.internal.InjectorImpl.callInContext(InjectorImpl.java:1024)
at com.google.inject.internal.InjectorImpl$4.get(InjectorImpl.java:974)
... 50 more
2016-02-16 13:34:32,755 INFO [main] org.apache.hadoop.ipc.CallQueueManager: Using callQueue class java.util.concurrent.LinkedBlockingQueue
2016-02-16 13:34:32,756 INFO [Socket Reader #1 for port 57690] org.apache.hadoop.ipc.Server: Starting Socket Reader #1 for port 57690
2016-02-16 13:34:32,762 INFO [IPC Server Responder] org.apache.hadoop.ipc.Server: IPC Server Responder: starting
2016-02-16 13:34:32,762 INFO [IPC Server listener on 57690] org.apache.hadoop.ipc.Server: IPC Server listener on 57690: starting
2016-02-16 13:34:32,790 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: nodeBlacklistingEnabled:true
2016-02-16 13:34:32,791 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: maxTaskFailuresPerNode is 3
2016-02-16 13:34:32,791 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerRequestor: blacklistDisablePercent is 33
2016-02-16 13:34:32,893 ERROR [main] org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator: Exception while registering
java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.getHttpPort(MRClientService.java:174)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:156)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.serviceStart(RMCommunicator.java:121)
at org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator.serviceStart(RMContainerAllocator.java:250)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter.serviceStart(MRAppMaster.java:881)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:120)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1151)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
2016-02-16 13:34:32,893 INFO [main] org.apache.hadoop.service.AbstractService: Service RMCommunicator failed in state STARTED; cause: org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:177)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.serviceStart(RMCommunicator.java:121)
at org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator.serviceStart(RMContainerAllocator.java:250)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter.serviceStart(MRAppMaster.java:881)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:120)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1151)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
Caused by: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.getHttpPort(MRClientService.java:174)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:156)
... 14 more
2016-02-16 13:34:32,895 INFO [main] org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator: Final Stats: PendingReds:0 ScheduledMaps:0 ScheduledReds:0 AssignedMaps:0 AssignedReds:0 CompletedMaps:0 CompletedReds:0 ContAlloc:0 ContRel:0 HostLocal:0 RackLocal:0
2016-02-16 13:34:32,895 INFO [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter failed in state STARTED; cause: org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:177)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.serviceStart(RMCommunicator.java:121)
at org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator.serviceStart(RMContainerAllocator.java:250)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter.serviceStart(MRAppMaster.java:881)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:120)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1151)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
Caused by: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.getHttpPort(MRClientService.java:174)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:156)
... 14 more
2016-02-16 13:34:32,896 INFO [main] org.apache.hadoop.service.AbstractService: Service org.apache.hadoop.mapreduce.v2.app.MRAppMaster failed in state STARTED; cause: org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
org.apache.hadoop.yarn.exceptions.YarnRuntimeException: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:177)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.serviceStart(RMCommunicator.java:121)
at org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator.serviceStart(RMContainerAllocator.java:250)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$ContainerAllocatorRouter.serviceStart(MRAppMaster.java:881)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:120)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.serviceStart(MRAppMaster.java:1151)
at org.apache.hadoop.service.AbstractService.start(AbstractService.java:193)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster$5.run(MRAppMaster.java:1557)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.initAndStartAppMaster(MRAppMaster.java:1553)
at org.apache.hadoop.mapreduce.v2.app.MRAppMaster.main(MRAppMaster.java:1486)
Caused by: java.lang.NullPointerException
at org.apache.hadoop.mapreduce.v2.app.client.MRClientService.getHttpPort(MRClientService.java:174)
at org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator.register(RMCommunicator.java:156)
... 14 more

It turned out to be a Jetty dependency collision introduced by the org.apache.avro:avro-mapred artifact.
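For anyone hitting the same collision: the fix is to stop avro-mapred from dragging in its own Jetty, so that the Jetty shipped with the cluster's Hadoop wins on the application master's classpath. A minimal sketch of the exclusion, assuming an sbt build (the avro-mapred version shown is an assumption; match it to your project):

// build.sbt -- illustrative sketch: exclude the org.mortbay.jetty
// artifacts that avro-mapred pulls in transitively, so the Jetty
// provided by the cluster's Hadoop is the only one on the classpath.
libraryDependencies += ("org.apache.avro" % "avro-mapred" % "1.7.7")
  .excludeAll(ExclusionRule(organization = "org.mortbay.jetty"))

With a Maven build, running mvn dependency:tree -Dincludes=org.mortbay.jetty shows which artifact is pulling in the colliding Jetty, and the equivalent exclusion can be declared on the avro-mapred dependency.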

Related

Sqoop import into Hive - ERROR ("javax.management.MBeanTrustPermission" "register")

I'm facing this error when running a Sqoop import command to Hive and HDFS. The HDFS job runs without problems, but I can't make the same import into Hive due to these Java errors.
log:
[sga-dl#localhost ~]$ sqoop import --connect jdbc:mysql://localhost:3306/v3_abrasivos --username hiveusr --password DATPASS --table History_Demand --m 1 --target-dir /raw_imports/History_Demand --hive-import --create-hive-table --hive-table sqoop_v3_abrasivos.History_Demand
...
20/02/04 09:50:01 INFO mapreduce.ImportJobBase: Transferred 794.1428 MB in 58.9304 seconds (13.476
20/02/04 09:50:01 INFO manager.SqlManager: Executing SQL statement: SELECT t.* FROM `History_Demand` AS t LIMIT 1
20/02/04 09:50:01 WARN hive.TableDefWriter: Column Dia had to be cast to a less precise type in Hive
20/02/04 09:50:01 INFO hive.HiveImport: Loading uploaded data into Hive
20/02/04 09:50:01 INFO conf.HiveConf: Found configuration file file:/opt/hive/conf/hive-site.xml
2020-02-04 09:50:02,957 main ERROR Could not register mbeans java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
at java.security.AccessControlContext.checkPermission(AccessControlContext.java:472)
at java.lang.SecurityManager.checkPermission(SecurityManager.java:585)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.checkMBeanTrustPermission(DefaultMBeanServerInterceptor.java:1848)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:322)
at com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522)
at org.apache.logging.log4j.core.jmx.Server.register(Server.java:380)
at org.apache.logging.log4j.core.jmx.Server.reregisterMBeansAfterReconfigure(Server.java:165)
at org.apache.logging.log4j.core.jmx.Server.reregisterMBeansAfterReconfigure(Server.java:138)
at org.apache.logging.log4j.core.LoggerContext.setConfiguration(LoggerContext.java:507)
at org.apache.logging.log4j.core.LoggerContext.start(LoggerContext.java:249)
at org.apache.logging.log4j.core.async.AsyncLoggerContext.start(AsyncLoggerContext.java:86)
at org.apache.logging.log4j.core.impl.Log4jContextFactory.getContext(Log4jContextFactory.java:239)
at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:157)
at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:130)
at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:100)
at org.apache.logging.log4j.core.config.Configurator.initialize(Configurator.java:187)
at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jDefault(LogUtils.java:154)
at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:90)
at org.apache.hadoop.hive.common.LogUtils.initHiveLog4jCommon(LogUtils.java:82)
at org.apache.hadoop.hive.common.LogUtils.initHiveLog4j(LogUtils.java:65)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:702)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:331)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:241)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:537)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
Logging initialized using configuration in jar:file:/usr/local/apache-hive-2.3.6-bin/lib/hive-common-2.3.6.jar!/hive-log4j2.properties Async: true
20/02/04 09:50:03 INFO SessionState:
Logging initialized using configuration in jar:file:/usr/local/apache-hive-2.3.6-bin/lib/hive-common-2.3.6.jar!/hive-log4j2.properties Async: true
20/02/04 09:50:03 INFO session.SessionState: Created HDFS directory: /tmp/hive/sga-dl/9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:03 INFO session.SessionState: Created local directory: /tmp/sga-dl/9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:03 INFO session.SessionState: Created HDFS directory: /tmp/hive/sga-dl/9133a12b-b9d9-4d17-ad47-3dc6ca872ab6/_tmp_space.db
20/02/04 09:50:03 INFO conf.HiveConf: Using the default value passed in for log id: 9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:03 INFO session.SessionState: Updating thread name to 9133a12b-b9d9-4d17-ad47-3dc6ca872ab6 main
20/02/04 09:50:03 INFO conf.HiveConf: Using the default value passed in for log id: 9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:03 INFO ql.Driver: Compiling command(queryId=sga-dl_20200204095003_9b2a8fbf-d798-4146-b57c-ffcb5920891b): CREATE TABLE `sqoop_v3_abrasivos.History_Demand` ( `idHistory_Demand` BIGINT, `Entidade_SalesOrg` STRING, `Cod_Material` BIGINT, `Material` STRING, `Plan_Group` STRING, `Linha_Produto` STRING, `MTS_MTO` STRING, `Cod_Tipo_OV` STRING, `Tipo_OV` STRING, `OV_Numero` BIGINT, `OV_Linha` INT, `Cod_Cliente` STRING, `Cliente` STRING, `Cod_Cliente_Corp` STRING, `Cliente_Corp` STRING, `Pais_Cliente` STRING, `UF_Cliente` STRING, `Estado_Cliente` STRING, `Cidade_Cliente` STRING, `Bairro_Cliente` STRING, `Sigla_Moeda` STRING, `OV_Moeda` STRING, `Cod_Sell_Plant` INT, `Sell_Plant` STRING, `Pais_SalesOrg` STRING, `Dia` STRING, `Periodo` STRING, `UN_Estoque` STRING, `Qtd_UN_Estoque` STRING, `Vendas_LOC` STRING) COMMENT 'Imported by sqoop on 2020/02/04 09:50:01' ROW FORMAT DELIMITED FIELDS TERMINATED BY '\001' LINES TERMINATED BY '\012' STORED AS TEXTFILE
20/02/04 09:50:05 INFO metastore.HiveMetaStore: 0: Opening raw store with implementation class:org.apache.hadoop.hive.metastore.ObjectStore
20/02/04 09:50:05 INFO metastore.ObjectStore: ObjectStore, initialize called
20/02/04 09:50:06 INFO DataNucleus.Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
20/02/04 09:50:06 INFO DataNucleus.Persistence: Property datanucleus.cache.level2 unknown - will be ignored
20/02/04 09:50:06 ERROR bonecp.BoneCP: Unable to start/stop JMX
java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
at java.security.AccessControlContext.checkPermission(AccessControlContext.java:472)
at java.lang.SecurityManager.checkPermission(SecurityManager.java:585)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.checkMBeanTrustPermission(DefaultMBeanServerInterceptor.java:1848)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:322)
at com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522)
at com.jolbox.bonecp.BoneCP.registerUnregisterJMX(BoneCP.java:528)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:500)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:483)
at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:297)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:606)
at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
at org.datanucleus.NucleusContextHelper.createStoreManagerForProperties(NucleusContextHelper.java:133)
at org.datanucleus.PersistenceNucleusContextImpl.initialise(PersistenceNucleusContextImpl.java:422)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:817)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:334)
at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:213)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
at java.security.AccessController.doPrivileged(Native Method)
at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:521)
at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:550)
at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:405)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:58)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:79)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:164)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.createHiveDB(BaseSemanticAnalyzer.java:236)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.<init>(BaseSemanticAnalyzer.java:215)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.<init>(SemanticAnalyzer.java:362)
at org.apache.hadoop.hive.ql.parse.CalcitePlanner.<init>(CalcitePlanner.java:267)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory.get(SemanticAnalyzerFactory.java:318)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:484)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1317)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1457)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336)
at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:474)
at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:490)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:793)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:331)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:241)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:537)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
20/02/04 09:50:06 INFO metastore.ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
20/02/04 09:50:08 ERROR bonecp.BoneCP: Unable to start/stop JMX
java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
at java.security.AccessControlContext.checkPermission(AccessControlContext.java:472)
at java.lang.SecurityManager.checkPermission(SecurityManager.java:585)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.checkMBeanTrustPermission(DefaultMBeanServerInterceptor.java:1848)
at com.sun.jmx.interceptor.DefaultMBeanServerInterceptor.registerMBean(DefaultMBeanServerInterceptor.java:322)
at com.sun.jmx.mbeanserver.JmxMBeanServer.registerMBean(JmxMBeanServer.java:522)
at com.jolbox.bonecp.BoneCP.registerUnregisterJMX(BoneCP.java:528)
at com.jolbox.bonecp.BoneCP.<init>(BoneCP.java:500)
at com.jolbox.bonecp.BoneCPDataSource.getConnection(BoneCPDataSource.java:120)
at org.datanucleus.store.rdbms.ConnectionProviderPriorityList.getConnection(ConnectionProviderPriorityList.java:57)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getConnection(ConnectionFactoryImpl.java:402)
at org.datanucleus.store.rdbms.ConnectionFactoryImpl$ManagedConnectionImpl.getXAResource(ConnectionFactoryImpl.java:361)
at org.datanucleus.store.connection.ConnectionManagerImpl.allocateConnection(ConnectionManagerImpl.java:316)
at org.datanucleus.store.connection.AbstractConnectionFactory.getConnection(AbstractConnectionFactory.java:84)
at org.datanucleus.store.AbstractStoreManager.getConnection(AbstractStoreManager.java:347)
at org.datanucleus.store.AbstractStoreManager.getConnection(AbstractStoreManager.java:310)
at org.datanucleus.store.rdbms.query.JDOQLQuery.performExecute(JDOQLQuery.java:591)
at org.datanucleus.store.query.Query.executeQuery(Query.java:1855)
at org.datanucleus.store.query.Query.executeWithArray(Query.java:1744)
at org.datanucleus.store.query.Query.execute(Query.java:1726)
at org.datanucleus.api.jdo.JDOQuery.executeInternal(JDOQuery.java:374)
at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:216)
at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:181)
at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:144)
at org.apache.hadoop.hive.metastore.ObjectStore.initializeHelper(ObjectStore.java:410)
at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:342)
at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:303)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:58)
at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:628)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:594)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:588)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:655)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:431)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:79)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:92)
at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:6902)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:164)
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:70)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1707)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.createHiveDB(BaseSemanticAnalyzer.java:236)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.<init>(BaseSemanticAnalyzer.java:215)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.<init>(SemanticAnalyzer.java:362)
at org.apache.hadoop.hive.ql.parse.CalcitePlanner.<init>(CalcitePlanner.java:267)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory.get(SemanticAnalyzerFactory.java:318)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:484)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1317)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1457)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336)
at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:474)
at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:490)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:793)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:331)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:241)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:537)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
20/02/04 09:50:09 INFO metastore.MetaStoreDirectSql: Using direct SQL, underlying DB is MYSQL
20/02/04 09:50:09 INFO metastore.ObjectStore: Initialized ObjectStore
20/02/04 09:50:09 INFO metastore.HiveMetaStore: Added admin role in metastore
20/02/04 09:50:09 INFO metastore.HiveMetaStore: Added public role in metastore
20/02/04 09:50:09 INFO metastore.HiveMetaStore: No user is added in admin role, since config is empty
20/02/04 09:50:09 INFO metastore.HiveMetaStore: 0: get_all_functions
20/02/04 09:50:09 INFO HiveMetaStore.audit: ugi=sga-dl ip=unknown-ip-addr cmd=get_all_functions
20/02/04 09:50:09 INFO parse.CalcitePlanner: Starting Semantic Analysis
20/02/04 09:50:09 INFO parse.CalcitePlanner: Creating table sqoop_v3_abrasivos.History_Demand position=13
20/02/04 09:50:09 INFO metastore.HiveMetaStore: 0: get_database: sqoop_v3_abrasivos
20/02/04 09:50:09 INFO HiveMetaStore.audit: ugi=sga-dl ip=unknown-ip-addr cmd=get_database: sqoop_v3_abrasivos
20/02/04 09:50:09 WARN metastore.ObjectStore: Failed to get database sqoop_v3_abrasivos, returning NoSuchObjectException
FAILED: SemanticException [Error 10072]: Database does not exist: sqoop_v3_abrasivos
20/02/04 09:50:09 ERROR ql.Driver: FAILED: SemanticException [Error 10072]: Database does not exist: sqoop_v3_abrasivos
org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: sqoop_v3_abrasivos
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.getDatabase(BaseSemanticAnalyzer.java:1687)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.getDatabase(BaseSemanticAnalyzer.java:1676)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.addDbAndTabToOutputs(SemanticAnalyzer.java:12171)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeCreateTable(SemanticAnalyzer.java:12024)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genResolvedParseTree(SemanticAnalyzer.java:11020)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:11133)
at org.apache.hadoop.hive.ql.parse.CalcitePlanner.analyzeInternal(CalcitePlanner.java:286)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:258)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:512)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1317)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1457)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1227)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:233)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:184)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:403)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336)
at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:474)
at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:490)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:793)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:686)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:331)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:241)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:537)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
20/02/04 09:50:09 INFO ql.Driver: Completed compiling command(queryId=sga-dl_20200204095003_9b2a8fbf-d798-4146-b57c-ffcb5920891b); Time taken: 6.541 seconds
20/02/04 09:50:09 INFO conf.HiveConf: Using the default value passed in for log id: 9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:09 INFO session.SessionState: Resetting thread name to main
20/02/04 09:50:09 INFO conf.HiveConf: Using the default value passed in for log id: 9133a12b-b9d9-4d17-ad47-3dc6ca872ab6
20/02/04 09:50:09 INFO session.SessionState: Deleted directory: /tmp/hive/sga-dl/9133a12b-b9d9-4d17-ad47-3dc6ca872ab6 on fs with scheme hdfs
20/02/04 09:50:09 INFO session.SessionState: Deleted directory: /tmp/sga-dl/9133a12b-b9d9-4d17-ad47-3dc6ca872ab6 on fs with scheme file
20/02/04 09:50:09 INFO metastore.HiveMetaStore: 0: Cleaning up thread local RawStore...
20/02/04 09:50:09 INFO HiveMetaStore.audit: ugi=sga-dl ip=unknown-ip-addr cmd=Cleaning up thread local RawStore...
20/02/04 09:50:09 INFO metastore.HiveMetaStore: 0: Done cleaning up thread local RawStore
20/02/04 09:50:09 INFO HiveMetaStore.audit: ugi=sga-dl ip=unknown-ip-addr cmd=Done cleaning up thread local RawStore
20/02/04 09:50:09 ERROR tool.ImportTool: Import failed: java.io.IOException: Hive CliDriver exited with status=10072
at org.apache.sqoop.hive.HiveImport.executeScript(HiveImport.java:355)
at org.apache.sqoop.hive.HiveImport.importTable(HiveImport.java:241)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:537)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:628)
at org.apache.sqoop.Sqoop.run(Sqoop.java:147)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:183)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:234)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:243)
at org.apache.sqoop.Sqoop.main(Sqoop.java:252)
The data from MySQL makes it to HDFS, but the Hive part just doesn't run. To my mind, the main offender is this error:
2020-02-04 09:50:02,957 main ERROR Could not register mbeans java.security.AccessControlException: access denied ("javax.management.MBeanTrustPermission" "register")
How can I fix this?
I encountered this problem when trying to get a KafkaConsumer via KafkaClient.
I had set a SecurityManager on the JVM to trap the System.exit calls made in org.apache.spark.deploy.SparkSubmit.main, which would otherwise bring the service down:
import java.security.Permission

// Marker exception thrown when a trapped System.exit call is seen.
class ExitTrappedException extends SecurityException("System.exit trapped")

def forbidSystemExitCall(): Unit = {
  val securityManager = new SecurityManager {
    override def checkPermission(perm: Permission): Unit = {
      // Only veto exit; every other permission check passes through.
      if (perm.getName.startsWith("exitVM")) {
        throw new ExitTrappedException
      }
    }
  }
  System.setSecurityManager(securityManager)
}
However, this SecurityManager does not handle "javax.management.MBeanTrustPermission", and that is where the problem comes from. My solution (it works like a charm in my app) is to install a custom Policy:
import java.security.{Permission, Policy, ProtectionDomain}
import javax.management.MBeanTrustPermission

class MBeanRegistryPolicy extends Policy {
  // Capture the default policy before this one replaces it.
  private val defaultPolicy = Policy.getPolicy()

  override def implies(domain: ProtectionDomain, permission: Permission): Boolean = {
    // Grant MBean registration unconditionally; defer everything else.
    if (permission.isInstanceOf[MBeanTrustPermission]) true
    else defaultPolicy.implies(domain, permission)
  }
}

Policy.setPolicy(new MBeanRegistryPolicy)
By default the JVM doesn't install a SecurityManager, so as long as I didn't invoke forbidSystemExitCall (which attaches the user-defined SecurityManager to the JVM), the problem did not occur.
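For completeness, a minimal sketch of wiring the two pieces together (assuming the forbidSystemExitCall and MBeanRegistryPolicy definitions above); installing the Policy first avoids a window in which MBean registrations are checked against the default policy:

def hardenJvm(): Unit = {
  // Grant MBeanTrustPermission "register" before permission checks start.
  Policy.setPolicy(new MBeanRegistryPolicy)
  // Then install the SecurityManager that traps System.exit.
  forbidSystemExitCall()
}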
I hope this helps with your problem.

org.springframework.boot.SpringApplication Application startup failed: java.lang.IllegalArgumentException: object is not an instance of declaring class

I'm getting the below exception while deploying a Spring Boot application on WildFly 14, but the same application deploys fine on WildFly 10.
The Spring Boot version is 1.5.18; I have the same issue with Spring Boot 2.x as well.
The application I'm deploying is the CAS overlay downloaded from https://github.com/apereo/cas-overlay-template/tree/5.3
20:04:32,506 DEBUG [org.springframework.jndi.JndiPropertySource] (ServerService Thread Pool -- 213) JNDI lookup for name [banner.location] threw NamingException with message: banner.location -- service jboss.naming.context.java."banner.location". Returning null.
20:04:32,506 DEBUG [org.springframework.core.env.PropertySourcesPropertyResolver] (ServerService Thread Pool -- 213) Could not find key 'banner.location' in any property source
20:04:32,522 DEBUG [org.springframework.boot.logging.ClasspathLoggingApplicationListener] (ServerService Thread Pool -- 213) Application failed to start with classpath: unknown
20:04:32,522 ERROR [org.springframework.boot.SpringApplication] (ServerService Thread Pool -- 213) Application startup failed: java.lang.IllegalArgumentException: object is not an instance of declaring class
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.springframework.core.io.VfsUtils.invokeVfsMethod(VfsUtils.java:100)
at org.springframework.core.io.VfsUtils.getFile(VfsUtils.java:172)
at org.springframework.core.io.VfsResource.getFile(VfsResource.java:95)
at org.apereo.cas.util.CasVersion.getDateTime(CasVersion.java:59)
at org.apereo.cas.util.SystemUtils.getSystemInfo(SystemUtils.java:50)
at org.apereo.cas.util.spring.boot.AbstractCasBanner.collectEnvironmentInfo(AbstractCasBanner.java:63)
at org.apereo.cas.util.spring.boot.AbstractCasBanner.printBanner(AbstractCasBanner.java:37)
at org.springframework.boot.SpringApplicationBannerPrinter.print(SpringApplicationBannerPrinter.java:71)
at org.springframework.boot.SpringApplication.printBanner(SpringApplication.java:505)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:298)
at org.springframework.boot.web.support.SpringBootServletInitializer.run(SpringBootServletInitializer.java:156)
at org.springframework.boot.web.support.SpringBootServletInitializer.createRootApplicationContext(SpringBootServletInitializer.java:136)
at org.springframework.boot.web.support.SpringBootServletInitializer.onStartup(SpringBootServletInitializer.java:91)
at org.springframework.web.SpringServletContainerInitializer.onStartup(SpringServletContainerInitializer.java:169)
at io.undertow.servlet.core.DeploymentManagerImpl$1.call(DeploymentManagerImpl.java:203)
at io.undertow.servlet.core.DeploymentManagerImpl$1.call(DeploymentManagerImpl.java:185)
at io.undertow.servlet.core.ServletRequestContextThreadSetupAction$1.call(ServletRequestContextThreadSetupAction.java:42)
at io.undertow.servlet.core.ContextClassLoaderSetupAction$1.call(ContextClassLoaderSetupAction.java:43)
at org.wildfly.extension.undertow.security.SecurityContextThreadSetupAction.lambda$create$0(SecurityContextThreadSetupAction.java:105)
at org.wildfly.extension.undertow.security.SecurityContextThreadSetupAction$$Lambda$658/1543581401.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at io.undertow.servlet.core.DeploymentManagerImpl.deploy(DeploymentManagerImpl.java:250)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentService.startContext(UndertowDeploymentService.java:96)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentService$1.run(UndertowDeploymentService.java:78)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at org.jboss.threads.ContextClassLoaderSavingRunnable.run(ContextClassLoaderSavingRunnable.java:35)
at org.jboss.threads.EnhancedQueueExecutor.safeRun(EnhancedQueueExecutor.java:1985)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.doRunTask(EnhancedQueueExecutor.java:1487)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.run(EnhancedQueueExecutor.java:1378)
at java.lang.Thread.run(Thread.java:745)
at org.jboss.threads.JBossThread.run(JBossThread.java:485)
20:04:32,522 ERROR [org.jboss.msc.service.fail] (ServerService Thread Pool -- 213) MSC000001: Failed to start service jboss.deployment.unit."cas.war".undertow-deployment: org.jboss.msc.service.StartException in service jboss.deployment.unit."cas.war".undertow-deployment: java.lang.RuntimeException: java.lang.IllegalArgumentException: object is not an instance of declaring class
at org.wildfly.extension.undertow.deployment.UndertowDeploymentService$1.run(UndertowDeploymentService.java:81)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at org.jboss.threads.ContextClassLoaderSavingRunnable.run(ContextClassLoaderSavingRunnable.java:35)
at org.jboss.threads.EnhancedQueueExecutor.safeRun(EnhancedQueueExecutor.java:1985)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.doRunTask(EnhancedQueueExecutor.java:1487)
at org.jboss.threads.EnhancedQueueExecutor$ThreadBody.run(EnhancedQueueExecutor.java:1378)
at java.lang.Thread.run(Thread.java:745)
at org.jboss.threads.JBossThread.run(JBossThread.java:485)
Caused by: java.lang.RuntimeException: java.lang.IllegalArgumentException: object is not an instance of declaring class
at io.undertow.servlet.core.DeploymentManagerImpl.deploy(DeploymentManagerImpl.java:252)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentService.startContext(UndertowDeploymentService.java:96)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentService$1.run(UndertowDeploymentService.java:78)
... 8 more
Caused by: java.lang.IllegalArgumentException: object is not an instance of declaring class
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.springframework.core.io.VfsUtils.invokeVfsMethod(VfsUtils.java:100)
at org.springframework.core.io.VfsUtils.getFile(VfsUtils.java:172)
at org.springframework.core.io.VfsResource.getFile(VfsResource.java:95)
at org.apereo.cas.util.CasVersion.getDateTime(CasVersion.java:59)
at org.apereo.cas.util.SystemUtils.getSystemInfo(SystemUtils.java:50)
at org.apereo.cas.util.spring.boot.AbstractCasBanner.collectEnvironmentInfo(AbstractCasBanner.java:63)
at org.apereo.cas.util.spring.boot.AbstractCasBanner.printBanner(AbstractCasBanner.java:37)
at org.springframework.boot.SpringApplicationBannerPrinter.print(SpringApplicationBannerPrinter.java:71)
at org.springframework.boot.SpringApplication.printBanner(SpringApplication.java:505)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:298)
at org.springframework.boot.web.support.SpringBootServletInitializer.run(SpringBootServletInitializer.java:156)
at org.springframework.boot.web.support.SpringBootServletInitializer.createRootApplicationContext(SpringBootServletInitializer.java:136)
at org.springframework.boot.web.support.SpringBootServletInitializer.onStartup(SpringBootServletInitializer.java:91)
at org.springframework.web.SpringServletContainerInitializer.onStartup(SpringServletContainerInitializer.java:169)
at io.undertow.servlet.core.DeploymentManagerImpl$1.call(DeploymentManagerImpl.java:203)
at io.undertow.servlet.core.DeploymentManagerImpl$1.call(DeploymentManagerImpl.java:185)
at io.undertow.servlet.core.ServletRequestContextThreadSetupAction$1.call(ServletRequestContextThreadSetupAction.java:42)
at io.undertow.servlet.core.ContextClassLoaderSetupAction$1.call(ContextClassLoaderSetupAction.java:43)
at org.wildfly.extension.undertow.security.SecurityContextThreadSetupAction.lambda$create$0(SecurityContextThreadSetupAction.java:105)
at org.wildfly.extension.undertow.security.SecurityContextThreadSetupAction$$Lambda$658/1543581401.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction.lambda$create$0(UndertowDeploymentInfoService.java:1502)
at org.wildfly.extension.undertow.deployment.UndertowDeploymentInfoService$UndertowThreadSetupAction$$Lambda$659/1515086559.call(Unknown Source)
at io.undertow.servlet.core.DeploymentManagerImpl.deploy(DeploymentManagerImpl.java:250)
... 10 more
20:04:32,523 ERROR [org.jboss.as.controller.management-operation] (DeploymentScanner-threads - 2) WFLYCTL0013: Operation ("full-replace-deployment") failed - address: ([]) - failure description: {"WFLYCTL0080: Failed services" => {"jboss.deployment.unit.\"cas.war\".undertow-deployment" => "java.lang.RuntimeException: java.lang.IllegalArgumentException: object is not an instance of declaring class
Caused by: java.lang.RuntimeException: java.lang.IllegalArgumentException: object is not an instance of declaring class
Caused by: java.lang.IllegalArgumentException: object is not an instance of declaring class"}}
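One workaround that may be worth trying (untested; the stack trace fails inside AbstractCasBanner.printBanner, so the assumption is that the breakage is confined to banner printing) is to disable Spring Boot's startup banner, which skips the VFS lookup entirely:

# src/main/resources/application.properties
spring.main.banner-mode=off

This only sidesteps the banner code path; if the WildFly 14 VFS incompatibility is hit elsewhere, the root cause would still need to be addressed.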

Spark ThriftServer failed to start in secured mode

I configured Spark 1.4.1 and Hive 1.2.1 on a Hadoop 2.7.1 cluster secured with Kerberos, and started an external metastore with SASL disabled. I can perform basic operations against HiveServer2 with Beeline.
When I try to start the Spark Thrift Server, I get an exception related to the delegation token.
Command
spark-submit --class org.apache.spark.deploy.history.HistoryServer --master yarn-client C:\Spark\lib\spark-core_2.10-1.4.0.jar
Exception in Spark
15/07/28 16:07:31 INFO scheduler.DAGScheduler: Stopping DAGScheduler
15/07/28 16:07:31 INFO cluster.YarnClientSchedulerBackend: Stopped
15/07/28 16:07:31 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
15/07/28 16:07:31 ERROR spark.SparkContext: Error stopping SparkContext after init error.
java.lang.NullPointerException
at org.apache.spark.network.netty.NettyBlockTransferService.close(NettyBlockTransferService.scala:152)
at org.apache.spark.storage.BlockManager.stop(BlockManager.scala:1216)
at org.apache.spark.SparkEnv.stop(SparkEnv.scala:96)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1659)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:565)
at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:51)
at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:73)
at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:665)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:170)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:193)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Exception in thread "main" java.lang.RuntimeException: Unexpected exception
at org.apache.spark.deploy.yarn.Client$.org$apache$spark$deploy$yarn$Client$$obtainTokenForHiveMetastore(Client.scala:1162)
at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:263)
at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:561)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:115)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:57)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:141)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:497)
at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:51)
at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.main(HiveThriftServer2.scala:73)
at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.main(HiveThriftServer2.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:665)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:170)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:193)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.spark.deploy.yarn.Client$.org$apache$spark$deploy$yarn$Client$$obtainTokenForHiveMetastore(Client.scala:1142)
... 18 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: MetaException(message:java.lang.NullPointerException)
at org.apache.hadoop.hive.ql.metadata.Hive.getDelegationToken(Hive.java:2575)
... 23 more
Caused by: MetaException(message:java.lang.NullPointerException)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_delegation_token_result$get_delegation_token_resultStandardScheme.read(ThriftHiveMetastore.java)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_delegation_token_result$get_delegation_token_resultStandardScheme.read(ThriftHiveMetastore.java)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_delegation_token_result.read(ThriftHiveMetastore.java)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:78)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_delegation_token(ThriftHiveMetastore.java:3293)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_delegation_token(ThriftHiveMetastore.java:3279)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getDelegationToken(HiveMetaStoreClient.java:1521)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
at com.sun.proxy.$Proxy23.getDelegationToken(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.getDelegationToken(Hive.java:2572)
... 23 more
15/07/28 16:07:31 INFO storage.DiskBlockManager: Shutdown hook called
Exception in Hive Logs
2015-07-28 16:05:04,543 ERROR [pool-3-thread-5]: metastore.RetryingHMSHandler (RetryingHMSHandler.java:invoke(155)) - MetaException(message:java.lang.NullPointerException)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newMetaException(HiveMetaStore.java:5393)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_delegation_token(HiveMetaStore.java:5272)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
at com.sun.proxy.$Proxy5.get_delegation_token(Unknown Source)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$get_delegation_token.getResult(ThriftHiveMetastore.java:11488)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$get_delegation_token.getResult(ThriftHiveMetastore.java:11472)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at org.apache.hadoop.hive.metastore.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:48)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:285)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:744)
Caused by: java.lang.NullPointerException
at org.apache.hadoop.hive.metastore.HiveMetaStore.getDelegationToken(HiveMetaStore.java:5784)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_delegation_token(HiveMetaStore.java:5256)
... 15 more
Can anyone help me solve this?
This issue was fixed in Spark 1.5.0; the JIRA ticket is linked below.
https://issues.apache.org/jira/browse/SPARK-5111
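As a side note, on a standard Spark distribution the Thrift server is normally started with the bundled launcher script rather than by pointing spark-submit at a class directly, e.g.:

$SPARK_HOME/sbin/start-thriftserver.sh --master yarn-client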

How to set the configuration for running an MR job in CDH5 Hue?

I have successfully installed and started CDH5, but when I try to run a simple WordCount example via Hue, I run into the following error.
2015-05-13 12:58:04,374 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[localhost] USER[hdfs] GROUP[-] TOKEN[] APP[trialWC] JOB[0000000-150513124629466-oozie-oozi-W] ACTION[0000000-150513124629466-oozie-oozi-W#:start:] Start action [0000000-150513124629466-oozie-oozi-W#:start:] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10]
2015-05-13 12:58:04,375 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[localhost] USER[hdfs] GROUP[-] TOKEN[] APP[trialWC] JOB[0000000-150513124629466-oozie-oozi-W] ACTION[0000000-150513124629466-oozie-oozi-W#:start:] [***0000000-150513124629466-oozie-oozi-W#:start:***]Action status=DONE
2015-05-13 12:58:04,375 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[localhost] USER[hdfs] GROUP[-] TOKEN[] APP[trialWC] JOB[0000000-150513124629466-oozie-oozi-W] ACTION[0000000-150513124629466-oozie-oozi-W#:start:] [***0000000-150513124629466-oozie-oozi-W#:start:***]Action updated in DB!
2015-05-13 12:58:04,477 INFO org.apache.oozie.command.wf.ActionStartXCommand: SERVER[localhost] USER[hdfs] GROUP[-] TOKEN[] APP[trialWC] JOB[0000000-150513124629466-oozie-oozi-W] ACTION[0000000-150513124629466-oozie-oozi-W#java-d1fb] Start action [0000000-150513124629466-oozie-oozi-W#java-d1fb] with user-retry state : userRetryCount [0], userRetryMax [0], userRetryInterval [10]
2015-05-13 12:58:05,745 WARN org.apache.oozie.command.wf.ActionStartXCommand: SERVER[localhost] USER[hdfs] GROUP[-] TOKEN[] APP[trialWC] JOB[0000000-150513124629466-oozie-oozi-W] ACTION[0000000-150513124629466-oozie-oozi-W#java-d1fb] Error starting action [java-d1fb]. ErrorType [TRANSIENT], ErrorCode [JA009], Message [JA009: org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException: Invalid resource request, requested memory < 0, or requested memory > max configured, requestedMemory=1536, maxMemory=1024
at org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils.validateResourceRequest(SchedulerUtils.java:203)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.validateAndCreateResourceRequest(RMAppManager.java:377)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.createAndPopulateNewRMApp(RMAppManager.java:320)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.submitApplication(RMAppManager.java:273)
at org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.submitApplication(ClientRMService.java:574)
at org.apache.hadoop.yarn.api.impl.pb.service.ApplicationClientProtocolPBServiceImpl.submitApplication(ApplicationClientProtocolPBServiceImpl.java:213)
at org.apache.hadoop.yarn.proto.ApplicationClientProtocol$ApplicationClientProtocolService$2.callBlockingMethod(ApplicationClientProtocol.java:403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
]
org.apache.oozie.action.ActionExecutorException: JA009: org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException: Invalid resource request, requested memory < 0, or requested memory > max configured, requestedMemory=1536, maxMemory=1024
at org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils.validateResourceRequest(SchedulerUtils.java:203)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.validateAndCreateResourceRequest(RMAppManager.java:377)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.createAndPopulateNewRMApp(RMAppManager.java:320)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.submitApplication(RMAppManager.java:273)
at org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.submitApplication(ClientRMService.java:574)
at org.apache.hadoop.yarn.api.impl.pb.service.ApplicationClientProtocolPBServiceImpl.submitApplication(ApplicationClientProtocolPBServiceImpl.java:213)
at org.apache.hadoop.yarn.proto.ApplicationClientProtocol$ApplicationClientProtocolService$2.callBlockingMethod(ApplicationClientProtocol.java:403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
at org.apache.oozie.action.ActionExecutor.convertExceptionHelper(ActionExecutor.java:456)
at org.apache.oozie.action.ActionExecutor.convertException(ActionExecutor.java:440)
at org.apache.oozie.action.hadoop.JavaActionExecutor.submitLauncher(JavaActionExecutor.java:1008)
at org.apache.oozie.action.hadoop.JavaActionExecutor.start(JavaActionExecutor.java:1162)
at org.apache.oozie.command.wf.ActionStartXCommand.execute(ActionStartXCommand.java:234)
at org.apache.oozie.command.wf.ActionStartXCommand.execute(ActionStartXCommand.java:64)
at org.apache.oozie.command.XCommand.call(XCommand.java:286)
at org.apache.oozie.service.CallableQueueService$CompositeCallable.call(CallableQueueService.java:321)
at org.apache.oozie.service.CallableQueueService$CompositeCallable.call(CallableQueueService.java:250)
at org.apache.oozie.service.CallableQueueService$CallableWrapper.run(CallableQueueService.java:175)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.io.IOException: org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException: Invalid resource request, requested memory < 0, or requested memory > max configured, requestedMemory=1536, maxMemory=1024
at org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils.validateResourceRequest(SchedulerUtils.java:203)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.validateAndCreateResourceRequest(RMAppManager.java:377)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.createAndPopulateNewRMApp(RMAppManager.java:320)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.submitApplication(RMAppManager.java:273)
at org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.submitApplication(ClientRMService.java:574)
at org.apache.hadoop.yarn.api.impl.pb.service.ApplicationClientProtocolPBServiceImpl.submitApplication(ApplicationClientProtocolPBServiceImpl.java:213)
at org.apache.hadoop.yarn.proto.ApplicationClientProtocol$ApplicationClientProtocolService$2.callBlockingMethod(ApplicationClientProtocol.java:403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
at org.apache.hadoop.mapred.YARNRunner.submitJob(YARNRunner.java:306)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:536)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1306)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1303)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1303)
at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:564)
at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:559)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:559)
at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:550)
at org.apache.oozie.action.hadoop.JavaActionExecutor.submitLauncher(JavaActionExecutor.java:993)
... 10 more
Caused by: org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException: Invalid resource request, requested memory < 0, or requested memory > max configured, requestedMemory=1536, maxMemory=1024
at org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils.validateResourceRequest(SchedulerUtils.java:203)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.validateAndCreateResourceRequest(RMAppManager.java:377)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.createAndPopulateNewRMApp(RMAppManager.java:320)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.submitApplication(RMAppManager.java:273)
at org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.submitApplication(ClientRMService.java:574)
at org.apache.hadoop.yarn.api.impl.pb.service.ApplicationClientProtocolPBServiceImpl.submitApplication(ApplicationClientProtocolPBServiceImpl.java:213)
at org.apache.hadoop.yarn.proto.ApplicationClientProtocol$ApplicationClientProtocolService$2.callBlockingMethod(ApplicationClientProtocol.java:403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.hadoop.yarn.ipc.RPCUtil.instantiateException(RPCUtil.java:53)
at org.apache.hadoop.yarn.ipc.RPCUtil.unwrapAndThrowException(RPCUtil.java:101)
at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.submitApplication(ApplicationClientProtocolPBClientImpl.java:235)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy37.submitApplication(Unknown Source)
at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.submitApplication(YarnClientImpl.java:240)
at org.apache.hadoop.mapred.ResourceMgrDelegate.submitApplication(ResourceMgrDelegate.java:290)
at org.apache.hadoop.mapred.YARNRunner.submitJob(YARNRunner.java:290)
... 25 more
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException): Invalid resource request, requested memory < 0, or requested memory > max configured, requestedMemory=1536, maxMemory=1024
at org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils.validateResourceRequest(SchedulerUtils.java:203)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.validateAndCreateResourceRequest(RMAppManager.java:377)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.createAndPopulateNewRMApp(RMAppManager.java:320)
at org.apache.hadoop.yarn.server.resourcemanager.RMAppManager.submitApplication(RMAppManager.java:273)
at org.apache.hadoop.yarn.server.resourcemanager.ClientRMService.submitApplication(ClientRMService.java:574)
at org.apache.hadoop.yarn.api.impl.pb.service.ApplicationClientProtocolPBServiceImpl.submitApplication(ApplicationClientProtocolPBServiceImpl.java:213)
at org.apache.hadoop.yarn.proto.ApplicationClientProtocol$ApplicationClientProtocolService$2.callBlockingMethod(ApplicationClientProtocol.java:403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1060)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2044)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2040)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1671)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2038)
at org.apache.hadoop.ipc.Client.call(Client.java:1468)
at org.apache.hadoop.ipc.Client.call(Client.java:1399)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy36.submitApplication(Unknown Source)
at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.submitApplication(ApplicationClientProtocolPBClientImpl.java:232)
... 35 more
PS: I have already set the Container Memory Maximum (yarn.scheduler.maximum-allocation-mb) to 2 GB, with no effect.
I found the configuration that needs to be changed: the Container Memory (yarn.nodemanager.resource.memory-mb). I set it to 2 GB, which is sufficient for my current usage. Other values can also be tried, keeping in mind that a request must not exceed the configured maximum memory.
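For reference, the two properties involved live in yarn-site.xml; a minimal sketch with illustrative values (size them to your nodes' actual memory):

<property>
  <name>yarn.nodemanager.resource.memory-mb</name>
  <value>2048</value>
</property>
<property>
  <name>yarn.scheduler.maximum-allocation-mb</name>
  <value>2048</value>
</property>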

Play framework 2 crashes on validation error

Play framework 2.2.0 crashes when the user enters an out-of-range value in a form field that is bound to a model field carrying the @Min annotation.
To rule out a problem in my own models, I modified the computer-database sample that ships with Play: I added an Integer field (testVal) to the Computer.java model, added an inputText for it to editForm.scala.html, and added the Ebean auto-DDL generation comment to the first line of 1.sql.
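The model change was along these lines (a minimal sketch; the constraint value is illustrative):

// In Computer.java: the added field, constrained with javax.validation.constraints.Min
@Min(1)
public Integer testVal;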
Now, when I edit a Computer and enter a value below the minimum for the testVal field, Play crashes and this error is displayed:
[info] play - database [default] connected at jdbc:h2:mem:play
[info] play - Application started (Dev)
[ERROR] [02/09/2014 08:54:52.050] [play-akka.actor.default-dispatcher-3] [ActorSystem(play)] Uncaught error from thread [play-akka.actor.default-dispatcher-3] shutting down JVM since 'akka.jvm-exit-on-fatal-error' is enabled
java.lang.NoClassDefFoundError: javax/el/PropertyNotFoundException
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateExpression(ResourceBundleMessageInterpolator.java:227)
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateMessage(ResourceBundleMessageInterpolator.java:187)
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolate(ResourceBundleMessageInterpolator.java:115)
at org.hibernate.validator.internal.engine.ValidationContext.interpolate(ValidationContext.java:370)
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolation(ValidationContext.java:284)
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolations(ValidationContext.java:246)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateSingleConstraint(ConstraintTree.java:289)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateConstraints(ConstraintTree.java:133)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateConstraints(ConstraintTree.java:91)
at org.hibernate.validator.internal.metadata.core.MetaConstraint.validateConstraint(MetaConstraint.java:85)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraint(ValidatorImpl.java:478)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraintsForDefaultGroup(ValidatorImpl.java:424)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraintsForCurrentGroup(ValidatorImpl.java:388)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateInContext(ValidatorImpl.java:340)
at org.hibernate.validator.internal.engine.ValidatorImpl.validate(ValidatorImpl.java:158)
at org.springframework.validation.beanvalidation.SpringValidatorAdapter.validate(SpringValidatorAdapter.java:199)
at play.data.Form.bind(Form.java:327)
at play.data.Form.bindFromRequest(Form.java:215)
at controllers.Application.update(Application.java:67)
at Routes$$anonfun$routes$1$$anonfun$applyOrElse$6$$anonfun$apply$6.apply(routes_routing.scala:113)
at Routes$$anonfun$routes$1$$anonfun$applyOrElse$6$$anonfun$apply$6.apply(routes_routing.scala:113)
at play.core.Router$HandlerInvoker$$anon$7$$anon$2.invocation(Router.scala:183)
at play.core.Router$Routes$$anon$1.invocation(Router.scala:377)
at play.core.j.JavaAction$$anon$1.call(JavaAction.scala:56)
at play.core.j.JavaAction$$anon$3.apply(JavaAction.scala:91)
at play.core.j.JavaAction$$anon$3.apply(JavaAction.scala:90)
at play.core.j.FPromiseHelper$$anonfun$flatMap$1.apply(FPromiseHelper.scala:82)
at play.core.j.FPromiseHelper$$anonfun$flatMap$1.apply(FPromiseHelper.scala:82)
at scala.concurrent.Future$$anonfun$flatMap$1.apply(Future.scala:278)
at scala.concurrent.Future$$anonfun$flatMap$1.apply(Future.scala:274)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:29)
at play.core.j.HttpExecutionContext$$anon$2.run(HttpExecutionContext.scala:37)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: java.lang.ClassNotFoundException: javax.el.PropertyNotFoundException
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 38 more
[error] a.a.ActorSystemImpl - Uncaught error from thread [play-akka.actor.default-dispatcher-3] shutting down JVM since 'akka.jvm-exit-on-fatal-error' is enabled
java.lang.NoClassDefFoundError: javax/el/PropertyNotFoundException
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateExpression(ResourceBundleMessageInterpolator.java:227) ~[hibernate-validator.jar:5.0.1.Final]
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateMessage(ResourceBundleMessageInterpolator.java:187) ~[hibernate-validator.jar:5.0.1.Final]
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolate(ResourceBundleMessageInterpolator.java:115) ~[hibernate-validator.jar:5.0.1.Final]
at org.hibernate.validator.internal.engine.ValidationContext.interpolate(ValidationContext.java:370) ~[hibernate-validator.jar:5.0.1.Final]
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolation(ValidationContext.java:284) ~[hibernate-validator.jar:5.0.1.Final]
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolations(ValidationContext.java:246) ~[hibernate-validator.jar:5.0.1.Final]
Caused by: java.lang.ClassNotFoundException: javax.el.PropertyNotFoundException
at java.net.URLClassLoader$1.run(URLClassLoader.java:366) ~[na:1.7.0_51]
at java.net.URLClassLoader$1.run(URLClassLoader.java:355) ~[na:1.7.0_51]
at java.security.AccessController.doPrivileged(Native Method) ~[na:1.7.0_51]
at java.net.URLClassLoader.findClass(URLClassLoader.java:354) ~[na:1.7.0_51]
at java.lang.ClassLoader.loadClass(ClassLoader.java:425) ~[na:1.7.0_51]
at java.lang.ClassLoader.loadClass(ClassLoader.java:358) ~[na:1.7.0_51]
Uncaught error from thread [play-akka.actor.default-dispatcher-3] shutting down JVM since 'akka.jvm-exit-on-fatal-error' is enabled for ActorSystem[play]
java.lang.NoClassDefFoundError: javax/el/PropertyNotFoundException
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateExpression(ResourceBundleMessageInterpolator.java:227)
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolateMessage(ResourceBundleMessageInterpolator.java:187)
at org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator.interpolate(ResourceBundleMessageInterpolator.java:115)
at org.hibernate.validator.internal.engine.ValidationContext.interpolate(ValidationContext.java:370)
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolation(ValidationContext.java:284)
at org.hibernate.validator.internal.engine.ValidationContext.createConstraintViolations(ValidationContext.java:246)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateSingleConstraint(ConstraintTree.java:289)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateConstraints(ConstraintTree.java:133)
at org.hibernate.validator.internal.engine.constraintvalidation.ConstraintTree.validateConstraints(ConstraintTree.java:91)
at org.hibernate.validator.internal.metadata.core.MetaConstraint.validateConstraint(MetaConstraint.java:85)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraint(ValidatorImpl.java:478)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraintsForDefaultGroup(ValidatorImpl.java:424)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateConstraintsForCurrentGroup(ValidatorImpl.java:388)
at org.hibernate.validator.internal.engine.ValidatorImpl.validateInContext(ValidatorImpl.java:340)
at org.hibernate.validator.internal.engine.ValidatorImpl.validate(ValidatorImpl.java:158)
at org.springframework.validation.beanvalidation.SpringValidatorAdapter.validate(SpringValidatorAdapter.java:199)
at play.data.Form.bind(Form.java:327)
at play.data.Form.bindFromRequest(Form.java:215)
at controllers.Application.update(Application.java:67)
at Routes$$anonfun$routes$1$$anonfun$applyOrElse$6$$anonfun$apply$6.apply(routes_routing.scala:113)
at Routes$$anonfun$routes$1$$anonfun$applyOrElse$6$$anonfun$apply$6.apply(routes_routing.scala:113)
at play.core.Router$HandlerInvoker$$anon$7$$anon$2.invocation(Router.scala:183)
at play.core.Router$Routes$$anon$1.invocation(Router.scala:377)
at play.core.j.JavaAction$$anon$1.call(JavaAction.scala:56)
at play.core.j.JavaAction$$anon$3.apply(JavaAction.scala:91)
at play.core.j.JavaAction$$anon$3.apply(JavaAction.scala:90)
at play.core.j.FPromiseHelper$$anonfun$flatMap$1.apply(FPromiseHelper.scala:82)
at play.core.j.FPromiseHelper$$anonfun$flatMap$1.apply(FPromiseHelper.scala:82)
at scala.concurrent.Future$$anonfun$flatMap$1.apply(Future.scala:278)
at scala.concurrent.Future$$anonfun$flatMap$1.apply(Future.scala:274)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:29)
at play.core.j.HttpExecutionContext$$anon$2.run(HttpExecutionContext.scala:37)
at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:42)
at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: java.lang.ClassNotFoundException: javax.el.PropertyNotFoundException
at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
... 38 more
Is it possible that my Play framework installation is corrupted?
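A corrupted installation is unlikely: the NoClassDefFoundError indicates a missing dependency. Hibernate Validator 5.x interpolates constraint messages through the Unified EL API, and javax.el is not on Play 2.2's default classpath. A plausible fix (a sketch; the artifact version is illustrative) is to add an EL implementation in build.sbt:

libraryDependencies += "org.glassfish" % "javax.el" % "3.0.0"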
