I get this exception every time I run Gradle. No matter where I run it, the build fails with org/apache/http/ssl/SSLInitializationException as the reason.
I have no idea what is causing it or why it happens, and Gradle seems completely unable to do anything at all.
$ gradle --version
------------------------------------------------------------
Gradle 2.13
------------------------------------------------------------
Build time: 2016-09-04 20:51:16 UTC
Build number: none
Revision: <unknown>
Groovy: 2.4.7
Ant: Apache Ant(TM) version 1.9.4 compiled on October 7 2014
JVM: 1.8.0_101 (Oracle Corporation 25.101-b13)
OS: Linux 3.16.0-4-amd64 amd64
Does anyone have an idea what is going on here and why it happens?
Full stack trace:
FAILURE: Build failed with an exception.
* What went wrong:
org/apache/http/ssl/SSLInitializationException
* Try:
Run with --info or --debug option to get more log output.
* Exception is:
java.lang.NoClassDefFoundError: org/apache/http/ssl/SSLInitializationException
at org.gradle.internal.resource.transport.http.DefaultSslContextFactory.<init>(DefaultSslContextFactory.java:53)
at org.gradle.internal.resource.transport.http.HttpResourcesPluginServiceRegistry$GlobalScopeServices.createSslContextFactory(HttpResourcesPluginServiceRegistry.java:48)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:75)
at org.gradle.internal.service.DefaultServiceRegistry.invoke(DefaultServiceRegistry.java:426)
at org.gradle.internal.service.DefaultServiceRegistry.access$1200(DefaultServiceRegistry.java:62)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryMethodService.invokeMethod(DefaultServiceRegistry.java:739)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.create(DefaultServiceRegistry.java:695)
at org.gradle.internal.service.DefaultServiceRegistry$ManagedObjectProvider.getInstance(DefaultServiceRegistry.java:537)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.get(DefaultServiceRegistry.java:577)
at org.gradle.internal.service.DefaultServiceRegistry.doGet(DefaultServiceRegistry.java:393)
at org.gradle.internal.service.DefaultServiceRegistry.get(DefaultServiceRegistry.java:378)
at org.gradle.internal.service.DefaultServiceRegistry$ParentServices.getService(DefaultServiceRegistry.java:949)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry.getServiceProvider(DefaultServiceRegistry.java:398)
at org.gradle.internal.service.DefaultServiceRegistry.doGet(DefaultServiceRegistry.java:390)
at org.gradle.internal.service.DefaultServiceRegistry.get(DefaultServiceRegistry.java:378)
at org.gradle.internal.service.DefaultServiceRegistry$ParentServices.getService(DefaultServiceRegistry.java:949)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.bind(DefaultServiceRegistry.java:669)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.prepare(DefaultServiceRegistry.java:582)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.getService(DefaultServiceRegistry.java:595)
at org.gradle.internal.service.DefaultServiceRegistry$OwnServices.getService(DefaultServiceRegistry.java:486)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.bind(DefaultServiceRegistry.java:669)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.prepare(DefaultServiceRegistry.java:582)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.getService(DefaultServiceRegistry.java:595)
at org.gradle.internal.service.DefaultServiceRegistry$OwnServices.getService(DefaultServiceRegistry.java:486)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.bind(DefaultServiceRegistry.java:669)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.prepare(DefaultServiceRegistry.java:582)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.getService(DefaultServiceRegistry.java:595)
at org.gradle.internal.service.DefaultServiceRegistry$OwnServices.getService(DefaultServiceRegistry.java:486)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.bind(DefaultServiceRegistry.java:669)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.prepare(DefaultServiceRegistry.java:582)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.getService(DefaultServiceRegistry.java:595)
at org.gradle.internal.service.DefaultServiceRegistry$OwnServices.getService(DefaultServiceRegistry.java:486)
at org.gradle.internal.service.DefaultServiceRegistry$CompositeProvider.getService(DefaultServiceRegistry.java:894)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1120)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext$4.apply(DefaultServiceRegistry.java:1117)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.getServiceProvider(DefaultServiceRegistry.java:1074)
at org.gradle.internal.service.DefaultServiceRegistry$DefaultLookupContext.find(DefaultServiceRegistry.java:1062)
at org.gradle.internal.service.DefaultServiceRegistry.getServiceProvider(DefaultServiceRegistry.java:398)
at org.gradle.internal.service.DefaultServiceRegistry.doGet(DefaultServiceRegistry.java:390)
at org.gradle.internal.service.DefaultServiceRegistry.get(DefaultServiceRegistry.java:374)
at org.gradle.internal.service.scopes.BuildScopeServices.createScriptObjectConfigurerFactory(BuildScopeServices.java:227)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:75)
at org.gradle.internal.service.DefaultServiceRegistry.invoke(DefaultServiceRegistry.java:426)
at org.gradle.internal.service.DefaultServiceRegistry.access$1200(DefaultServiceRegistry.java:62)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryMethodService.invokeMethod(DefaultServiceRegistry.java:739)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.create(DefaultServiceRegistry.java:695)
at org.gradle.internal.service.DefaultServiceRegistry$ManagedObjectProvider.getInstance(DefaultServiceRegistry.java:537)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.get(DefaultServiceRegistry.java:577)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.assembleParameters(DefaultServiceRegistry.java:705)
at org.gradle.internal.service.DefaultServiceRegistry$FactoryService.create(DefaultServiceRegistry.java:694)
at org.gradle.internal.service.DefaultServiceRegistry$ManagedObjectProvider.getInstance(DefaultServiceRegistry.java:537)
at org.gradle.internal.service.DefaultServiceRegistry$SingletonService.get(DefaultServiceRegistry.java:577)
at org.gradle.internal.service.DefaultServiceRegistry.doGet(DefaultServiceRegistry.java:393)
at org.gradle.internal.service.DefaultServiceRegistry.get(DefaultServiceRegistry.java:374)
at org.gradle.initialization.DefaultGradleLauncherFactory.doNewInstance(DefaultGradleLauncherFactory.java:139)
at org.gradle.initialization.DefaultGradleLauncherFactory.newInstance(DefaultGradleLauncherFactory.java:102)
at org.gradle.launcher.exec.InProcessBuildActionExecuter.execute(InProcessBuildActionExecuter.java:38)
at org.gradle.launcher.exec.InProcessBuildActionExecuter.execute(InProcessBuildActionExecuter.java:28)
at org.gradle.launcher.exec.ContinuousBuildActionExecuter.execute(ContinuousBuildActionExecuter.java:81)
at org.gradle.launcher.exec.ContinuousBuildActionExecuter.execute(ContinuousBuildActionExecuter.java:46)
at org.gradle.launcher.exec.DaemonUsageSuggestingBuildActionExecuter.execute(DaemonUsageSuggestingBuildActionExecuter.java:51)
at org.gradle.launcher.exec.DaemonUsageSuggestingBuildActionExecuter.execute(DaemonUsageSuggestingBuildActionExecuter.java:28)
at org.gradle.launcher.cli.RunBuildAction.run(RunBuildAction.java:43)
at org.gradle.internal.Actions$RunnableActionAdapter.execute(Actions.java:173)
at org.gradle.launcher.cli.CommandLineActionFactory$ParseAndBuildAction.execute(CommandLineActionFactory.java:241)
at org.gradle.launcher.cli.CommandLineActionFactory$ParseAndBuildAction.execute(CommandLineActionFactory.java:214)
at org.gradle.launcher.cli.JavaRuntimeValidationAction.execute(JavaRuntimeValidationAction.java:35)
at org.gradle.launcher.cli.JavaRuntimeValidationAction.execute(JavaRuntimeValidationAction.java:24)
at org.gradle.launcher.cli.CommandLineActionFactory$WithLogging.execute(CommandLineActionFactory.java:207)
at org.gradle.launcher.cli.CommandLineActionFactory$WithLogging.execute(CommandLineActionFactory.java:169)
at org.gradle.launcher.cli.ExceptionReportingAction.execute(ExceptionReportingAction.java:33)
at org.gradle.launcher.cli.ExceptionReportingAction.execute(ExceptionReportingAction.java:22)
at org.gradle.launcher.Main.doAction(Main.java:33)
at org.gradle.launcher.bootstrap.EntryPoint.run(EntryPoint.java:45)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.gradle.launcher.bootstrap.ProcessBootstrap.runNoExit(ProcessBootstrap.java:55)
at org.gradle.launcher.bootstrap.ProcessBootstrap.run(ProcessBootstrap.java:36)
at org.gradle.launcher.GradleMain.main(GradleMain.java:23)
Caused by: java.lang.ClassNotFoundException: org.apache.http.ssl.SSLInitializationException
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 116 more
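One detail worth checking: Build number: none and Revision: <unknown> suggest this is a distribution-packaged Gradle (the OS line looks like Debian), and those builds typically link against the system Apache HttpComponents jars. The missing class lives in the org.apache.http.ssl package, which only appeared in httpcore 4.4, so a stale system httpcore would produce exactly this NoClassDefFoundError. A sketch of how to verify, assuming Debian-style package and jar locations (hypothetical paths):
# Hypothetical check: is the system httpcore older than 4.4, and does its
# jar contain the class the stack trace says is missing?
dpkg -l | grep -i httpcore
unzip -l /usr/share/java/httpcore.jar | grep SSLInitializationException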
Related
I have a Django project. It has some files with Cyrillic characters in their names, and SonarQube gives this error:
java.nio.file.InvalidPathException: Malformed input or input contains unmappable characters: src/apps/orders/sap_data_examples/ZLO_CANCEL_ORDERS/?????????? ?????????????? ??????????????.json
at java.base/sun.nio.fs.UnixPath.encode(Unknown Source)
at java.base/sun.nio.fs.UnixPath.<init>(Unknown Source)
at java.base/sun.nio.fs.UnixFileSystem.getPath(Unknown Source)
at java.base/java.nio.file.Path.resolve(Unknown Source)
at org.sonar.scm.git.IncludedFilesRepository.indexFiles(IncludedFilesRepository.java:65)
at org.sonar.scm.git.IncludedFilesRepository.<init>(IncludedFilesRepository.java:40)
at org.sonar.scm.git.GitIgnoreCommand.init(GitIgnoreCommand.java:37)
at org.sonar.scanner.scan.filesystem.ProjectFileIndexer.index(ProjectFileIndexer.java:104)
at org.sonar.scanner.scan.ProjectScanContainer.doAfterStart(ProjectScanContainer.java:352)
at org.sonar.core.platform.ComponentContainer.startComponents(ComponentContainer.java:137)
at org.sonar.core.platform.ComponentContainer.execute(ComponentContainer.java:123)
at org.sonar.scanner.bootstrap.GlobalContainer.doAfterStart(GlobalContainer.java:150)
at org.sonar.core.platform.ComponentContainer.startComponents(ComponentContainer.java:137)
at org.sonar.core.platform.ComponentContainer.execute(ComponentContainer.java:123)
at org.sonar.batch.bootstrapper.Batch.doExecute(Batch.java:72)
at org.sonar.batch.bootstrapper.Batch.execute(Batch.java:66)
at org.sonarsource.scanner.api.internal.batch.BatchIsolatedLauncher.execute(BatchIsolatedLauncher.java:46)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.base/java.lang.reflect.Method.invoke(Unknown Source)
at org.sonarsource.scanner.api.internal.IsolatedLauncherProxy.invoke(IsolatedLauncherProxy.java:60)
at com.sun.proxy.$Proxy0.execute(Unknown Source)
at org.sonarsource.scanner.api.EmbeddedScanner.doExecute(EmbeddedScanner.java:189)
at org.sonarsource.scanner.api.EmbeddedScanner.execute(EmbeddedScanner.java:138)
at org.sonarsource.scanner.cli.Main.execute(Main.java:112)
at org.sonarsource.scanner.cli.Main.execute(Main.java:75)
at org.sonarsource.scanner.cli.Main.main(Main.java:61)
SonarQube is started in gitlab-ci as follows:
analyze:
  stage: analyze
  needs: ["tests"]
  dependencies:
    - tests
  image:
    name: docker-sre.site.ru/sonarqube/scanner:${SONAR_TAG}
    entrypoint: [""]
  before_script:
    - echo code analysis will be executed on ${SONAR_HOST}
  script:
    - /entrypoint.sh ${SONAR_HOST} ${SONAR_LOGIN} -X
  allow_failure: true
  artifacts:
    when: always
    reports:
      junit: report.xml
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
I suppose the error is related to the Cyrillic characters in the file names. If so, how do I fix this? Maybe by specifying an encoding somehow?
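If it is an encoding issue, one possible fix is to force a UTF-8 locale for the scanner's JVM: minimal Docker images often default to a POSIX (ASCII) locale, which is exactly what makes sun.nio.fs.UnixPath.encode choke on non-ASCII names. A sketch, assuming the image ships the C.UTF-8 locale and the custom entrypoint passes SONAR_SCANNER_OPTS through to the scanner:
analyze:
  variables:
    # Assumption: the image provides C.UTF-8; force a UTF-8 locale for the JVM.
    LANG: "C.UTF-8"
    LC_ALL: "C.UTF-8"
    # Also set the JVM encodings explicitly for the scanner process.
    SONAR_SCANNER_OPTS: "-Dfile.encoding=UTF-8 -Dsun.jnu.encoding=UTF-8"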
RUN THIS COMMAND
/bin/nifi.sh stateless RunFromRegistry Once --file ./test/stateless_test1.json
LOG
Note: Use of this command is considered experimental. The commands and approach used may change from time to time.
Java home (JAVA_HOME): /home/deltaman/software/jdk1.8.0_211
Java options (STATELESS_JAVA_OPTS): -Xms1024m -Xmx1024m
13:48:39.835 [main] INFO org.apache.nifi.StatelessNiFi - Unpacking 100 NARs
13:50:51.513 [main] INFO org.apache.nifi.StatelessNiFi - Finished unpacking 100 NARs in 131671 millis
Exception in thread "main" java.lang.reflect.InvocationTargetException
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.nifi.StatelessNiFi.main(StatelessNiFi.java:103)
... 5 more
Caused by: java.nio.file.NoSuchFileException: ./test/stateless_test1.json
at sun.nio.fs.UnixException.translateToIOException(UnixException.java:86)
at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:102)
at sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:107)
at sun.nio.fs.UnixFileSystemProvider.newByteChannel(UnixFileSystemProvider.java:214)
at java.nio.file.Files.newByteChannel(Files.java:361)
at java.nio.file.Files.newByteChannel(Files.java:407)
at java.nio.file.Files.readAllBytes(Files.java:3152)
at org.apache.nifi.stateless.runtimes.Program.runLocal(Program.java:119)
at org.apache.nifi.stateless.runtimes.Program.launch(Program.java:67)
... 10 more
It says the file does not exist, but I can find the file:
$ cat ./test/stateless_test1.json
{
"registryUrl": "http://10.148.123.12:9991",
"bucketId": "ec1b291e-c3f1-437c-a4e4-c069bd2f6ed1",
"flowId": "b1f73fe8-2874-47a5-970c-6b25eea19497",
"parameters": {
"text" : "xixixixi"
}
}
CONFIGURATION
I don't know what the problem is. Any suggestion is appreciated!
/bin/nifi.sh stateless RunFromRegistry Once --file ./test/stateless_test1.json
It is a relative path; you must use the full path, such as:
/home/NiFi/nifi-1.10.0/bin/nifi.sh stateless RunFromRegistry Once --file /home/NiFi/nifi-1.10.0/test/stateless_test1.json
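If you would rather keep the relative layout, you can also expand the path at invocation time, e.g.:
# Resolve the relative path against the current directory instead of hard-coding it:
/home/NiFi/nifi-1.10.0/bin/nifi.sh stateless RunFromRegistry Once --file "$(pwd)/test/stateless_test1.json"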
I have installed Spark on my laptop and I am trying to execute some very basic commands. Most of them work except .saveAsTextFile. In the pyspark shell I wrote:
nums = sc.parallelize([1,2,3])
nums.saveAsTextFile("file:///C:/Java/ouput")
The last statement, saveAsTextFile, gives me the following error:
[Stage 0:> (0 + 8) / 8]
2018-03-24 11:48:14 ERROR Executor:91 - Exception in task 6.0 in stage 0.0 (TID 6)
ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2018-03-24 11:48:14 WARN TaskSetManager:66 - Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2018-03-24 11:48:14 ERROR TaskSetManager:70 - Task 6 in stage 0.0 failed 1 times; aborting job
2018-03-24 11:48:14 ERROR SparkHadoopWriter:91 - Aborting job job_20180324114813_0003.
org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 1 times, most recent failure: Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2080)
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1493)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1472)
at org.apache.spark.api.java.JavaRDDLike$class.saveAsTextFile(JavaRDDLike.scala:550)
at org.apache.spark.api.java.AbstractJavaRDDLike.saveAsTextFile(JavaRDDLike.scala:45)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
Caused by: ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
... 1 more
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\spark-2.3.0-bin-hadoop2.7\python\pyspark\rdd.py", line 1568, in saveAsTextFile
keyed._jrdd.map(self.ctx._jvm.BytesToString()).saveAsTextFile(path)
File "C:\spark-2.3.0-bin-hadoop2.7\python\lib\py4j-0.10.6-src.zip\py4j\java_gateway.py", line 1160, in __call__
File "C:\spark-2.3.0-bin-hadoop2.7\python\pyspark\sql\utils.py", line 63, in deco
return f(*a, **kw)
File "C:\spark-2.3.0-bin-hadoop2.7\python\lib\py4j-0.10.6-src.zip\py4j\protocol.py", line 320, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o39.saveAsTextFile.
: org.apache.spark.SparkException: Job aborted.
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:96)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1493)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1472)
at org.apache.spark.api.java.JavaRDDLike$class.saveAsTextFile(JavaRDDLike.scala:550)
at org.apache.spark.api.java.AbstractJavaRDDLike.saveAsTextFile(JavaRDDLike.scala:45)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 1 times, most recent failure: Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2080)
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
... 41 more
The error message was too long and mostly repetitive, so I posted most of it but couldn't include the last part due to the size constraint.
This seems very basic and still somehow doesn't work. I am not sure what I might be doing wrong. Most of the suggestions I have found so far have not worked for me. Any help will be greatly appreciated.
Have you set up winutils.exe correctly?
Set the HADOOP_HOME environment variable correctly and make sure that winutils.exe can actually execute. If it can't, you may need to install the MSVC runtime library: exit code -1073741701 is 0xC000007B (STATUS_INVALID_IMAGE_FORMAT), which typically points at a missing runtime DLL or a 32-bit/64-bit mismatch.
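A minimal setup to verify this, assuming the Hadoop binaries are unpacked to C:\hadoop (a hypothetical path):
:: Point HADOOP_HOME at the directory that contains bin\winutils.exe.
setx HADOOP_HOME "C:\hadoop"
setx PATH "%PATH%;%HADOOP_HOME%\bin"
:: Smoke test (open a new shell first, setx only affects new processes).
:: If winutils itself fails with -1073741701, install the MSVC runtime
:: or use a winutils.exe build matching your OS bitness.
%HADOOP_HOME%\bin\winutils.exe ls C:\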
Actually, you should write to HDFS (the Hadoop File System) by default.
I have simulated a session on a single cluster node. Here is the full list of commands for creating the list, writing it to HDFS and finally printing the results on the console using hdfs:
spark-shell
After the shell has started you type:
val nums = sc.parallelize(List(1,2,3,4,5))
nums.saveAsTextFile("/tmp/simple_list")
:quit
Now we read the data from HDFS (Hadoop File System):
hdfs dfs -ls /tmp/simple_list
This prints something like:
Found 3 items
-rw-r--r-- 1 gil_fernandes hadoop 0 2018-03-24 16:39 /tmp/simple_list/_SUCCESS
-rw-r--r-- 1 gil_fernandes hadoop 4 2018-03-24 16:39 /tmp/simple_list/part-00000
-rw-r--r-- 1 gil_fernandes hadoop 6 2018-03-24 16:39 /tmp/simple_list/part-00001
Finally you print out the content of the files using hdfs again:
hdfs dfs -cat /tmp/simple_list/part-*
This prints out:
1
2
3
4
5
You can also use the hdfs dfs -cat command with a shell redirect to copy the file out of HDFS:
hdfs dfs -cat /tmp/simple_list/part-* > simple_list.txt
Update
If you want to run saveAsTextFile using the file:// protocol, you should typically run spark-shell as the user that runs your Spark cluster.
These are the steps I have used to save as a text file on the local file system:
Bash:
sudo -i
su - yarn
spark-shell
Spark-shell:
val nums = sc.parallelize(List(1,2,3,4,5))
nums.saveAsTextFile("file:///tmp/simple_list_12")
:quit
Bash:
ls /tmp/simple_list_12
Output of the last command:
part-00000 part-00001 _SUCCESS
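For completeness, here is the equivalent of the asker's pyspark session once winutils and permissions are sorted out (the output directory is hypothetical and must not already exist, or saveAsTextFile will fail):
nums = sc.parallelize([1, 2, 3])
# file:// writes to the local filesystem of each executor.
nums.saveAsTextFile("file:///C:/Java/output")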
I'm new to OpenDaylight and I was following a tutorial to build a simple Hello World project (this tutorial), but when I run the project with ./karaf and check whether the module is initialized with log:display | grep hello, I get this error:
2017-08-04 12:43:57,159 | INFO | Event Dispatcher | YangTextSchemaContextResolver | 55 - org.opendaylight.yangtools.yang-parser-impl - 1.0.2.Boron-SR2 | Provided module name /META-INF/yang/hello.yang#0000-00-00.yang does not match actual text hello#2015-01-05.yang, corrected
2017-08-04 12:44:01,928 | INFO | Event Dispatcher | YangTextSchemaContextResolver | 55 - org.opendaylight.yangtools.yang-parser-impl - 1.0.2.Boron-SR2 | Provided module name /META-INF/yang/hello.yang#0000-00-00.yang does not match actual text hello#2015-01-05.yang, corrected
2017-08-04 12:44:08,295 | INFO | Event Dispatcher | BlueprintBundleTracker | 148 - org.opendaylight.controller.blueprint - 0.5.2.Boron-SR2 | Creating blueprint container for bundle org.opendaylight.hello.impl_0.1.0.SNAPSHOT [174] with paths [bundleentry://174.fwk592688102/org/opendaylight/blueprint/impl-blueprint.xml]
2017-08-04 12:44:08,318 | ERROR | Event Dispatcher | BlueprintContainerImpl | 15 - org.apache.aries.blueprint.core - 1.6.1 | Unable to start blueprint container for bundle org.opendaylight.hello.impl/0.1.0.SNAPSHOT
With the diag command I get this output:
opendaylight-user#root>diag
hello-impl (174)
----------------
Status: Failure
Blueprint
4/08/17 14:12
Exception:
Unable to validate xml
org.osgi.service.blueprint.container.ComponentDefinitionException: Unable to validate xml
at org.apache.aries.blueprint.parser.Parser.validate(Parser.java:349)
at org.apache.aries.blueprint.parser.Parser.validate(Parser.java:336)
at org.apache.aries.blueprint.container.BlueprintContainerImpl.doRun(BlueprintContainerImpl.java:343)
at org.apache.aries.blueprint.container.BlueprintContainerImpl.run(BlueprintContainerImpl.java:276)
at org.apache.aries.blueprint.container.BlueprintExtender.createContainer(BlueprintExtender.java:300)
at org.apache.aries.blueprint.container.BlueprintExtender.createContainer(BlueprintExtender.java:269)
at org.apache.aries.blueprint.container.BlueprintExtender.access$900(BlueprintExtender.java:68)
at org.apache.aries.blueprint.container.BlueprintExtender$BlueprintContainerServiceImpl.createContainer(BlueprintExtender.java:602)
at org.opendaylight.controller.blueprint.BlueprintBundleTracker.modifiedBundle(BlueprintBundleTracker.java:178)
at org.opendaylight.controller.blueprint.BlueprintBundleTracker.addingBundle(BlueprintBundleTracker.java:159)
at org.opendaylight.controller.blueprint.BlueprintBundleTracker.addingBundle(BlueprintBundleTracker.java:51)
at org.osgi.util.tracker.BundleTracker$Tracked.customizerAdding(BundleTracker.java:467)
at org.osgi.util.tracker.BundleTracker$Tracked.customizerAdding(BundleTracker.java:414)
at org.osgi.util.tracker.AbstractTracked.trackAdding(AbstractTracked.java:256)
at org.osgi.util.tracker.AbstractTracked.track(AbstractTracked.java:229)
at org.osgi.util.tracker.BundleTracker$Tracked.bundleChanged(BundleTracker.java:443)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.dispatchEvent(BundleContextImpl.java:847)
at org.eclipse.osgi.framework.eventmgr.EventManager.dispatchEvent(EventManager.java:230)
at org.eclipse.osgi.framework.eventmgr.ListenerQueue.dispatchEventSynchronous(ListenerQueue.java:148)
at org.eclipse.osgi.framework.internal.core.Framework.publishBundleEventPrivileged(Framework.java:1568)
at org.eclipse.osgi.framework.internal.core.Framework.publishBundleEvent(Framework.java:1504)
at org.eclipse.osgi.framework.internal.core.Framework.publishBundleEvent(Framework.java:1499)
at org.eclipse.osgi.framework.internal.core.BundleHost.startWorker(BundleHost.java:391)
at org.eclipse.osgi.framework.internal.core.AbstractBundle.resume(AbstractBundle.java:390)
at org.eclipse.osgi.framework.internal.core.Framework.resumeBundle(Framework.java:1176)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:559)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:544)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.incFWSL(StartLevelManager.java:457)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.doSetStartLevel(StartLevelManager.java:243)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:438)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:1)
at org.eclipse.osgi.framework.eventmgr.EventManager.dispatchEvent(EventManager.java:230)
at org.eclipse.osgi.framework.eventmgr.EventManager$EventThread.run(EventManager.java:340)
Caused by: org.xml.sax.SAXParseException: cvc-complex-type.2.3: Element 'blueprint' cannot have character [children], because the type's content type is element-only.
at org.apache.xerces.util.ErrorHandlerWrapper.createSAXParseException(Unknown Source)
at org.apache.xerces.util.ErrorHandlerWrapper.error(Unknown Source)
at org.apache.xerces.impl.XMLErrorReporter.reportError(Unknown Source)
at org.apache.xerces.impl.XMLErrorReporter.reportError(Unknown Source)
at org.apache.xerces.impl.XMLErrorReporter.reportError(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator$XSIErrorReporter.reportError(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.reportSchemaError(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.elementLocallyValidComplexType(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.elementLocallyValidType(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.processElementContent(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.handleEndElement(Unknown Source)
at org.apache.xerces.impl.xs.XMLSchemaValidator.endElement(Unknown Source)
at org.apache.xerces.jaxp.validation.DOMValidatorHelper.finishNode(Unknown Source)
at org.apache.xerces.jaxp.validation.DOMValidatorHelper.validate(Unknown Source)
at org.apache.xerces.jaxp.validation.DOMValidatorHelper.validate(Unknown Source)
at org.apache.xerces.jaxp.validation.ValidatorImpl.validate(Unknown Source)
at javax.xml.validation.Validator.validate(Unknown Source)
at org.apache.aries.blueprint.parser.Parser.validate(Parser.java:346)
... 32 more
As I've said, I was following the tutorial, so my files are exactly the same as the ones in the OpenDaylight link (this is the repository I've created on GitHub).
I think it's important to say how I generated the project. This is the command:
mvn archetype:generate -DarchetypeGroupId=org.opendaylight.controller -DarchetypeArtifactId=opendaylight-startup-archetype -DarchetypeRepository=https://nexus.opendaylight.org/content/repositories/public/ -DarchetypeCatalog=remote -DarchetypeVersion=1.2.2-Boron-SR2
Thank you all,
Daniel Romero Morcillo
In the logs you provided:
Element 'blueprint' cannot have character [children], because the type's content type is element-only.
So I think there are simply some errors (invalid XML) in your blueprint file.
If it is exactly like the one in the link you provided [here], there are some stray characters on line 19.
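For illustration, a schematic blueprint file (not the tutorial's exact content) showing what the validator expects:
<blueprint xmlns="http://www.osgi.org/xmlns/blueprint/v1.0.0">
    <!-- Only child elements are allowed directly under <blueprint>; any
         stray characters outside a tag (e.g. leftover pasted text) trigger
         cvc-complex-type.2.3 during validation. -->
    <bean id="provider" class="org.opendaylight.hello.impl.HelloProvider" />
</blueprint>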
Calling chmod to add the execute permission is not sticking:
-bash-3.2$ hadoop dfs -ls /user/myuser/myapp/
Found 1 items
-rw-rw-rw- 3 myuser myuser 2056 2013-09-13 23:14 /user/myuser/myapp/paf-rules.properties
-bash-3.2$ hadoop dfs -chmod ugo+x /user/myuser/myapp/*
-bash-3.2$ hadoop dfs -ls /user/myuser/myapp/
Found 1 items
-rw-rw-rw- 3 myuser myuser 2056 2013-09-13 23:14 /user/myuser/myapp/paf-rules.properties
We then get a permissions error in which HDFS complains that the EXECUTE permission is missing:
Unable to open readFileFromHdfs based rules file /user/myuser/dqm/paf-rules.properties : org.apache.hadoop.security.AccessControlException: org.apache.hadoop.security.AccessControlException: Permission denied: user=myuser, access=EXECUTE, inode="paf-rules.properties":myuser:myuser:rw-rw-rw-
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:95)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:57)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:960)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:524)
at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:768)
at com.apple.ist.gbi.edw.etl.fwrk.udf.dqm.UDFPafDqm.readFileFromHdfs(UDFPafDqm.java:81)
at com.apple.ist.gbi.edw.etl.fwrk.udf.dqm.UDFPafDqm.<init>(UDFPafDqm.java:110)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:39)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:113)
at org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge.initialize(GenericUDFBridge.java:145)
at org.apache.hadoop.hive.ql.udf.generic.GenericUDF.initializeAndFoldConstants(GenericUDF.java:98)
at org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc.newInstance(ExprNodeGenericFuncDesc.java:214)
at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.getXpathOrFuncExprNodeDesc(TypeCheckProcFactory.java:767)
at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory$DefaultExprProcessor.process(TypeCheckProcFactory.java:888)
at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:89)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:88)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:125)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:102)
at org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory.genExprNode(TypeCheckProcFactory.java:165)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genExprNodeDesc(SemanticAnalyzer.java:7755)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:2310)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genSelectPlan(SemanticAnalyzer.java:2112)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPostGroupByBodyPlan(SemanticAnalyzer.java:6165)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genBodyPlan(SemanticAnalyzer.java:6136)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:6762)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:6702)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:6723)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:7531)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:244)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:431)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:336)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:909)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:258)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:215)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:406)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:689)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:557)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: org.apache.hadoop.ipc.RemoteException: org.apache.hadoop.security.AccessControlException: Permission denied: user=myuser, access=EXECUTE, inode="paf-rules.properties":myuser:myuser:rw-rw-rw-
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:199)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:155)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:125)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:5222)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkTraverse(FSNamesystem.java:5201)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:2027)
at org.apache.hadoop.hdfs.server.namenode.NameNode.getFileInfo(NameNode.java:825)
at sun.reflect.GeneratedMethodAccessor28.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1388)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1384)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1122)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1382)
at org.apache.hadoop.ipc.Client.call(Client.java:1092)
at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
at $Proxy6.getFileInfo(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
at $Proxy6.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:958)
... 42 more
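Two observations from the trace, offered tentatively: the failure comes from FSPermissionChecker.checkTraverse on inode "paf-rules.properties", i.e. HDFS is checking EXECUTE as a directory-traversal permission, which usually means the file is appearing as a non-final component of the path being resolved (for example a trailing slash or something appended after the file name). Also, the chmod above targeted /user/myuser/myapp/ while the failing read is from /user/myuser/dqm/. A quick check using the paths from the log:
# Verify the path the job actually reads, not just the one that was chmod-ed:
hadoop dfs -ls /user/myuser/dqm/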