Deploying a dbExpress Delphi application - Oracle

I'm trying to deploy an Oracle application that uses the dbxora.dll file from dbExpress to another machine. I have included the file with the program, but when I run the program and try to execute a query, it fails with an error saying it could not load dbxora.dll.
I have the following all in a folder together:
dbxora.dll
Application.exe
dbxdrivers.ini
dbxconnection.ini
I have exhausted every place I can think of looking for how exactly to do this, and I just can't figure it out.
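One way to see the exact loader error on the target machine is to try loading the DLL directly. Here is a minimal diagnostic sketch in Python (a standalone check, not part of the Delphi application; run it from the application folder):
import ctypes

try:
    # Attempt to load the driver DLL the same way the application would.
    ctypes.WinDLL(r".\dbxora.dll")
    print("dbxora.dll loaded OK")
except OSError as e:
    # WinError 126 = the DLL or one of its dependencies was not found;
    # WinError 193 = not a valid Win32 application (32/64-bit mismatch).
    print(e)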
As requested, here are the contents of the dbxdrivers.ini file:
[Installed Drivers]
DBXTrace=1
DBXPool=1
DataSnap=1
ASA=1
ASE=1
DB2=1
Firebird=1
Informix=1
Interbase=1
MSSQL=1
MySQL=1
Odbc=1
Oracle=1
SQLite=1
[DataSnap]
DriverUnit=Data.DBXDataSnap
DriverAssemblyLoader=Borland.Data.TDBXClientDriverLoader,Borland.Data.DbxClientDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
Port=211
[ASA]
DriverUnit=Data.DBXSybaseASA
DriverPackageLoader=TDBXDynalinkDriverLoader,DbxCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXSybaseASAMetaDataCommandFactory,DbxSybaseASADriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXSybaseASAMetaDataCommandFactory,Borland.Data.DbxSybaseASADriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverASA
LibraryName=dbxasa.dll
LibraryNameOsx=libsqlasa.dylib
VendorLib=dbodbc*.dll
VendorLibWin64=dbodbc*.dll
VendorLibOsx=libdbodbc12.dylib
HostName=ServerName
Database=DBNAME
User_Name=user
Password=password
Port=
ConnectionString=
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
[ASA TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[ASE]
DriverUnit=Data.DBXSybaseASE
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXSybaseASEMetaDataCommandFactory,DbxSybaseASEDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXSybaseASEMetaDataCommandFactory,Borland.Data.DbxSybaseASEDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverASE
LibraryName=dbxase.dll
VendorLib=libct.dll;libcs.dll
VendorLibWin64=libsybct64.dll;libsybcs64.dll
HostName=ServerName
DataBase=Database Name
User_Name=user
Password=password
BlobSize=-1
TDS Packet Size=512
Client HostName=
Client AppName=
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
[ASE TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[DBXPool]
DelegateDriver=True
DriverName=DBXPool
DriverUnit=Data.DBXPool
DriverPackageLoader=TDBXPoolDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXPoolDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
[DBXTrace]
DelegateDriver=True
DriverName=DBXTrace
DriverUnit=Data.DBXTrace
DriverPackageLoader=TDBXTraceDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXTraceDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
[AutoCommit]
False=0
True=1
[BlockingMode]
False=0
True=1
[WaitOnLocks]
False=1
True=0
[CommitRetain]
False=0
True=1
[OS Authentication]
False=0
True=1
[Multiple Transaction]
False=0
True=1
[Trim Char]
False=0
True=1
[SQLDialect]
1=0
2=1
3=2
[DB2]
DriverUnit=Data.DBXDb2
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXDb2MetaDataCommandFactory,DbxDb2Driver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXDb2MetaDataCommandFactory,Borland.Data.DbxDb2Driver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverDB2
LibraryName=dbxdb2.dll
VendorLib=db2cli.dll
VendorLibWin64=db2cli64.dll
Database=DBNAME
User_Name=user
Password=password
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
Decimal Separator=.
[DB2 TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[Firebird]
DriverUnit=Data.DBXFirebird
DriverPackageLoader=TDBXDynalinkDriverLoader,DbxCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXFirebirdMetaDataCommandFactory,DbxFirebirdDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXFirebirdMetaDataCommandFactory,Borland.Data.DbxFirebirdDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverINTERBASE
LibraryName=dbxfb.dll
LibraryNameOsx=libsqlfb.dylib
VendorLib=fbclient.dll
VendorLibWin64=fbclient.dll
VendorLibOsx=/Library/Frameworks/Firebird.framework/Firebird
BlobSize=-1
CommitRetain=False
Database=database.fdb
ErrorResourceFile=
LocaleCode=0000
Password=masterkey
RoleName=RoleName
ServerCharSet=
SQLDialect=3
IsolationLevel=ReadCommitted
User_Name=sysdba
WaitOnLocks=True
Trim Char=False
[Informix]
DriverUnit=Data.DBXInformix
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXInformixMetaDataCommandFactory,DbxInformixDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXInformixMetaDataCommandFactory,Borland.Data.DbxInformixDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverINFORMIX
LibraryName=dbxinf.dll
LibraryNameOsx=libsqlinf.dylib
VendorLib=isqlb09a.dll
VendorLibWin64=isqlt09a.dll
VendorLibOsx=libifcli.dylib
HostName=ServerName
DataBase=Database Name
User_Name=user
Password=password
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
Trim Char=False
[Informix TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[Interbase]
DriverUnit=Data.DBXInterBase
DriverPackageLoader=TDBXDynalinkDriverLoader,DbxCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXInterbaseMetaDataCommandFactory,DbxInterBaseDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXInterbaseMetaDataCommandFactory,Borland.Data.DbxInterBaseDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverINTERBASE
LibraryName=dbxint.dll
LibraryNameOsx=libsqlib.dylib
VendorLib=GDS32.DLL
VendorLibWin64=ibclient64.dll
VendorLibOsx=libgds.dylib
BlobSize=-1
CommitRetain=False
Database=database.gdb
ErrorResourceFile=
LocaleCode=0000
Password=masterkey
RoleName=RoleName
ServerCharSet=
SQLDialect=3
IsolationLevel=ReadCommitted
User_Name=sysdba
WaitOnLocks=True
Trim Char=False
[IBToGo]
DriverUnit=Data.DBXInterBase
DriverPackageLoader=TDBXDynalinkDriverLoader,DbxCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXInterbaseMetaDataCommandFactory,DbxInterBaseDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXInterbaseMetaDataCommandFactory,Borland.Data.DbxInterBaseDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverINTERBASE
LibraryName=dbxint.dll
LibraryNameOsx=libsqlib.dylib
VendorLib=ibtogo.dll
VendorLibWin64=ibtogo64.dll
VendorLibOsx=libibtogo.dylib
BlobSize=-1
CommitRetain=False
Database=database.gdb
ErrorResourceFile=
LocaleCode=0000
Password=masterkey
RoleName=RoleName
ServerCharSet=
SQLDialect=3
IsolationLevel=ReadCommitted
User_Name=sysdba
WaitOnLocks=True
Trim Char=False
AutoUnloadDriver=True
[Interbase TransIsolation]
ReadCommited=1
RepeatableRead=2
[MSSQL]
SchemaOverride=%.dbo
DriverUnit=Data.DBXMSSQL
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXMsSqlMetaDataCommandFactory,DbxMSSQLDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXMsSqlMetaDataCommandFactory,Borland.Data.DbxMSSQLDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverMSSQL
LibraryName=dbxmss.dll
VendorLib=sqlncli10.dll
VendorLibWin64=sqlncli10.dll
HostName=ServerName
DataBase=Database Name
User_Name=user
Password=password
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
OS Authentication=False
Prepare SQL=False
[MSSQL9]
SchemaOverride=%.dbo
DriverUnit=DBXMSSQL
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXMsSqlMetaDataCommandFactory,DbxMSSQLDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXMsSqlMetaDataCommandFactory,Borland.Data.DbxMSSQLDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverMSSQL
LibraryName=dbxmss9.dll
VendorLib=sqlncli.dll
VendorLibWin64=sqlncli.dll
HostName=ServerName
DataBase=Database Name
User_Name=user
Password=password
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
OS Authentication=False
Prepare SQL=False
[MSSQL TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[MYSQL]
DriverUnit=Data.DBXMySQL
DriverPackageLoader=TDBXDynalinkDriverLoader,DbxCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXMySqlMetaDataCommandFactory,DbxMySQLDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXMySqlMetaDataCommandFactory,Borland.Data.DbxMySQLDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverMYSQL
LibraryName=dbxmys.dll
LibraryNameOsx=libsqlmys.dylib
VendorLib=LIBMYSQL.dll
VendorLibWin64=libmysql.dll
VendorLibOsx=libmysqlclient.dylib
BlobSize=-1
Database=DBNAME
ErrorResourceFile=
HostName=ServerName
LocaleCode=0000
Password=password
User_Name=user
Compressed=False
Encrypted=False
[Odbc]
DriverUnit=Data.DBXOdbc
DriverPackageLoader=TDBXOdbcDriverLoader,DBXOdbcDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXOdbcDriverLoader,Borland.Data.DbxOdbcDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXOdbcMetaDataCommandFactory,DbxOdbcDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXOdbcMetaDataCommandFactory,Borland.Data.DbxOdbcDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
[Oracle]
DriverUnit=Data.DBXOracle
DriverPackageLoader=TDBXDynalinkDriverLoader,DBXCommonDriver170.bpl
DriverAssemblyLoader=Borland.Data.TDBXDynalinkDriverLoader,Borland.Data.DbxCommonDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
MetaDataPackageLoader=TDBXOracleMetaDataCommandFactory,DbxOracleDriver170.bpl
MetaDataAssemblyLoader=Borland.Data.TDBXOracleMetaDataCommandFactory,Borland.Data.DbxOracleDriver,Version=17.0.0.0,Culture=neutral,PublicKeyToken=91d62ebb5b0d1b1b
GetDriverFunc=getSQLDriverORACLE
LibraryName=dbxora.dll
LibraryNameOsx=libsqlora.dylib
VendorLib=oci.dll
VendorLibWin64=oci.dll
VendorLibOsx=libociei.dylib
DataBase=Database Name
User_Name=user
Password=password
BlobSize=-1
ErrorResourceFile=
LocaleCode=0000
IsolationLevel=ReadCommitted
RowsetSize=20
OS Authentication=False
Multiple Transaction=False
Trim Char=False
Decimal Separator=.
[Oracle TransIsolation]
DirtyRead=0
ReadCommited=1
RepeatableRead=2
[Sqlite]
DriverUnit=Data.DbxSqlite
DriverPackageLoader=TDBXSqliteDriverLoader,DBXSqliteDriver170.bpl
MetaDataPackageLoader=TDBXSqliteMetaDataCommandFactory,DbxSqliteDriver170.bpl
Also, here is the error message I am getting on Windows XP:
The application or DLL \Application\Path\dbxora.dll is not a valid windows image. Please check this against your installation diskette
Then on both Windows XP and Windows 7 I get this:
Unable to load dbxora.dll (Error Code 193). It may be missing from the system path

After searching for hours and finally looking up the error code, I found the cause.
I was using the 64-bit version of the DLL instead of the 32-bit one. After correcting this, it works perfectly.
Thanks to everyone for their help.
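For reference, error code 193 is ERROR_BAD_EXE_FORMAT: the image's bitness does not match the process trying to load it. If you want to double-check which build of dbxora.dll is being deployed, reading the machine field of the PE header is enough. A minimal Python sketch of that check (a standalone script, not part of the Delphi project; the path assumes the DLL sits in the current folder):
import struct

def pe_machine(path):
    # Read the COFF machine type of a PE file (0x14c = 32-bit x86, 0x8664 = x64).
    with open(path, "rb") as f:
        f.seek(0x3C)                                  # the DOS header stores the PE header offset here
        pe_offset = struct.unpack("<I", f.read(4))[0]
        f.seek(pe_offset)
        if f.read(4) != b"PE\0\0":
            raise ValueError("not a PE image")
        return struct.unpack("<H", f.read(2))[0]

machine = pe_machine(r"dbxora.dll")
print({0x14C: "32-bit (x86)", 0x8664: "64-bit (x64)"}.get(machine, hex(machine)))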

Related

Spark on Windows gives error in saveAsTextFile

I have installed Spark on my laptop and I am trying to execute some very basic commands. Most of them work except .saveAsTextFile. In the PySpark shell I wrote:
nums=sc.parallelize([1,2,3])
nums.saveAsTextFile("file:///C:/Java/ouput")
The last statement, saveAsTextFile, gives me the following error:
[Stage 0:> (0 + 8) / 8]2018-03-24 11:48:14 ERROR Executor:91 - Exception in task 6.0 in stage 0.0 (TID 6)
ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2018-03-24 11:48:14 WARN TaskSetManager:66 - Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
2018-03-24 11:48:14 ERROR TaskSetManager:70 - Task 6 in stage 0.0 failed 1 times; aborting job
2018-03-24 11:48:14 ERROR SparkHadoopWriter:91 - Aborting job job_20180324114813_0003.
org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 1 times, most recent failure: Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2080)
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1493)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1472)
at org.apache.spark.api.java.JavaRDDLike$class.saveAsTextFile(JavaRDDLike.scala:550)
at org.apache.spark.api.java.AbstractJavaRDDLike.saveAsTextFile(JavaRDDLike.scala:45)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
Caused by: ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
... 1 more
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "C:\spark-2.3.0-bin-hadoop2.7\python\pyspark\rdd.py", line 1568, in saveAsTextFile
keyed._jrdd.map(self.ctx._jvm.BytesToString()).saveAsTextFile(path)
File "C:\spark-2.3.0-bin-hadoop2.7\python\lib\py4j-0.10.6-src.zip\py4j\java_gateway.py", line 1160, in __call__
File "C:\spark-2.3.0-bin-hadoop2.7\python\pyspark\sql\utils.py", line 63, in deco
return f(*a, **kw)
File "C:\spark-2.3.0-bin-hadoop2.7\python\lib\py4j-0.10.6-src.zip\py4j\protocol.py", line 320, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling o39.saveAsTextFile.
: org.apache.spark.SparkException: Job aborted.
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:96)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1096)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1094)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:1067)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:1032)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply$mcV$sp(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$1.apply(PairRDDFunctions.scala:958)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:957)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply$mcV$sp(RDD.scala:1493)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$1.apply(RDD.scala:1472)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1472)
at org.apache.spark.api.java.JavaRDDLike$class.saveAsTextFile(JavaRDDLike.scala:550)
at org.apache.spark.api.java.AbstractJavaRDDLike.saveAsTextFile(JavaRDDLike.scala:45)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:282)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 6 in stage 0.0 failed 1 times, most recent failure: Lost task 6.0 in stage 0.0 (TID 6, localhost, executor driver): ExitCodeException exitCode=-1073741701:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:582)
at org.apache.hadoop.util.Shell.run(Shell.java:479)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:773)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:733)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:225)
at org.apache.hadoop.fs.RawLocalFileSystem$LocalFSFileOutputStream.<init>(RawLocalFileSystem.java:209)
at org.apache.hadoop.fs.RawLocalFileSystem.createOutputStreamWithMode(RawLocalFileSystem.java:307)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:296)
at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:328)
at org.apache.hadoop.fs.ChecksumFileSystem$ChecksumFSOutputSummer.<init>(ChecksumFileSystem.java:398)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:461)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:804)
at org.apache.hadoop.mapred.TextOutputFormat.getRecordWriter(TextOutputFormat.java:123)
at org.apache.spark.internal.io.HadoopMapRedWriteConfigUtil.initWriter(SparkHadoopWriter.scala:224)
at org.apache.spark.internal.io.SparkHadoopWriter$.org$apache$spark$internal$io$SparkHadoopWriter$$executeTask(SparkHadoopWriter.scala:118)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:79)
at org.apache.spark.internal.io.SparkHadoopWriter$$anonfun$3.apply(SparkHadoopWriter.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2080)
at org.apache.spark.internal.io.SparkHadoopWriter$.write(SparkHadoopWriter.scala:78)
... 41 more
The error message was too long and mostly repetitive, so I posted most of it but couldn't include the last part due to the size constraint.
This seems very basic and still somehow doesn't work. I'm not sure what I'm doing wrong; most of the suggestions I have found so far have not worked for me. Any help will be greatly appreciated.
Have you set winutils.exe correctly?
Set the HADOOP_HOME environment variable correctly and make sure that winutils.exe can execute. If it can't, you may need to download the MSVC runtime library. (Exit code -1073741701 is 0xC000007B, STATUS_INVALID_IMAGE_FORMAT, which usually points to a missing runtime DLL or a 32/64-bit mismatch in winutils.exe.)
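As a rough sketch of that setup, assuming winutils.exe has been downloaded to C:\hadoop\bin (a made-up location; adjust it to wherever the file actually lives), the environment can be set in a standalone script before the SparkContext is created:
import os

# Hypothetical winutils.exe location: HADOOP_HOME must point at the folder containing bin\winutils.exe.
os.environ["HADOOP_HOME"] = r"C:\hadoop"
os.environ["PATH"] = r"C:\hadoop\bin;" + os.environ["PATH"]

from pyspark import SparkContext

sc = SparkContext("local[*]", "save-test")
nums = sc.parallelize([1, 2, 3])
nums.saveAsTextFile("file:///C:/tmp/simple_list")   # illustrative output path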
Actually, you should write to HDFS (the Hadoop file system) by default.
I have simulated a session on a single cluster node. Here is the full list of commands for creating the list, writing it to HDFS, and finally printing the results to the console using hdfs:
spark-shell
After the shell has started you type:
val nums = sc.parallelize(List(1,2,3,4,5))
nums.saveAsTextFile("/tmp/simple_list")
:quit
Now we list the data in HDFS (the Hadoop file system):
hdfs dfs -ls /tmp/simple_list
This prints something like:
Found 3 items
-rw-r--r-- 1 gil_fernandes hadoop 0 2018-03-24 16:39 /tmp/simple_list/_SUCCESS
-rw-r--r-- 1 gil_fernandes hadoop 4 2018-03-24 16:39 /tmp/simple_list/part-00000
-rw-r--r-- 1 gil_fernandes hadoop 6 2018-03-24 16:39 /tmp/simple_list/part-00001
Finally, you print out the contents of the files using hdfs again:
hdfs dfs -cat /tmp/simple_list/part-*
This prints out:
1
2
3
4
5
You can also redirect the hdfs dfs -cat command to copy the file contents out of HDFS into a local file:
hdfs dfs -cat /tmp/simple_list/part-* > simple_list.txt
Update
If you want to run saveAsTextFile using the file:// protocol, you should typically run spark-shell as the user that runs your Spark cluster.
These are the steps I used to save a text file on the local file system:
Bash:
sudo -i
su - yarn
spark-shell
Spark-shell:
val nums = sc.parallelize(List(1,2,3,4,5))
nums.saveAsTextFile("file:///tmp/simple_list_12")
:quit
Bash:
ls /tmp/simple_list_12
Output of the last command:
part-00000 part-00001 _SUCCESS

Visual Studio Team Services Build FTP Upload Error

I have an Angular application that I'm continuously deploying with VSTS. The problem is that when I check the "Preserve file paths" option in the Advanced section of the FTP Upload step, I get the following error: "Unable to create remote directory: / due to error: Error: Syntax error in parameters or arguments." If I uncheck "Preserve file paths", the step runs without error, but the files are uploaded to the root directory without any folder structure. I have "Remote directory" set to "/".
FTP Log
2018-03-09T19:15:50.3143303Z ##[section]Starting: FTP Upload: dist
2018-03-09T19:15:50.3146572Z ==============================================================================
2018-03-09T19:15:50.3146703Z Task : FTP Upload
2018-03-09T19:15:50.3146804Z Description : FTP Upload
2018-03-09T19:15:50.3146929Z Version : 1.128.0
2018-03-09T19:15:50.3147035Z Author : Microsoft Corporation
2018-03-09T19:15:50.3147182Z Help : Upload files to a remote machine using the File Transfer Protocol (FTP), or securely with FTPS. [More Information](http://go.microsoft.com/fwlink/?LinkId=809084).
2018-03-09T19:15:50.3147375Z ==============================================================================
2018-03-09T19:15:50.5405453Z ##[debug]agent.workFolder=D:\a
2018-03-09T19:15:50.5433886Z ##[debug]loading inputs and endpoints
2018-03-09T19:15:50.5442724Z ##[debug]loading ENDPOINT_AUTH_PARAMETER_SYSTEMVSSCONNECTION_ACCESSTOKEN
2018-03-09T19:15:50.5453315Z ##[debug]loading ENDPOINT_AUTH_SCHEME_SYSTEMVSSCONNECTION
2018-03-09T19:15:50.5454681Z ##[debug]loading ENDPOINT_AUTH_SYSTEMVSSCONNECTION
2018-03-09T19:15:50.5456230Z ##[debug]loading INPUT_CLEAN
2018-03-09T19:15:50.5458201Z ##[debug]loading INPUT_CLEANCONTENTS
2018-03-09T19:15:50.5460556Z ##[debug]loading INPUT_CREDSTYPE
2018-03-09T19:15:50.5461895Z ##[debug]loading INPUT_FILEPATTERNS
2018-03-09T19:15:50.5462991Z ##[debug]loading INPUT_OVERWRITE
2018-03-09T19:15:50.5464593Z ##[debug]loading INPUT_PASSWORD
2018-03-09T19:15:50.5465181Z ##[debug]loading INPUT_PRESERVEPATHS
2018-03-09T19:15:50.5467054Z ##[debug]loading INPUT_REMOTEPATH
2018-03-09T19:15:50.5469178Z ##[debug]loading INPUT_ROOTFOLDER
2018-03-09T19:15:50.5470250Z ##[debug]loading INPUT_SERVERURL
2018-03-09T19:15:50.5471593Z ##[debug]loading INPUT_TRUSTSSL
2018-03-09T19:15:50.5473030Z ##[debug]loading INPUT_USERNAME
2018-03-09T19:15:50.5475915Z ##[debug]loaded 15
2018-03-09T19:15:50.5900036Z ##[debug]check path : D:\a\_tasks\FtpUpload_6f8c69a5-b023-428e-a125-fccf4efcb929\1.128.0\task.json
2018-03-09T19:15:50.5904137Z ##[debug]set resource file to: D:\a\_tasks\FtpUpload_6f8c69a5-b023-428e-a125-fccf4efcb929\1.128.0\task.json
2018-03-09T19:15:50.5904503Z ##[debug]system.culture=en-US
2018-03-09T19:15:50.5927958Z ##[debug]credsType=inputs
2018-03-09T19:15:50.5931350Z ##[debug]credsType=inputs
2018-03-09T19:15:50.5932838Z ##[debug]serverUrl=**********
2018-03-09T19:15:50.6423877Z ##[debug]username=*******
2018-03-09T19:15:50.6424279Z ##[debug]password=*******
2018-03-09T19:15:50.6424486Z ##[debug]rootFolder=D:\a\1\s\dist
2018-03-09T19:15:50.6424689Z ##[debug]filePatterns=**
2018-03-09T19:15:50.6424889Z ##[debug]remotePath=\
2018-03-09T19:15:50.6425086Z ##[debug]clean=false
2018-03-09T19:15:50.6425562Z ##[debug]cleanContents=false
2018-03-09T19:15:50.6425764Z ##[debug]overwrite=true
2018-03-09T19:15:50.6427302Z ##[debug]preservePaths=true
2018-03-09T19:15:50.6428647Z ##[debug]trustSSL=false
2018-03-09T19:15:50.6433816Z ##[debug]Searching for files to upload
2018-03-09T19:15:50.6437442Z ##[debug]find findPath=D:\a\1\s\dist, options=undefined
2018-03-09T19:15:50.6448064Z ##[debug] D:\a\1\s\dist
2018-03-09T19:15:50.6449295Z ##[debug] is a directory
2018-03-09T19:15:50.6456683Z ##[debug] D:\a\1\s\dist\0.a271095077847919f949.chunk.js
2018-03-09T19:15:50.6457050Z ##[debug] is a file
2018-03-09T19:15:50.6457333Z ##[debug] D:\a\1\s\dist\3rdpartylicenses.txt
2018-03-09T19:15:50.6458297Z ##[debug] is a file
2018-03-09T19:15:50.6458605Z ##[debug] D:\a\1\s\dist\assets
2018-03-09T19:15:50.6459440Z ##[debug] is a directory
2018-03-09T19:15:50.6462139Z ##[debug] D:\a\1\s\dist\assets\fonts
2018-03-09T19:15:50.6463091Z ##[debug] is a directory
2018-03-09T19:15:50.6463391Z ##[debug] D:\a\1\s\dist\assets\fonts\MASTOD__.ttf
2018-03-09T19:15:50.6464026Z ##[debug] is a file
2018-03-09T19:15:50.6464357Z ##[debug] D:\a\1\s\dist\assets\images
2018-03-09T19:15:50.6464622Z ##[debug] is a directory
2018-03-09T19:15:50.6465358Z ##[debug] D:\a\1\s\dist\assets\images\horse_chasing_tail.gif
2018-03-09T19:15:50.6465582Z ##[debug] is a file
2018-03-09T19:15:50.6468019Z ##[debug] D:\a\1\s\dist\assets\images\Yawning_horse.jpg
2018-03-09T19:15:50.6468278Z ##[debug] is a file
2018-03-09T19:15:50.6469393Z ##[debug] D:\a\1\s\dist\favicon.ico
2018-03-09T19:15:50.6469992Z ##[debug] is a file
2018-03-09T19:15:50.6470357Z ##[debug] D:\a\1\s\dist\index.html
2018-03-09T19:15:50.6470493Z ##[debug] is a file
2018-03-09T19:15:50.6470652Z ##[debug] D:\a\1\s\dist\inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:50.6471193Z ##[debug] is a file
2018-03-09T19:15:50.6471369Z ##[debug] D:\a\1\s\dist\main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:50.6471530Z ##[debug] is a file
2018-03-09T19:15:50.6471679Z ##[debug] D:\a\1\s\dist\polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:50.6472451Z ##[debug] is a file
2018-03-09T19:15:50.6472599Z ##[debug] D:\a\1\s\dist\styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:50.6473154Z ##[debug] is a file
2018-03-09T19:15:50.6473332Z ##[debug]15 results
2018-03-09T19:15:50.6482875Z ##[debug]searching for files using: 1 filePatterns: **
2018-03-09T19:15:50.6483815Z ##[debug]win: Win
2018-03-09T19:15:50.6484105Z ##[debug]Candidates found for match: 15
2018-03-09T19:15:50.6484733Z ##[debug]file: D:\a\1\s\dist
2018-03-09T19:15:50.6486022Z ##[debug]file: D:\a\1\s\dist\0.a271095077847919f949.chunk.js
2018-03-09T19:15:50.6486828Z ##[debug]file: D:\a\1\s\dist\3rdpartylicenses.txt
2018-03-09T19:15:50.6487106Z ##[debug]file: D:\a\1\s\dist\assets
2018-03-09T19:15:50.6487816Z ##[debug]file: D:\a\1\s\dist\assets\fonts
2018-03-09T19:15:50.6488210Z ##[debug]file: D:\a\1\s\dist\assets\fonts\MASTOD__.ttf
2018-03-09T19:15:50.6488501Z ##[debug]file: D:\a\1\s\dist\assets\images
2018-03-09T19:15:50.6489321Z ##[debug]file: D:\a\1\s\dist\assets\images\horse_chasing_tail.gif
2018-03-09T19:15:50.6489592Z ##[debug]file: D:\a\1\s\dist\assets\images\Yawning_horse.jpg
2018-03-09T19:15:50.6491507Z ##[debug]file: D:\a\1\s\dist\favicon.ico
2018-03-09T19:15:50.6491670Z ##[debug]file: D:\a\1\s\dist\index.html
2018-03-09T19:15:50.6491836Z ##[debug]file: D:\a\1\s\dist\inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:50.6491987Z ##[debug]file: D:\a\1\s\dist\main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:50.6492939Z ##[debug]file: D:\a\1\s\dist\polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:50.6494252Z ##[debug]file: D:\a\1\s\dist\styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:50.6494547Z ##[debug]searching for files, pattern: D:\a\1\s\dist\**
2018-03-09T19:15:50.6494720Z ##[debug]match patterns: D:\a\1\s\dist\**
2018-03-09T19:15:50.6495418Z ##[debug]match options: [object Object]
2018-03-09T19:15:50.6497148Z ##[debug]applying pattern: D:\a\1\s\dist\**
2018-03-09T19:15:50.6533640Z ##[debug]matched 14 items
2018-03-09T19:15:50.6535574Z ##[debug]Found total matches: 14
2018-03-09T19:15:50.6539444Z ##[debug]adding file: D:\a\1\s\dist\0.a271095077847919f949.chunk.js
2018-03-09T19:15:50.6539661Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6540596Z ##[debug]adding file: D:\a\1\s\dist\3rdpartylicenses.txt
2018-03-09T19:15:50.6540872Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6542092Z ##[debug]adding folder: D:\a\1\s\dist\assets
2018-03-09T19:15:50.6543311Z ##[debug]adding folder: D:\a\1\s\dist\assets\fonts
2018-03-09T19:15:50.6543478Z ##[debug]adding file: D:\a\1\s\dist\assets\fonts\MASTOD__.ttf
2018-03-09T19:15:50.6543651Z ##[debug]adding folder: D:\a\1\s\dist\assets\fonts
2018-03-09T19:15:50.6544536Z ##[debug]adding folder: D:\a\1\s\dist\assets\images
2018-03-09T19:15:50.6544929Z ##[debug]adding file: D:\a\1\s\dist\assets\images\horse_chasing_tail.gif
2018-03-09T19:15:50.6545161Z ##[debug]adding folder: D:\a\1\s\dist\assets\images
2018-03-09T19:15:50.6545411Z ##[debug]adding file: D:\a\1\s\dist\assets\images\Yawning_horse.jpg
2018-03-09T19:15:50.6545618Z ##[debug]adding folder: D:\a\1\s\dist\assets\images
2018-03-09T19:15:50.6545856Z ##[debug]adding file: D:\a\1\s\dist\favicon.ico
2018-03-09T19:15:50.6546053Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6546460Z ##[debug]adding file: D:\a\1\s\dist\index.html
2018-03-09T19:15:50.6547110Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6547444Z ##[debug]adding file: D:\a\1\s\dist\inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:50.6547722Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6548107Z ##[debug]adding file: D:\a\1\s\dist\main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:50.6548401Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6548791Z ##[debug]adding file: D:\a\1\s\dist\polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:50.6549099Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6549477Z ##[debug]adding file: D:\a\1\s\dist\styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:50.6549758Z ##[debug]adding folder: D:\a\1\s\dist
2018-03-09T19:15:50.6565537Z ##[debug]number of files to upload: 15
2018-03-09T19:15:50.6566161Z ##[debug]files to upload: ["D:\\a\\1\\s\\dist","D:\\a\\1\\s\\dist\\0.a271095077847919f949.chunk.js","D:\\a\\1\\s\\dist\\3rdpartylicenses.txt","D:\\a\\1\\s\\dist\\assets","D:\\a\\1\\s\\dist\\assets\\fonts","D:\\a\\1\\s\\dist\\assets\\fonts\\MASTOD__.ttf","D:\\a\\1\\s\\dist\\assets\\images","D:\\a\\1\\s\\dist\\assets\\images\\Yawning_horse.jpg","D:\\a\\1\\s\\dist\\assets\\images\\horse_chasing_tail.gif","D:\\a\\1\\s\\dist\\favicon.ico","D:\\a\\1\\s\\dist\\index.html","D:\\a\\1\\s\\dist\\inline.29d7a9534cdf90ff9fc9.bundle.js","D:\\a\\1\\s\\dist\\main.8d76a8cead0b0a709f14.bundle.js","D:\\a\\1\\s\\dist\\polyfills.367619bafcf2473263d3.bundle.js","D:\\a\\1\\s\\dist\\styles.9c0ad738f18adc3d19ed.bundle.css"]
2018-03-09T19:15:50.6567580Z ##[debug]secure ftp=false
2018-03-09T19:15:50.6567893Z ##[debug]port not specifided, using default: 21
2018-03-09T19:15:50.6604879Z connecting to: ftp.site4now.net:21
2018-03-09T19:15:50.9158405Z ##[debug][connection] < '220 Serv-U FTP Server v14.0 ready...\r\n'
2018-03-09T19:15:50.9162775Z ##[debug][parser] < '220 Serv-U FTP Server v14.0 ready...\r\n'
2018-03-09T19:15:50.9163969Z ##[debug][parser] Response: code=220, buffer='Serv-U FTP Server v14.0 ready...'
2018-03-09T19:15:50.9168281Z ##[debug]ftp client greeting
2018-03-09T19:15:50.9168563Z connected: Serv-U FTP Server v14.0 ready...
2018-03-09T19:15:50.9171052Z ##[debug][connection] > 'USER horser'
2018-03-09T19:15:51.3074212Z ##[debug][connection] < '331 User name okay, need password.\r\n'
2018-03-09T19:15:51.3074625Z ##[debug][parser] < '331 User name okay, need password.\r\n'
2018-03-09T19:15:51.3081806Z ##[debug][parser] Response: code=331, buffer='User name okay, need password.'
2018-03-09T19:15:51.3090492Z ##[debug][connection] > 'PASS 19Dudley91!!'
2018-03-09T19:15:51.5104040Z ##[debug][connection] < '230 User logged in, proceed.\r\n'
2018-03-09T19:15:51.5104457Z ##[debug][parser] < '230 User logged in, proceed.\r\n'
2018-03-09T19:15:51.5148499Z ##[debug][parser] Response: code=230, buffer='User logged in, proceed.'
2018-03-09T19:15:51.5148916Z ##[debug][connection] > 'FEAT'
2018-03-09T19:15:51.6672205Z ##[debug][connection] < '211-Extensions supported\r\n UTF8\r\n OPTS MODE;MLST;UTF8\r\n CLNT\r\n CSID Name; Version;\r\n HOST domain\r\n SITE PSWD;SET;ZONE;CHMOD;MSG;EXEC;HELP\r\n AUTH TLS;SSL;TLS-C;TLS-P;\r\n PBSZ\r\n PROT\r\n CCC\r\n SSCN\r\n RMDA directoryname\r\n DSIZ\r\n AVBL\r\n EPRT\r\n EPSV\r\n MODE Z\r\n THMB BMP|JPEG|GIF|TIFF|PNG max_width max_height pathname\r\n REST STREAM\r\n SIZE\r\n MDTM\r\n MDTM YYYYMMDDHHMMSS[+-TZ];filename\r\n MFMT\r\n MFCT\r\n MFF Create;Modify;\r\n XCRC filename;start;end\r\n XMD5 filename;start;end\r\n XSHA1 filename;start;end\r\n XSHA256 filename;start;end\r\n XSHA512 filename;start;end\r\n COMB target;source_list\r\n MLST Type*;Size*;Create;Modify*;Perm;Win32.ea;Win32.dt;Win32.dl\r\n211 End (for details use "HELP commmand" where command is the command of interest)\r\n'
2018-03-09T19:15:51.6673106Z ##[debug][parser] < '211-Extensions supported\r\n UTF8\r\n OPTS MODE;MLST;UTF8\r\n CLNT\r\n CSID Name; Version;\r\n HOST domain\r\n SITE PSWD;SET;ZONE;CHMOD;MSG;EXEC;HELP\r\n AUTH TLS;SSL;TLS-C;TLS-P;\r\n PBSZ\r\n PROT\r\n CCC\r\n SSCN\r\n RMDA directoryname\r\n DSIZ\r\n AVBL\r\n EPRT\r\n EPSV\r\n MODE Z\r\n THMB BMP|JPEG|GIF|TIFF|PNG max_width max_height pathname\r\n REST STREAM\r\n SIZE\r\n MDTM\r\n MDTM YYYYMMDDHHMMSS[+-TZ];filename\r\n MFMT\r\n MFCT\r\n MFF Create;Modify;\r\n XCRC filename;start;end\r\n XMD5 filename;start;end\r\n XSHA1 filename;start;end\r\n XSHA256 filename;start;end\r\n XSHA512 filename;start;end\r\n COMB target;source_list\r\n MLST Type*;Size*;Create;Modify*;Perm;Win32.ea;Win32.dt;Win32.dl\r\n211 End (for details use "HELP commmand" where command is the command of interest)\r\n'
2018-03-09T19:15:51.6673867Z ##[debug][parser] Response: code=211, buffer='Extensions supported\r\n UTF8\r\n OPTS MODE;MLST;UTF8\r\n CLNT\r\n CSID Name; Version;\r\n HOST domain\r\n SITE PSWD;SET;ZONE;CHMOD;MSG;EXEC;HELP\r\n AUTH TLS;SSL;TLS-C;TLS-P;\r\n PBSZ\r\n PROT\r\n CCC\r\n SSCN\r\n RMDA directoryname\r\n DSIZ\r\n AVBL\r\n EPRT\r\n EPSV\r\n MODE Z\r\n THMB BMP|JPEG|GIF|TIFF|PNG max_width max_height pathname\r\n REST STREAM\r\n SIZE\r\n MDTM\r\n MDTM YYYYMMDDHHMMSS[+-TZ];filename\r\n MFMT\r\n MFCT\r\n MFF Create;Modify;\r\n XCRC filename;start;end\r\n XMD5 filename;start;end\r\n XSHA1 filename;start;end\r\n XSHA256 filename;start;end\r\n XSHA512 filename;start;end\r\n COMB target;source_list\r\n MLST Type*;Size*;Create;Modify*;Perm;Win32.ea;Win32.dt;Win32.dl\r\nEnd (for details use "HELP commmand" where command is the command of interest)'
2018-03-09T19:15:51.6675012Z ##[debug][connection] > 'TYPE I'
2018-03-09T19:15:51.7393884Z ##[debug][connection] < '200 Type set to I.\r\n'
2018-03-09T19:15:51.7394935Z ##[debug][parser] < '200 Type set to I.\r\n'
2018-03-09T19:15:51.7395185Z ##[debug][parser] Response: code=200, buffer='Type set to I.'
2018-03-09T19:15:51.7405484Z ##[debug]ftp client ready
2018-03-09T19:15:51.7405934Z uploading files to remote directory: \
2018-03-09T19:15:51.7408246Z ##[debug]uploading files
2018-03-09T19:15:51.7412830Z ##[debug]creating remote directory: \
2018-03-09T19:15:51.7421303Z ##[debug]file: D:\a\1\s\dist
2018-03-09T19:15:51.7421828Z ##[debug]remoteFile: /
2018-03-09T19:15:51.7423594Z ##[debug]creating remote directory: /
2018-03-09T19:15:51.7424259Z ##[debug]file: D:\a\1\s\dist\0.a271095077847919f949.chunk.js
2018-03-09T19:15:51.7424480Z ##[debug]remoteFile: /0.a271095077847919f949.chunk.js
2018-03-09T19:15:51.7425725Z ##[debug]uploading file: D:\a\1\s\dist\0.a271095077847919f949.chunk.js remote: /0.a271095077847919f949.chunk.js
2018-03-09T19:15:51.7429444Z ##[debug]file: D:\a\1\s\dist\3rdpartylicenses.txt
2018-03-09T19:15:51.7430055Z ##[debug]remoteFile: /3rdpartylicenses.txt
2018-03-09T19:15:51.7430290Z ##[debug]uploading file: D:\a\1\s\dist\3rdpartylicenses.txt remote: /3rdpartylicenses.txt
2018-03-09T19:15:51.7430529Z ##[debug]file: D:\a\1\s\dist\assets
2018-03-09T19:15:51.7430714Z ##[debug]remoteFile: /assets
2018-03-09T19:15:51.7431738Z ##[debug]creating remote directory: /assets
2018-03-09T19:15:51.7432669Z ##[debug]file: D:\a\1\s\dist\assets\fonts
2018-03-09T19:15:51.7433215Z ##[debug]remoteFile: /assets/fonts
2018-03-09T19:15:51.7433430Z ##[debug]creating remote directory: /assets/fonts
2018-03-09T19:15:51.7433619Z ##[debug]file: D:\a\1\s\dist\assets\fonts\MASTOD__.ttf
2018-03-09T19:15:51.7433863Z ##[debug]remoteFile: /assets/fonts/MASTOD__.ttf
2018-03-09T19:15:51.7434065Z ##[debug]uploading file: D:\a\1\s\dist\assets\fonts\MASTOD__.ttf remote: /assets/fonts/MASTOD__.ttf
2018-03-09T19:15:51.7434380Z ##[debug]file: D:\a\1\s\dist\assets\images
2018-03-09T19:15:51.7434614Z ##[debug]remoteFile: /assets/images
2018-03-09T19:15:51.7434830Z ##[debug]creating remote directory: /assets/images
2018-03-09T19:15:51.7435064Z ##[debug]file: D:\a\1\s\dist\assets\images\Yawning_horse.jpg
2018-03-09T19:15:51.7435271Z ##[debug]remoteFile: /assets/images/Yawning_horse.jpg
2018-03-09T19:15:51.7435475Z ##[debug]uploading file: D:\a\1\s\dist\assets\images\Yawning_horse.jpg remote: /assets/images/Yawning_horse.jpg
2018-03-09T19:15:51.7435694Z ##[debug]file: D:\a\1\s\dist\assets\images\horse_chasing_tail.gif
2018-03-09T19:15:51.7435895Z ##[debug]remoteFile: /assets/images/horse_chasing_tail.gif
2018-03-09T19:15:51.7436134Z ##[debug]uploading file: D:\a\1\s\dist\assets\images\horse_chasing_tail.gif remote: /assets/images/horse_chasing_tail.gif
2018-03-09T19:15:51.7436344Z ##[debug]file: D:\a\1\s\dist\favicon.ico
2018-03-09T19:15:51.7436522Z ##[debug]remoteFile: /favicon.ico
2018-03-09T19:15:51.7436764Z ##[debug]uploading file: D:\a\1\s\dist\favicon.ico remote: /favicon.ico
2018-03-09T19:15:51.7436957Z ##[debug]file: D:\a\1\s\dist\index.html
2018-03-09T19:15:51.7437159Z ##[debug]remoteFile: /index.html
2018-03-09T19:15:51.7437373Z ##[debug]uploading file: D:\a\1\s\dist\index.html remote: /index.html
2018-03-09T19:15:51.7437584Z ##[debug]file: D:\a\1\s\dist\inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:51.7437792Z ##[debug]remoteFile: /inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:51.7438030Z ##[debug]uploading file: D:\a\1\s\dist\inline.29d7a9534cdf90ff9fc9.bundle.js remote: /inline.29d7a9534cdf90ff9fc9.bundle.js
2018-03-09T19:15:51.7438260Z ##[debug]file: D:\a\1\s\dist\main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:51.7438483Z ##[debug]remoteFile: /main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:51.7438725Z ##[debug]uploading file: D:\a\1\s\dist\main.8d76a8cead0b0a709f14.bundle.js remote: /main.8d76a8cead0b0a709f14.bundle.js
2018-03-09T19:15:51.7438954Z ##[debug]file: D:\a\1\s\dist\polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:51.7439179Z ##[debug]remoteFile: /polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:51.7439721Z ##[debug]uploading file: D:\a\1\s\dist\polyfills.367619bafcf2473263d3.bundle.js remote: /polyfills.367619bafcf2473263d3.bundle.js
2018-03-09T19:15:51.7440011Z ##[debug]file: D:\a\1\s\dist\styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:51.7440219Z ##[debug]remoteFile: /styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:51.7440450Z ##[debug]uploading file: D:\a\1\s\dist\styles.9c0ad738f18adc3d19ed.bundle.css remote: /styles.9c0ad738f18adc3d19ed.bundle.css
2018-03-09T19:15:51.7468114Z ##[debug][connection] > 'PWD'
2018-03-09T19:15:51.8218617Z ##[debug][connection] < '257 "/" is current directory.\r\n'
2018-03-09T19:15:51.8219120Z ##[debug][parser] < '257 "/" is current directory.\r\n'
2018-03-09T19:15:51.8219407Z ##[debug][parser] Response: code=257, buffer='"/" is current directory.'
2018-03-09T19:15:51.8223415Z ##[debug][connection] > 'CWD \\'
2018-03-09T19:15:51.9520666Z ##[debug][connection] < '250 Directory changed to /\r\n'
2018-03-09T19:15:51.9521187Z ##[debug][parser] < '250 Directory changed to /\r\n'
2018-03-09T19:15:51.9521357Z ##[debug][parser] Response: code=250, buffer='Directory changed to /'
2018-03-09T19:15:51.9521494Z ##[debug][connection] > 'CWD /'
2018-03-09T19:15:52.0393165Z ##[debug][connection] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.0393606Z ##[debug][parser] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.0393880Z ##[debug][parser] Response: code=250, buffer='Directory changed to /'
2018-03-09T19:15:52.0394108Z ##[debug][connection] > 'PWD'
2018-03-09T19:15:52.0396767Z files uploaded: 0, files skipped: 0, directories processed: 1, total: 1, remaining: 15, remote directory successfully created/verified: \
2018-03-09T19:15:52.1464749Z ##[debug][connection] < '257 "/" is current directory.\r\n'
2018-03-09T19:15:52.1465971Z ##[debug][parser] < '257 "/" is current directory.\r\n'
2018-03-09T19:15:52.1466337Z ##[debug][parser] Response: code=257, buffer='"/" is current directory.'
2018-03-09T19:15:52.1466773Z ##[debug][connection] > 'CWD /'
2018-03-09T19:15:52.2544396Z ##[debug][connection] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.2544813Z ##[debug][parser] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.2545240Z ##[debug][parser] Response: code=250, buffer='Directory changed to /'
2018-03-09T19:15:52.2545470Z ##[debug][connection] > 'CWD '
2018-03-09T19:15:52.3285531Z ##[debug][connection] < '501 Syntax error in parameters or arguments.\r\n'
2018-03-09T19:15:52.3286288Z ##[debug][parser] < '501 Syntax error in parameters or arguments.\r\n'
2018-03-09T19:15:52.3286546Z ##[debug][parser] Response: code=501, buffer='Syntax error in parameters or arguments.'
2018-03-09T19:15:52.3287602Z ##[debug][connection] > 'CWD /'
2018-03-09T19:15:52.4042501Z ##[debug][connection] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.4043030Z ##[debug][parser] < '250 Directory changed to /\r\n'
2018-03-09T19:15:52.4043359Z ##[debug][parser] Response: code=250, buffer='Directory changed to /'
2018-03-09T19:15:52.4048939Z ##[debug][connection] > 'PASV'
2018-03-09T19:15:52.4061686Z FTP upload failed: "Unable to create remote directory: / due to error: Error: Syntax error in parameters or arguments.". FTP log: "[connection] < '250 Directory changed to /\r\n',[parser] < '250 Directory changed to /\r\n',[parser] Response: code=250, buffer='Directory changed to /',[connection] > 'PASV'".
2018-03-09T19:15:52.4062912Z host: ftp.site4now.net
2018-03-09T19:15:52.4063176Z path: \
2018-03-09T19:15:52.4063359Z files uploaded: 0
2018-03-09T19:15:52.4063550Z files skipped: 0
2018-03-09T19:15:52.4063728Z directories processed: 1
2018-03-09T19:15:52.4063908Z unprocessed files & directories: 15
2018-03-09T19:15:52.4064305Z ##[debug]task result: Failed
2018-03-09T19:15:52.4120192Z ##[error]Unable to create remote directory: / due to error: Error: Syntax error in parameters or arguments.
2018-03-09T19:15:52.4134628Z ##[debug]Processed: ##vso[task.issue type=error;]Unable to create remote directory: / due to error: Error: Syntax error in parameters or arguments.
2018-03-09T19:15:52.4148033Z ##[debug]Processed: ##vso[task.complete result=Failed;]Unable to create remote directory: / due to error: Error: Syntax error in parameters or arguments.
2018-03-09T19:15:52.4148151Z disconnecting from: ftp.site4now.net
2018-03-09T19:15:52.4900984Z ##[debug][connection] < '227 Entering Passive Mode (208,118,63,179,156,74)\r\n'
2018-03-09T19:15:52.6721119Z ##[debug]ftp client end
2018-03-09T19:15:52.6722738Z disconnected
2018-03-09T19:15:52.6723174Z ##[debug]ftp client close, hadErr:false
2018-03-09T19:15:52.6785246Z ##[section]Finishing: FTP Upload: dist
2018-03-09T19:15:52.6794608Z ##[debug]Evaluating condition for step: 'Post Job Cleanup'
2018-03-09T19:15:52.6794902Z ##[debug]Always run post-job step: 'Post Job Cleanup'
2018-03-09T19:15:52.6795289Z ##[section]Starting: Post Job Cleanup
2018-03-09T19:15:52.6870577Z Cleaning any cached credential from repository: Horse Liberator Website (Git)
2018-03-09T19:15:52.6871062Z ##[debug]Repository url=https://invisiblefury.visualstudio.com/_git/Horse Liberator Website
2018-03-09T19:15:52.6871183Z ##[debug]targetPath=D:\a\1\s
2018-03-09T19:15:52.6871276Z ##[debug]Remove any extraheader, proxy and client cert setting from git config.
2018-03-09T19:15:52.6898713Z ##[debug]Remove injected credential from git remote fetch url.
2018-03-09T19:15:52.6927337Z ##[debug]Set git fetch url to: https://invisiblefury.visualstudio.com/_git/Horse%20Liberator%20Website for remote: origin.
2018-03-09T19:15:52.6927610Z ##[command]git remote set-url origin https://invisiblefury.visualstudio.com/_git/Horse%20Liberator%20Website
2018-03-09T19:15:52.7245453Z ##[debug]Remove injected credential from git remote push url.
2018-03-09T19:15:52.7271374Z ##[debug]Set git push url to: https://invisiblefury.visualstudio.com/_git/Horse%20Liberator%20Website for remote: origin.
2018-03-09T19:15:52.7271555Z ##[command]git remote set-url --push origin https://invisiblefury.visualstudio.com/_git/Horse%20Liberator%20Website
2018-03-09T19:15:52.7590347Z ##[section]Finishing: Post Job Cleanup
2018-03-09T19:15:53.2105864Z ##[section]Finishing: Job

Sqoop data from Teradata to HDFS using a shell script

I'm getting the exception below.
What are the possible causes?
Does the database really not exist, or is it a permissions issue?
com.teradata.connector.common.exception.ConnectorException: java.sql.SQLException: [Teradata Database] [TeraJDBC 15.00.00.20] [Error 3802] [SQLState 42S02] Database 'P_STORAGE' does not exist.
at com.teradata.jdbc.jdbc_4.util.ErrorFactory.makeDatabaseSQLException(ErrorFactory.java:308)
at com.teradata.jdbc.jdbc_4.statemachine.ReceiveInitSubState.action(ReceiveInitSubState.java:109)
at com.teradata.jdbc.jdbc_4.statemachine.StatementReceiveState.subStateMachine(StatementReceiveState.java:307)
at com.teradata.jdbc.jdbc_4.statemachine.StatementReceiveState.action(StatementReceiveState.java:196)
at com.teradata.jdbc.jdbc_4.statemachine.StatementController.runBody(StatementController.java:123)
at com.teradata.jdbc.jdbc_4.statemachine.StatementController.run(StatementController.java:114)
at com.teradata.jdbc.jdbc_4.TDStatement.executeStatement(TDStatement.java:385)
at com.teradata.jdbc.jdbc_4.TDStatement.prepareRequest(TDStatement.java:569)
at com.teradata.jdbc.jdbc_4.TDPreparedStatement.<init>(TDPreparedStatement.java:117)
at com.teradata.jdbc.jdk6.JDK6_SQL_PreparedStatement.<init>(JDK6_SQL_PreparedStatement.java:29)
at com.teradata.jdbc.jdk6.JDK6_SQL_Connection.constructPreparedStatement(JDK6_SQL_Connection.java:81)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1357)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1401)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1387)
at com.teradata.connector.teradata.db.TeradataConnection.getColumnDescsForSQL(TeradataConnection.java:995)
at com.teradata.connector.teradata.db.TeradataConnection.getColumnNamesForSQL(TeradataConnection.java:940)
at com.teradata.connector.teradata.utils.TeradataUtils.validateInputTeradataProperties(TeradataUtils.java:315)
at com.teradata.connector.teradata.processor.TeradataInputProcessor.validateConfiguration(TeradataInputProcessor.java:91)
at com.teradata.connector.teradata.processor.TeradataSplitByPartitionProcessor.validateConfiguration(TeradataSplitByPartitionProcessor.java:412)
at com.teradata.connector.teradata.processor.TeradataInputProcessor.inputPreProcessor(TeradataInputProcessor.java:36)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:116)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:56)
at org.apache.sqoop.teradata.TeradataSqoopImportHelper.runJob(TeradataSqoopImportHelper.java:370)
at org.apache.sqoop.teradata.TeradataConnManager.importQuery(TeradataConnManager.java:531)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:499)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
at org.apache.sqoop.Sqoop.run(Sqoop.java:148)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:184)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:226)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:235)
at org.apache.sqoop.Sqoop.main(Sqoop.java:244)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:140)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:56)
at org.apache.sqoop.teradata.TeradataSqoopImportHelper.runJob(TeradataSqoopImportHelper.java:370)
at org.apache.sqoop.teradata.TeradataConnManager.importQuery(TeradataConnManager.java:531)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:499)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
at org.apache.sqoop.Sqoop.run(Sqoop.java:148)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:184)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:226)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:235)
at org.apache.sqoop.Sqoop.main(Sqoop.java:244)
16/09/23 05:47:10 INFO teradata.TeradataSqoopImportHelper: Teradata import job completed with exit code 1
16/09/23 05:47:10 ERROR tool.ImportTool: Encountered IOException running import job: java.io.IOException: Exception running Teradata import job
at org.apache.sqoop.teradata.TeradataSqoopImportHelper.runJob(TeradataSqoopImportHelper.java:373)
at org.apache.sqoop.teradata.TeradataConnManager.importQuery(TeradataConnManager.java:531)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:499)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
at org.apache.sqoop.Sqoop.run(Sqoop.java:148)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:184)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:226)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:235)
at org.apache.sqoop.Sqoop.main(Sqoop.java:244)
Caused by: com.teradata.connector.common.exception.ConnectorException: java.sql.SQLException: [Teradata Database] [TeraJDBC 15.00.00.20] [Error 3802] [SQLState 42S02] Database 'P_STORAGE' does not exist.
at com.teradata.jdbc.jdbc_4.util.ErrorFactory.makeDatabaseSQLException(ErrorFactory.java:308)
at com.teradata.jdbc.jdbc_4.statemachine.ReceiveInitSubState.action(ReceiveInitSubState.java:109)
at com.teradata.jdbc.jdbc_4.statemachine.StatementReceiveState.subStateMachine(StatementReceiveState.java:307)
at com.teradata.jdbc.jdbc_4.statemachine.StatementReceiveState.action(StatementReceiveState.java:196)
at com.teradata.jdbc.jdbc_4.statemachine.StatementController.runBody(StatementController.java:123)
at com.teradata.jdbc.jdbc_4.statemachine.StatementController.run(StatementController.java:114)
at com.teradata.jdbc.jdbc_4.TDStatement.executeStatement(TDStatement.java:385)
at com.teradata.jdbc.jdbc_4.TDStatement.prepareRequest(TDStatement.java:569)
at com.teradata.jdbc.jdbc_4.TDPreparedStatement.<init>(TDPreparedStatement.java:117)
at com.teradata.jdbc.jdk6.JDK6_SQL_PreparedStatement.<init>(JDK6_SQL_PreparedStatement.java:29)
at com.teradata.jdbc.jdk6.JDK6_SQL_Connection.constructPreparedStatement(JDK6_SQL_Connection.java:81)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1357)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1401)
at com.teradata.jdbc.jdbc_4.TDSession.prepareStatement(TDSession.java:1387)
at com.teradata.connector.teradata.db.TeradataConnection.getColumnDescsForSQL(TeradataConnection.java:995)
at com.teradata.connector.teradata.db.TeradataConnection.getColumnNamesForSQL(TeradataConnection.java:940)
at com.teradata.connector.teradata.utils.TeradataUtils.validateInputTeradataProperties(TeradataUtils.java:315)
at com.teradata.connector.teradata.processor.TeradataInputProcessor.validateConfiguration(TeradataInputProcessor.java:91)
at com.teradata.connector.teradata.processor.TeradataSplitByPartitionProcessor.validateConfiguration(TeradataSplitByPartitionProcessor.java:412)
at com.teradata.connector.teradata.processor.TeradataInputProcessor.inputPreProcessor(TeradataInputProcessor.java:36)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:116)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:56)
at org.apache.sqoop.teradata.TeradataSqoopImportHelper.runJob(TeradataSqoopImportHelper.java:370)
at org.apache.sqoop.teradata.TeradataConnManager.importQuery(TeradataConnManager.java:531)
at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:499)
at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:605)
at org.apache.sqoop.Sqoop.run(Sqoop.java:148)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.sqoop.Sqoop.runSqoop(Sqoop.java:184)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:226)
at org.apache.sqoop.Sqoop.runTool(Sqoop.java:235)
at org.apache.sqoop.Sqoop.main(Sqoop.java:244)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:140)
at com.teradata.connector.common.tool.ConnectorJobRunner.runJob(ConnectorJobRunner.java:56)
at org.apache.sqoop.teradata.TeradataSqoopImportHelper.runJob(TeradataSqoopImportHelper.java:370)
... 9 more
It could be either issue.
Please try sqoop eval with the same credentials:
sqoop eval --connect "<teradata connection string>" \
--username <username> -P \
--query "SELECT * FROM DBC.Databases"

NLP NER RegexNER: Can I include annotators when using NERClassifierCombiner?

Can I include the annotators by adding a properties file to the call:
java -cp stanford-corenlp-3.5.2.jar -mx1g edu.stanford.nlp.ie.NERClassifierCombiner -loadClassifier c2is2.serialized.ncc.ncc.ser.gz -map word=0,answer=1 -props c2is2nlp.props -textFile c2is2r3.txt
Here is the error stack:
NERClassifierCombiner invoked on Mon Jul 20 13:08:20 EDT 2015 with arguments:
-loadClassifier c2is2.serialized.ncc.ncc.ser.gz -map word=0,answer=1 -props c2is2nlp.props -textFile c2is2r3.txt
loadClassifier=c2is2.serialized.ncc.ncc.ser.gz
regexner.mapping=c2is2Mapping.tab
Unknown property: |regexner.mapping|
textFile=c2is2r3.txt
map=word=0,answer=1
annotators=tokenize, ssplit, pos, lemma, ner, parse, dcoref
Unknown property: |annotators|
map=word=0,answer=1
Do I need to add the following:
edu.stanford.nlp.pipeline.StanfordCoreNLP [ -props <YOUR CONFIGURATION FILE> ]
here, in this command:
java -cp stanford-corenlp-3.5.2.jar -mx1g edu.stanford.nlp.ie.NERClassifierCombiner -loadClassifier c2is2.serialized.ncc.ncc.ser.gz -map word=0,answer=1 -textFile c2is2r3.txt
Here are the working steps:
Token file
more c2is2r3.tsv
The O
fate O
of O
Lehman ORGANIZATION
Brothers ORGANIZATION
. . .
New ORGANIZATION
York ORGANIZATION
Fed ORGANIZATION
, O
and O
Treasury TITLE
Secretary TITLE
Henry PERSON
M. PERSON
Paulson PERSON
Jr. PERSON
. O
Property file:
more c2is2r3.prop
trainFile = c2is2r3.tsv
serializeTo = c2is2r3-ner-model.ser.gz
map = word=0,answer=1
useClassFeature=true
useWord=true
useNGrams=true
noMidNGrams=true
maxNGramLeng=6
usePrev=true
useNext=true
useSequences=true
usePrevSequences=true
maxLeft=1
useTypeSeqs=true
useTypeSeqs2=true
useTypeySequences=true
wordShape=chris2useLC
useDisjunctive=true
Training the custom classifier:
java -cp stanford-corenlp-3.5.2.jar edu.stanford.nlp.ie.crf.CRFClassifier -prop c2is2r3.prop
Combining models
java -cp stanford-corenlp-3.5.2.jar -mx2g edu.stanford.nlp.ie.NERClassifierCombiner -ner.model c2is2r3-ner-model.ser.gz,classifiers/english.muc.7class.distsim.crf.ser.gz -ner.useSUTime false -ner.combinationMode HIGH_RECALL -serializeTo c2is2.serialized.ncc.ncc.ser.gz
Testing
java -cp stanford-corenlp-3.5.2.jar -mx1g edu.stanford.nlp.ie.NERClassifierCombiner -loadClassifier c2is2.serialized.ncc.ncc.ser.gz -map word=0,answer=1 -textFile c2is2r3.txt
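As for the original question: the NERClassifierCombiner command-line tool only understands its own properties, which is why regexner.mapping and annotators are reported as unknown. One way to bring regexner and the other annotators in is to run the StanfordCoreNLP pipeline instead and point ner.model at the same pair of models that were combined above. This is only a sketch, not verified against 3.5.2 specifics: pipeline.props is a hypothetical file name, and the models jar is assumed to be available next to the main jar.
pipeline.props:
annotators = tokenize, ssplit, pos, lemma, ner, regexner
ner.model = c2is2r3-ner-model.ser.gz,classifiers/english.muc.7class.distsim.crf.ser.gz
ner.useSUTime = false
ner.combinationMode = HIGH_RECALL
regexner.mapping = c2is2Mapping.tab
Running the pipeline (use ; instead of : as the classpath separator on Windows):
java -cp stanford-corenlp-3.5.2.jar:stanford-corenlp-3.5.2-models.jar -mx2g edu.stanford.nlp.pipeline.StanfordCoreNLP -props pipeline.props -file c2is2r3.txt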

How to write hex values with defaults write -data on OS X?

I'm playing with some "defaults write" commands for my dotfiles and can't find a way to write hex values.
When I do a read I get something like this:
$ defaults read com.ragingmenace.MenuMeters CPUSystemColor
<040b7374 7265616d 74797065 6481e803 84014084 8484074e 53436f6c 6f720084 84084e53 4f626a65 63740085 84016301 84046666 6666831b 677c3f00 83d70ea8 3d0186>
So I tried this command, without success (after checking the defaults man page):
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data "<040b7374 7265616d 74797065 6481e803 84014084 8484074e 53436f6c 6f720084 84084e53 4f626a65 63740085 84016301 84046666 66668364 79783f83 1b677c3f 83bf8073 3f0186>"
Command line interface to a user's defaults.
Syntax:
'defaults' [-currentHost | -host <hostname>] followed by one of the following:
read shows all defaults
read <domain> shows defaults for given domain
read <domain> <key> shows defaults for given domain, key
read-type <domain> <key> shows the type for the given domain, key
write <domain> <domain_rep> writes domain (overwrites existing)
write <domain> <key> <value> writes key for domain
rename <domain> <old_key> <new_key> renames old_key to new_key
delete <domain> deletes domain
delete <domain> <key> deletes key in domain
import <domain> <path to plist> writes the plist at path to domain
import <domain> - writes a plist from stdin to domain
export <domain> <path to plist> saves domain as a binary plist to path
export <domain> - writes domain as an xml plist to stdout
domains lists all domains
find <word> lists all entries containing word
help print this help
<domain> is ( <domain_name> | -app <application_name> | -globalDomain )
or a path to a file omitting the '.plist' extension
<value> is one of:
<value_rep>
-string <string_value>
-data <hex_digits>
-int[eger] <integer_value>
-float <floating-point_value>
-bool[ean] (true | false | yes | no)
-date <date_rep>
-array <value1> <value2> ...
-array-add <value1> <value2> ...
-dict <key1> <value1> <key2> <value2> ...
-dict-add <key1> <value1> ...
You can see that the format isn't recognized, since defaults just prints its usage.
So I tried some variations, also without success:
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data <040b73747265616d747970656481e803840140848484074e53436f6c6f72008484084e534f626a656374008584016301840466666666836479783f831b677c3f83bf80733f0186>
# same error as the first one
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data "<040b73747265616d747970656481e803840140848484074e53436f6c6f72008484084e534f626a656374008584016301840466666666836479783f831b677c3f83bf80733f0186>"
# same error as the first one
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data <040b7374 7265616d 74797065 6481e803 84014084 8484074e 53436f6c 6f720084 84084e53 4f626a65 63740085 84016301 84046666 66668364 79783f83 1b677c3f 83bf8073 3f0186>
zsh: parse error near `\n'
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data <'040b7374 7265616d 74797065 6481e803 84014084 8484074e 53436f6c 6f720084 84084e53 4f626a65 63740085 84016301 84046666 66668364 79783f83 1b677c3f 83bf8073 3f0186'>
zsh: parse error near `\n'
Any idea how to get this working?
It's really simple: just remove the <, > and all the spaces.
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data "040b73747265616d747970656481e803840140848484074e53436f6c6f72008484084e534f626a656374008584016301840466666666836479783f831b677c3f83bf80733f0186"
You can verify it worked by doing a defaults read:
$ defaults read com.ragingmenace.MenuMeters CPUSystemColor
<040b7374 7265616d 74797065 6481e803 84014084 8484074e 53436f6c 6f720084 84084e53 4f626a65 63740085 84016301 84046666 6666831b 677c3f00 83d70ea8 3d0186>
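If you don't want to strip the brackets and spaces by hand, a small shell snippet can do it for you. A sketch, reusing the domain and key from above:
# Read the value, strip <, > and whitespace, then write it back with -data.
$ hex=$(defaults read com.ragingmenace.MenuMeters CPUSystemColor | tr -d '<> \n')
$ defaults write com.ragingmenace.MenuMeters CPUSystemColor -data "$hex"
This is mostly useful for replaying a value from your dotfiles or copying it between domains: read it where it is already set, strip the formatting, and run the write elsewhere.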

Resources