http://apache-carbondata-dev-mailing-list-archive.168.s1.nabble.com/LONG-STRING-COLUMNS-dont-t-have-effect-tp76493p76514.html
Another bug related to paths:
CarbonData builds the HDFS path using File.separator, but File.separator is "\" on
Windows, which produces an invalid path and leads to the following error:
> Exception in thread "main" org.apache.spark.sql.AnalysisException:
> java.lang.RuntimeException: java.lang.RuntimeException: Error while running
> command to get file permissions : java.io.IOException: (null) entry in
> command string: null ls -F E:\tmp\hive
> at
> org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:770)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
> at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:659)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:634)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
> at
> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:180)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:114)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at
> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:385)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:287)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.externalCatalog(CarbonSessionState.scala:232)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog$lzycompute(CarbonSessionState.scala:219)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog(CarbonSessionState.scala:217)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.analyzer(CarbonSessionState.scala:244)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
> at
> org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
> at
> org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
> at
> org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:423)
> at
> example.spark.CarbondataStreamingConfigTest$.main(CarbondataStreamingConfigTest.scala:37)
> at
> example.spark.CarbondataStreamingConfigTest.main(CarbondataStreamingConfigTest.scala)
> ;
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.externalCatalog(CarbonSessionState.scala:232)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog$lzycompute(CarbonSessionState.scala:219)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog(CarbonSessionState.scala:217)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.analyzer(CarbonSessionState.scala:244)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
> at
> org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
> at
> org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
> at
> org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:423)
> at
> example.spark.CarbondataStreamingConfigTest$.main(CarbondataStreamingConfigTest.scala:37)
> at
> example.spark.CarbondataStreamingConfigTest.main(CarbondataStreamingConfigTest.scala)
> Caused by: java.lang.RuntimeException: java.lang.RuntimeException: Error
> while running command to get file permissions : java.io.IOException: (null)
> entry in command string: null ls -F E:\tmp\hive
> at
> org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:770)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
> at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:659)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:634)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
> at
> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:180)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:114)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at
> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:385)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:287)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.externalCatalog(CarbonSessionState.scala:232)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog$lzycompute(CarbonSessionState.scala:219)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog(CarbonSessionState.scala:217)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.analyzer(CarbonSessionState.scala:244)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
> at
> org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
> at
> org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
> at
> org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:423)
> at
> example.spark.CarbondataStreamingConfigTest$.main(CarbondataStreamingConfigTest.scala:37)
> at
> example.spark.CarbondataStreamingConfigTest.main(CarbondataStreamingConfigTest.scala)
>
> at
> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:180)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:114)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at
> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:385)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:287)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
> ... 18 more
> Caused by: java.lang.RuntimeException: Error while running command to get
> file permissions : java.io.IOException: (null) entry in command string:
> null ls -F E:\tmp\hive
> at
> org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:770)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:866)
> at org.apache.hadoop.util.Shell.execCommand(Shell.java:849)
> at org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:659)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:634)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
> at
> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:180)
> at
> org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:114)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at
> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:385)
> at
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:287)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
> at
> org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:114)
> at
> org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:102)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.externalCatalog(CarbonSessionState.scala:232)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog$lzycompute(CarbonSessionState.scala:219)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.catalog(CarbonSessionState.scala:217)
> at
> org.apache.spark.sql.hive.CarbonSessionStateBuilder.analyzer(CarbonSessionState.scala:244)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.BaseSessionStateBuilder$$anonfun$build$2.apply(BaseSessionStateBuilder.scala:293)
> at
> org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:79)
> at
> org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:79)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
> at
> org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
> at
> org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
> at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
> at
> org.apache.spark.sql.SparkSession.createDataFrame(SparkSession.scala:423)
> at
> example.spark.CarbondataStreamingConfigTest$.main(CarbondataStreamingConfigTest.scala:37)
> at
> example.spark.CarbondataStreamingConfigTest.main(CarbondataStreamingConfigTest.scala)
>
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:699)
> at
> org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:634)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:599)
> at
> org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)
> at
> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)
> ... 33 more
>
>
> xm_zzc <
[hidden email]> 于2019年3月26日周二 下午4:16写道:
>
>> Generally we use Linux as the development environment, but it should
>> be fine on Windows as well.
>> Can you show us the detailed problem you encountered on Windows?
>> @xuchuanyin, please help.
>>
>>
>>
>> --
>> Sent from:
>> http://apache-carbondata-dev-mailing-list-archive.1130556.n5.nabble.com/
>>
>