[ https://issues.apache.org/jira/browse/CARBONDATA-1237?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Chetan Bhat updated CARBONDATA-1237:
------------------------------------
Description:

Steps:
Configure enable.unsafe.columnpage = true.
Create a table having a timestamp column.
Load data - 2 loads using a different CSV for each, every CSV containing unique data.

0: jdbc:hive2://10.19.91.225:22550/default> CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='CUST_NAME,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1,CUST_ID','sort_columns'='CUST_NAME');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (1.017 seconds)

0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split1.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (10.798 seconds)

0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split2.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (16.006 seconds)

Actual Result (Issue) - UndeclaredThrowableException is thrown on the timestamp column.
Error: java.lang.reflect.UndeclaredThrowableException (state=,code=0)

Expected Result: UndeclaredThrowableException should not be thrown.

was:

Steps:
Configure enable.unsafe.columnpage = true (a configuration sketch follows these steps).
Create a table having a timestamp column.
Load data - 2 loads using a different CSV for each, every CSV containing unique data.
Try to create a secondary index on the timestamp column.
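For reference, a minimal sketch of the first step above, assuming the property is set in the carbon.properties file read by the Spark driver and executors (the exact file path depends on the deployment) and that the JDBCServer is restarted afterwards so the setting takes effect:

enable.unsafe.columnpage=true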
0: jdbc:hive2://10.19.91.225:22550/default> CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='CUST_NAME,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1,CUST_ID','sort_columns'='CUST_NAME');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (1.017 seconds)

0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split1.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (10.798 seconds)

0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split2.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
+---------+--+
| Result  |
+---------+--+
+---------+--+
No rows selected (16.006 seconds)

0: jdbc:hive2://10.19.91.225:22550/default> CREATE INDEX indextable2 ON TABLE uniqdata (DOB) AS 'org.apache.carbondata.format';

Actual Result (Issue) - UndeclaredThrowableException is thrown when attempting to create a secondary index on the timestamp column.
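A minimal verification sketch (not part of the original report; the exact failing statement beyond the CREATE INDEX above is not captured here): a simple filter on the timestamp column is the kind of read the DOB index is intended to serve, and it can be used to check whether the loaded timestamp pages are readable. The literal value is illustrative only:

0: jdbc:hive2://10.19.91.225:22550/default> SELECT CUST_ID, DOB FROM uniqdata WHERE DOB IS NOT NULL LIMIT 10;
0: jdbc:hive2://10.19.91.225:22550/default> SELECT COUNT(*) FROM uniqdata WHERE DOB = '1975-06-11 01:00:03';

The observed failure for the CREATE INDEX statement follows.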
0: jdbc:hive2://10.19.91.225:22550/default> CREATE INDEX indextable2 ON TABLE uniqdata (DOB) AS 'org.apache.carbondata.format';
Error: java.lang.reflect.UndeclaredThrowableException (state=,code=0)

Driver Log -
2017-06-27 14:06:34,557 | INFO | [pool-23-thread-83] | Parsing command: string | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,557 | INFO | [pool-23-thread-83] | Parsing command: timestamp | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,557 | INFO | [pool-23-thread-83] | Parsing command: timestamp | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,557 | INFO | [pool-23-thread-83] | Parsing command: bigint | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,558 | INFO | [pool-23-thread-83] | Parsing command: bigint | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,558 | INFO | [pool-23-thread-83] | Parsing command: decimal(30,10) | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,558 | INFO | [pool-23-thread-83] | Parsing command: decimal(36,10) | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,558 | INFO | [pool-23-thread-83] | Parsing command: double | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,559 | INFO | [pool-23-thread-83] | Parsing command: double | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,559 | INFO | [pool-23-thread-83] | Parsing command: int | org.apache.spark.internal.Logging$class.logInfo(Logging.scala:54)
2017-06-27 14:06:34,559 | INFO | [pool-23-thread-83] | Closed a connection to metastore, current connections: 0 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:622)
2017-06-27 14:06:34,560 | INFO | [pool-23-thread-83] | Trying to connect to metastore with URI thrift://10.19.91.224:21088 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:469)
2017-06-27 14:06:34,585 | INFO | [pool-23-thread-83] | Opened a connection to metastore, current connections: 1 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:514)
2017-06-27 14:06:34,585 | INFO | [pool-23-thread-83] | Connected to metastore. | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:593)
2017-06-27 14:06:34,793 | AUDIT | [pool-23-thread-83] | [linux-225][chetan][Thread-776]Index creation with Database name [default] and Index name [indextable2] failed | org.apache.carbondata.common.logging.impl.StandardLogService.audit(StandardLogService.java:207)
2017-06-27 14:06:34,793 | INFO | [pool-23-thread-83] | OperationId=75f45d62-5e90-41f8-8e61-186a5e8e58e3 Result=FAIL | org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:320)
2017-06-27 14:06:34,793 | ERROR | [pool-23-thread-83] | Error executing query, currentState RUNNING, | org.apache.spark.internal.Logging$class.logError(Logging.scala:91)
java.lang.reflect.UndeclaredThrowableException
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1787)
at org.apache.spark.util.Utils$.proxyOperate(Utils.scala:2688)
at org.apache.spark.util.ACLFileUtils$.changeOwnerRecursivelyAfterOperation(ACLFileUtils.scala:57)
at org.apache.spark.sql.execution.command.CreateIndexTable.run(carbonTableSchema.scala:359)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:59)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:57)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:75)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:125)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:125)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:699)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:251)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:183)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:180)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1769)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1.run(SparkExecuteStatementOperation.scala:193)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.util.concurrent.ExecutionException: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 98.0 failed 4 times, most recent failure: Lost task 0.3 in stage 98.0 (TID 199, linux-224, executor 30): ExecutorLostFailure (executor 30 exited caused by one of the running tasks) Reason: Container marked as failed: container_1497850829760_1804_01_000042 on host: linux-224. Exit status: 134. Diagnostics: Exception from container-launch.
Container id: container_1497850829760_1804_01_000042
Exit code: 134
Stack trace: ExitCodeException exitCode=134:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:628)
at org.apache.hadoop.util.Shell.run(Shell.java:525)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:819)
at org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor.launchContainer(LinuxContainerExecutor.java:388)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:313)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:88)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
Shell output: main : command provided 1
main : run as user is spark2x
main : requested yarn user is spark2x
Before: Going to create dir
Container exited with a non-zero exit code 134 and the last 4096 bytes from the error logs are :
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/huawei/Bigdata/FusionInsight_Spark2x_V100R002C70/install/FusionInsight-Spark2x-2.1/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/srv/BigData/hadoop/data1/nm/localdir/filecache/11/spark-archive-2x.zip/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/huawei/Bigdata/FusionInsight_HD_V100R002C70/install/FusionInsight-Hadoop-2.7.2/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Driver stacktrace:
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:192)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$$anonfun$createSecondaryIndex$1$$anonfun$apply$mcV$sp$1.apply(SecondaryIndexCreator.scala:106)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$$anonfun$createSecondaryIndex$1$$anonfun$apply$mcV$sp$1.apply(SecondaryIndexCreator.scala:105)
at scala.collection.immutable.List.foreach(List.scala:381)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$$anonfun$createSecondaryIndex$1.apply$mcV$sp(SecondaryIndexCreator.scala:105)
at scala.util.control.Breaks.breakable(Breaks.scala:38)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$.createSecondaryIndex(SecondaryIndexCreator.scala:103)
at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.secondaryIndexCallable(CarbonDataRDDFactory.scala:156)
at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.createSecondaryIndex(CarbonDataRDDFactory.scala:103)
at org.apache.spark.sql.execution.command.loadDataForSecondaryIndex.run(carbonTableSchema.scala:491)
at org.apache.spark.sql.execution.command.CreateIndexTable$$anonfun$run$2.apply$mcV$sp(carbonTableSchema.scala:361)
at org.apache.spark.util.ACLFileUtils$$anonfun$changeOwnerRecursivelyAfterOperation$1.apply$mcV$sp(ACLFileUtils.scala:71)
at org.apache.spark.util.Utils$$anon$7.run(Utils.scala:2690)
at org.apache.spark.util.Utils$$anon$7.run(Utils.scala:2688)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1769)
... 30 more
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 98.0 failed 4 times, most recent failure: Lost task 0.3 in stage 98.0 (TID 199, linux-224, executor 30): ExecutorLostFailure (executor 30 exited caused by one of the running tasks) Reason: Container marked as failed: container_1497850829760_1804_01_000042 on host: linux-224. Exit status: 134. Diagnostics: Exception from container-launch.
Container id: container_1497850829760_1804_01_000042
Exit code: 134
Stack trace: ExitCodeException exitCode=134:
at org.apache.hadoop.util.Shell.runCommand(Shell.java:628)
at org.apache.hadoop.util.Shell.run(Shell.java:525)
at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:819)
at org.apache.hadoop.yarn.server.nodemanager.LinuxContainerExecutor.launchContainer(LinuxContainerExecutor.java:388)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:313)
at org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.call(ContainerLaunch.java:88)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
Shell output: main : command provided 1
main : run as user is spark2x
main : requested yarn user is spark2x
Before: Going to create dir
Container exited with a non-zero exit code 134 and the last 4096 bytes from the error logs are :
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/opt/huawei/Bigdata/FusionInsight_Spark2x_V100R002C70/install/FusionInsight-Spark2x-2.1/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/srv/BigData/hadoop/data1/nm/localdir/filecache/11/spark-archive-2x.zip/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/opt/huawei/Bigdata/FusionInsight_HD_V100R002C70/install/FusionInsight-Hadoop-2.7.2/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Driver stacktrace:
at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1478)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1466)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1465)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1465)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:813)
at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:813)
at scala.Option.foreach(Option.scala:257)
at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:813)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1693)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1648)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1637)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:639)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1946)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1959)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1972)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1986)
at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:937)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
at org.apache.spark.rdd.RDD.collect(RDD.scala:936)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$$anonfun$createSecondaryIndex$2$$anon$1.call(SecondaryIndexCreator.scala:92)
at org.apache.carbondata.spark.rdd.SecondaryIndexCreator$$anonfun$createSecondaryIndex$2$$anon$1.call(SecondaryIndexCreator.scala:77)
... 4 more
2017-06-27 14:06:34,794 | ERROR | [pool-23-thread-83] | Error running hive query: | org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:187)
org.apache.hive.service.cli.HiveSQLException: java.lang.reflect.UndeclaredThrowableException
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:333)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:183)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:180)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1769)
at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1.run(SparkExecuteStatementOperation.scala:193)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:748)
2017-06-27 14:06:36,996 | INFO | [main] | the jdbc URL switch open:jdbc:hive2://10.19.91.225:22550/default;principal=spark2x/[hidden email];healthcheck=true;saslQop=auth-conf;auth=KERBEROS;user.principal=spark2x/[hidden email];user.keytab=/opt/huawei/Bigdata/FusionInsight_Spark2x_V100R002C70/install/FusionInsight-Spark2x-2.1/keytab/spark2x/JDBCServer/spark2x.keytab | org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:145)
2017-06-27 14:06:37,003 | INFO | [main] | Supplied authorities: 10.19.91.225:22550 | org.apache.hive.jdbc.Utils.parseURL(Utils.java:391)
2017-06-27 14:06:37,625 | WARN | [main] | HiveConf of name hive.thriftProxy.user does not exist | org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:2920)
2017-06-27 14:06:37,625 | WARN | [main] | HiveConf of name hive.thriftProxy.address does not exist | org.apache.hadoop.hive.conf.HiveConf.initialize(HiveConf.java:2920)
2017-06-27 14:06:37,629 | INFO | [main] | Resolved authority: 10.19.91.225:22550 | org.apache.hive.jdbc.Utils.parseURL(Utils.java:490)
2017-06-27 14:06:37,785 | WARN | [main] | Unable to load native-hadoop library for your platform... using builtin-java classes where applicable | org.apache.hadoop.util.NativeCodeLoader.<clinit>(NativeCodeLoader.java:62)
2017-06-27 14:06:38,050 | INFO | [main] | user login success. | org.apache.hive.jdbc.HiveConnection.login(HiveConnection.java:295)
2017-06-27 14:06:38,118 | INFO | [main] | Will try to open client transport with JDBC Uri: jdbc:hive2://10.19.91.225:22550/default;principal=spark2x/[hidden email];healthcheck=true;saslQop=auth-conf;auth=KERBEROS;user.principal=spark2x/[hidden email];user.keytab=/opt/huawei/Bigdata/FusionInsight_Spark2x_V100R002C70/install/FusionInsight-Spark2x-2.1/keytab/spark2x/JDBCServer/spark2x.keytab | org.apache.hive.jdbc.HiveConnection.openTransport(HiveConnection.java:317)
2017-06-27 14:06:38,177 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V8 | org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:461)
2017-06-27 14:06:38,177 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Trying to connect to metastore with URI thrift://10.19.91.224:21088 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:469)
2017-06-27 14:06:38,180 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Opened a connection to metastore, current connections: 38 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:514)
2017-06-27 14:06:38,181 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Connected to metastore. | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:593)
2017-06-27 14:06:38,245 | WARN | [HiveServer2-Handler-Pool: Thread-258] | load mapred-default.xml, HIVE_CONF_DIR env not found! | org.apache.hadoop.hive.ql.session.SessionState.loadMapredDefaultXml(SessionState.java:1101)
2017-06-27 14:06:38,246 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Closed a connection to metastore, current connections: 37 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:622)
2017-06-27 14:06:38,247 | INFO | [HiveServer2-Handler-Pool: Thread-258] | Trying to connect to metastore with URI thrift://10.19.91.224:21088 | org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:469)

Expected Result: Secondary index creation on the timestamp column should succeed.

Summary: UndeclaredThrowableException thrown with enable.unsafe.columnpage = true  (was: UndeclaredThrowableException thrown when secondary index created on timestamp column for multiple loads (unique data) with enable.unsafe.columnpage = true)

> UndeclaredThrowableException thrown with enable.unsafe.columnpage = true
> ------------------------------------------------------------------------
>
> Key: CARBONDATA-1237
> URL: https://issues.apache.org/jira/browse/CARBONDATA-1237
> Project: CarbonData
> Issue Type: Bug
> Components: data-query
> Affects Versions: 1.1.0
> Environment: SUSE 11 SP4 3 node cluster.
> Reporter: Chetan Bhat
> Assignee: QiangCai
>
> Steps:
> Configure enable.unsafe.columnpage = true.
> Create a table having a timestamp column.
> Load data - 2 loads using a different CSV for each, every CSV containing unique data.
> 0: jdbc:hive2://10.19.91.225:22550/default> CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='128','include_dictionary'='CUST_NAME,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1,CUST_ID','sort_columns'='CUST_NAME');
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (1.017 seconds)
> 0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split1.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (10.798 seconds)
> 0: jdbc:hive2://10.19.91.225:22550/default> LOAD DATA INPATH 'hdfs://hacluster/chetan/split2.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (16.006 seconds)
> Actual Result (Issue) - UndeclaredThrowableException is thrown on the timestamp column.
> Error: java.lang.reflect.UndeclaredThrowableException (state=,code=0)
> Expected Result: UndeclaredThrowableException should not be thrown.

--
This message was sent by Atlassian JIRA
(v6.4.14#64029)