[ https://issues.apache.org/jira/browse/CARBONDATA-1147?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

anubhav tarar reassigned CARBONDATA-1147:
-----------------------------------------

    Assignee: anubhav tarar

> Error when inserting data selected from a Hive table
> -----------------------------------------------------
>
>                 Key: CARBONDATA-1147
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-1147
>             Project: CarbonData
>          Issue Type: Bug
>          Components: spark-integration
>    Affects Versions: 1.1.0
>         Environment: HDP version 2.6
> Spark version 2.1.0.2.6.0.3-8
> Scala version 2.11.8
> Java version 1.8.0_112
> Hive version 1.2.1.2.6
> CarbonData version 1.1.0 (carbondata_2.11-1.1.0-shade-hadoop2.7.3.jar)
>            Reporter: lonly
>            Assignee: anubhav tarar
>            Priority: Critical
>
> scala> carbon.sql("insert into table carbon.internet_cafes_user_basic_carbon select * from rzx_dmp.internet_cafes_user_basic_orc")
> [Stage 2:> (0 + 4) / 18]17/06/09 15:34:11 WARN TaskSetManager: Lost task 0.0 in stage 2.0 (TID 2, hmly12, executor 2): java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:82)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at java.lang.Class.forName0(Native Method)
> 	at java.lang.Class.forName(Class.java:348)
> 	at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
> 	at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1620)
> 	at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
> 	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
> 	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
> 	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:85)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
> 	at org.apache.spark.scheduler.Task.run(Task.scala:99)
> 	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> 	at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at java.lang.ClassLoader.findClass(ClassLoader.java:530)
> 	at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:77)
> 	... 23 more
> 17/06/09 15:34:11 ERROR TaskSetManager: Task 0 in stage 2.0 failed 4 times; aborting job
> 17/06/09 15:34:11 ERROR GlobalDictionaryUtil$: main generate global dictionary failed
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 2.0 failed 4 times, most recent failure: Lost task 0.3 in stage 2.0 (TID 11, hmly12, executor 2): java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:82)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at java.lang.Class.forName0(Native Method)
> 	at java.lang.Class.forName(Class.java:348)
> 	at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
> 	at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1620)
> 	at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
> 	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
> 	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
> 	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:85)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
> 	at org.apache.spark.scheduler.Task.run(Task.scala:99)
> 	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> 	at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at java.lang.ClassLoader.findClass(ClassLoader.java:530)
> 	at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:77)
> 	... 23 more
> Driver stacktrace:
> 	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
> 	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> 	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
> 	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
> 	at scala.Option.foreach(Option.scala:257)
> 	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
> 	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
> 	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
> 	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
> 	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
> 	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
> 	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1925)
> 	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1938)
> 	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1951)
> 	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1965)
> 	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:936)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
> 	at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
> 	at org.apache.spark.rdd.RDD.collect(RDD.scala:935)
> 	at org.apache.carbondata.spark.util.GlobalDictionaryUtil$.generateGlobalDictionary(GlobalDictionaryUtil.scala:746)
> 	at org.apache.spark.sql.execution.command.LoadTable.run(carbonTableSchema.scala:568)
> 	at org.apache.spark.sql.execution.command.LoadTableByInsert.run(carbonTableSchema.scala:304)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
> 	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:34)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:39)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:41)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:43)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:45)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:47)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:49)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:51)
> 	at $line21.$read$$iw$$iw$$iw$$iw.<init>(<console>:53)
> 	at $line21.$read$$iw$$iw$$iw.<init>(<console>:55)
> 	at $line21.$read$$iw$$iw.<init>(<console>:57)
> 	at $line21.$read$$iw.<init>(<console>:59)
> 	at $line21.$read.<init>(<console>:61)
> 	at $line21.$read$.<init>(<console>:65)
> 	at $line21.$read$.<clinit>(<console>)
> 	at $line21.$eval$.$print$lzycompute(<console>:7)
> 	at $line21.$eval$.$print(<console>:6)
> 	at $line21.$eval.$print(<console>)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
> 	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
> 	at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
> 	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
> 	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
> 	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
> 	at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
> 	at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
> 	at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
> 	at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
> 	at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
> 	at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
> 	at org.apache.spark.repl.Main$.doMain(Main.scala:69)
> 	at org.apache.spark.repl.Main$.main(Main.scala:52)
> 	at org.apache.spark.repl.Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:745)
> 	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
> 	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:82)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at java.lang.Class.forName0(Native Method)
> 	at java.lang.Class.forName(Class.java:348)
> 	at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:67)
> 	at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1620)
> 	at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
> 	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
> 	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
> 	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
> 	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:85)
> 	at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:53)
> 	at org.apache.spark.scheduler.Task.run(Task.scala:99)
> 	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:322)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> 	at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD
> 	at java.lang.ClassLoader.findClass(ClassLoader.java:530)
> 	at org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> 	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
> 	at org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> 	at org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:77)
> 	... 23 more
> 17/06/09 15:34:11 ERROR LoadTable: main
> java.lang.Exception: generate global dictionary failed, null
> 	at org.apache.carbondata.spark.util.GlobalDictionaryUtil$.generateGlobalDictionary(GlobalDictionaryUtil.scala:790)
> 	at org.apache.spark.sql.execution.command.LoadTable.run(carbonTableSchema.scala:568)
> 	at org.apache.spark.sql.execution.command.LoadTableByInsert.run(carbonTableSchema.scala:304)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
> 	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:34)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:39)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:41)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:43)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:45)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:47)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:49)
> 	at $line21.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:51)
> 	at $line21.$read$$iw$$iw$$iw$$iw.<init>(<console>:53)
> 	at $line21.$read$$iw$$iw$$iw.<init>(<console>:55)
> 	at $line21.$read$$iw$$iw.<init>(<console>:57)
> 	at $line21.$read$$iw.<init>(<console>:59)
> 	at $line21.$read.<init>(<console>:61)
> 	at $line21.$read$.<init>(<console>:65)
> 	at $line21.$read$.<clinit>(<console>)
> 	at $line21.$eval$.$print$lzycompute(<console>:7)
> 	at $line21.$eval$.$print(<console>:6)
> 	at $line21.$eval.$print(<console>)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
> 	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
> 	at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
> 	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
> 	at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
> 	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
> 	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
> 	at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
> 	at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
> 	at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
> 	at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
> 	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
> 	at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
> 	at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
> 	at org.apache.spark.repl.Main$.doMain(Main.scala:69)
> 	at org.apache.spark.repl.Main$.main(Main.scala:52)
> 	at org.apache.spark.repl.Main.main(Main.scala)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:745)
> 	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
> 	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
> 	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
> 	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> 17/06/09 15:34:11 AUDIT LoadTable: [hmly12][root][Thread-1]Dataload failure for carbon.internet_cafes_user_basic_carbon. Please check the logs
> java.lang.Exception: generate global dictionary failed, null
> 	at org.apache.carbondata.spark.util.GlobalDictionaryUtil$.generateGlobalDictionary(GlobalDictionaryUtil.scala:790)
> 	at org.apache.spark.sql.execution.command.LoadTable.run(carbonTableSchema.scala:568)
> 	at org.apache.spark.sql.execution.command.LoadTableByInsert.run(carbonTableSchema.scala:304)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
> 	at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
> 	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
> 	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
> 	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
> 	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
> 	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
> 	at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
> 	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
> 	at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
> 	... 52 elided

--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
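For readers hitting the same trace: a ClassNotFoundException for org.apache.carbondata.spark.rdd.CarbonBlockDistinctValuesCombineRDD raised only on the executors (by ExecutorClassLoader) typically means the CarbonData assembly jar is visible to the driver but was never shipped to the worker JVMs. Below is a minimal sketch of a spark-shell session that distributes the jar explicitly, assuming the CarbonSession builder API from the CarbonData 1.1.0 examples; the local jar directory and the HDFS store path are placeholders, not taken from this report:

    # Launch the shell with the shaded jar so Spark copies it to every executor.
    # The jar file name matches the Environment section above; its directory is hypothetical.
    spark-shell --jars /opt/carbondata/carbondata_2.11-1.1.0-shade-hadoop2.7.3.jar

    scala> import org.apache.spark.sql.SparkSession
    scala> import org.apache.spark.sql.CarbonSession._

    scala> // Build a CarbonSession against a placeholder store location.
    scala> val carbon = SparkSession.builder()
         |   .config(sc.getConf)
         |   .getOrCreateCarbonSession("hdfs:///user/carbon/store")

    scala> // Retry the failing insert once the executors, not just the driver,
    scala> // can load the org.apache.carbondata.spark.rdd classes.
    scala> carbon.sql("insert into table carbon.internet_cafes_user_basic_carbon select * from rzx_dmp.internet_cafes_user_basic_orc")

Setting spark.executor.extraClassPath to a jar path that exists on every node should have the same effect; either way, the key point is that the CarbonData classes must be loadable outside the driver. The ticket itself does not record how the reporter's cluster deployed the jar, so treat this as a workaround sketch rather than the confirmed resolution.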