[ https://issues.apache.org/jira/browse/CARBONDATA-944?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Liang Chen resolved CARBONDATA-944.
-----------------------------------
       Resolution: Fixed
    Fix Version/s: 1.1.0

> Fix wrong log info during drop table in spark-shell
> ---------------------------------------------------
>
>                 Key: CARBONDATA-944
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-944
>             Project: CarbonData
>          Issue Type: Improvement
>          Components: sql
>            Reporter: Liang Chen
>            Assignee: Bhavya Aggarwal
>            Priority: Minor
>             Fix For: 1.1.0
>
>          Time Spent: 1h 40m
>  Remaining Estimate: 0h
>
> In spark-shell, dropping table "t3" with the script below completes successfully, but the following incorrect log messages appear:
> scala> carbon.sql("drop table t3")
> 17/04/17 23:00:08 AUDIT CarbonDropTableCommand: [AppledeMacBook-Pro.local][apple][Thread-1]Deleting table [t3] under database [default]
> 17/04/17 23:00:09 AUDIT CreateTable: [AppledeMacBook-Pro.local][apple][Thread-1]Creating Table with Database name [default] and Table name [t3]
> 17/04/17 23:00:09 AUDIT CreateTable: [AppledeMacBook-Pro.local][apple][Thread-1]Table creation with Database name [default] and Table name [t3] failed. Table [t3] already exists under database [default]
> 17/04/17 23:00:09 WARN DropTableCommand: org.spark_project.guava.util.concurrent.UncheckedExecutionException: java.lang.RuntimeException: Table [t3] already exists under database [default]
> org.spark_project.guava.util.concurrent.UncheckedExecutionException: java.lang.RuntimeException: Table [t3] already exists under database [default]
>     at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2263)
>     at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
>     at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>     at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>     at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.getUnchecked(LocalCache.java:4880)
>     at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.apply(LocalCache.java:4898)
>     at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:110)
>     at org.apache.spark.sql.hive.HiveSessionCatalog.lookupRelation(HiveSessionCatalog.scala:69)
>     at org.apache.spark.sql.SparkSession.table(SparkSession.scala:578)
>     at org.apache.spark.sql.SparkSession.table(SparkSession.scala:574)
>     at org.apache.spark.sql.execution.command.DropTableCommand.run(ddl.scala:203)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
>     at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>     at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
>     at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
>     at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
>     at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
>     at org.apache.spark.sql.hive.CarbonHiveMetadataUtil$.invalidateAndDropTable(CarbonHiveMetadataUtil.scala:44)
>     at org.apache.spark.sql.hive.CarbonMetastore.dropTable(CarbonMetastore.scala:509)
>     at org.apache.spark.sql.execution.command.CarbonDropTableCommand.run(carbonTableSchema.scala:725)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
>     at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>     at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
>     at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
>     at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
>     at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:31)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:36)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:38)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:40)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:42)
>     at $line19.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:44)
>     at $line19.$read$$iw$$iw$$iw$$iw.<init>(<console>:46)
>     at $line19.$read$$iw$$iw$$iw.<init>(<console>:48)
>     at $line19.$read$$iw$$iw.<init>(<console>:50)
>     at $line19.$read$$iw.<init>(<console>:52)
>     at $line19.$read.<init>(<console>:54)
>     at $line19.$read$.<init>(<console>:58)
>     at $line19.$read$.<clinit>(<console>)
>     at $line19.$eval$.$print$lzycompute(<console>:7)
>     at $line19.$eval$.$print(<console>:6)
>     at $line19.$eval.$print(<console>)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
>     at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
>     at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
>     at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
>     at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
>     at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
>     at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
>     at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
>     at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
>     at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
>     at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
>     at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
>     at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
>     at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
>     at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>     at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>     at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
>     at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
>     at org.apache.spark.repl.Main$.doMain(Main.scala:68)
>     at org.apache.spark.repl.Main$.main(Main.scala:51)
>     at org.apache.spark.repl.Main.main(Main.scala)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
>     at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
>     at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
>     at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
>     at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.RuntimeException: Table [t3] already exists under database [default]
>     at scala.sys.package$.error(package.scala:27)
>     at org.apache.spark.sql.execution.command.CreateTable.run(carbonTableSchema.scala:160)
>     at org.apache.spark.sql.CarbonSource.createTableIfNotExists(CarbonSource.scala:180)
>     at org.apache.spark.sql.CarbonSource.createRelation(CarbonSource.scala:114)
>     at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:328)
>     at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1.load(HiveMetastoreCatalog.scala:76)
>     at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anon$1.load(HiveMetastoreCatalog.scala:58)
>     at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>     at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>     at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>     at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
>     ... 91 more
> 17/04/17 23:00:10 AUDIT CarbonDropTableCommand: [AppledeMacBook-Pro.local][apple][Thread-1]Deleted table [t3] under database [default]
> res2: org.apache.spark.sql.DataFrame = []

--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
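A note on what the trace shows: Spark's DropTableCommand eagerly resolves the relation (SparkSession.table, ddl.scala:203) before dropping it. That lookup goes through HiveMetastoreCatalog's relation cache, whose loader calls CarbonSource.createRelation and then createTableIfNotExists, which audits a table creation and throws "Table [t3] already exists under database [default]". The drop itself still succeeds; only the logged messages are wrong. Below is a minimal sketch of one way to avoid the spurious output: drop through the SessionCatalog directly instead of issuing DROP TABLE SQL, so the eager lookup never runs. This is illustrative only and not necessarily the patch that resolved this issue; QuietDropSketch and dropTableSilently are hypothetical names, and the code assumes Spark 2.1 APIs.

    // Placed under org.apache.spark.sql (as CarbonData's integration code is)
    // because SparkSession.sessionState is private[sql] in Spark 2.1.
    package org.apache.spark.sql.hive

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.catalyst.TableIdentifier

    // Hypothetical sketch: dropping via the SessionCatalog bypasses
    // DropTableCommand, so the eager lookupRelation that re-enters
    // CarbonSource.createTableIfNotExists never happens and the
    // spurious "already exists" messages are not logged.
    object QuietDropSketch {
      def dropTableSilently(sparkSession: SparkSession, dbName: String, tableName: String): Unit = {
        val identifier = TableIdentifier(tableName, Some(dbName))
        val catalog = sparkSession.sessionState.catalog
        // Evict any cached relation for the table, then remove the metastore
        // entry without touching the datasource's creation path.
        catalog.refreshTable(identifier)
        catalog.dropTable(identifier, ignoreIfNotExists = true, purge = false)
      }
    }

An alternative with the same effect on the log output would be to quiet the creation path itself, e.g. have CarbonSource.createTableIfNotExists return the existing relation without auditing a failed creation when the table is already registered.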