[jira] [Updated] (CARBONDATA-3741) Fix ParseException from hive during ALTER SET TBLPROPERTIES if database name starts with Underscore

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

[jira] [Updated] (CARBONDATA-3741) Fix ParseException from hive during ALTER SET TBLPROPERTIES if database name starts with Underscore

Akash R Nilugal (Jira)

     [ https://issues.apache.org/jira/browse/CARBONDATA-3741?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Indhumathi Muthumurugesh updated CARBONDATA-3741:
-------------------------------------------------
    Description:
Queries:

drop database if exists _default cascade;
 create database _default;
 create table _default.OneRowTable(col1 string, col2 string, col3 int, col4 double) STORED AS carbondata;
 insert into _default.OneRowTable select * from _default.OneRowTable;

 

Check the logs and find the exception:

NoViableAltException(13@[192:1: tableName : (db= identifier DOT tab= identifier -> ^( TOK_TABNAME $db $tab) |tab= identifier -> ^( TOK_TABNAME $tab) );])
 at org.antlr.runtime.DFA.noViableAlt(DFA.java:158)
 at org.antlr.runtime.DFA.predict(DFA.java:144)
 at org.apache.hadoop.hive.ql.parse.HiveParser_FromClauseParser.tableName(HiveParser_FromClauseParser.java:4747)
 at org.apache.hadoop.hive.ql.parse.HiveParser.tableName(HiveParser.java:45920)
 at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatement(HiveParser.java:7394)
 at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2685)
 at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1650)
 at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1109)
 at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:202)
 at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
 at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:396)
 at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
 at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
 at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
 at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
 at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:718)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:707)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:275)
 at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:213)
 at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:212)
 at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:258)
 at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:707)
 at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:697)
 at org.apache.spark.sql.hive.CarbonSessionCatalogUtil$.alterTable(CarbonSessionCatalogUtil.scala:75)
 at org.apache.spark.sql.secondaryindex.util.CarbonInternalScalaUtil$.addOrModifyTableProperty(CarbonInternalScalaUtil.scala:367)
 at org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore$.refreshIndexInfo(CarbonInternalMetastore.scala:180)
 at org.apache.spark.sql.secondaryindex.events.CreateCarbonRelationEventListener.onEvent(CreateCarbonRelationEventListener.scala:46)
 at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
 at org.apache.spark.sql.hive.CarbonFileMetastore.readCarbonSchema(CarbonFileMetastore.scala:159)
 at org.apache.spark.sql.hive.CarbonFileMetastore.createCarbonRelation(CarbonFileMetastore.scala:139)
 at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation$lzycompute(CarbonDatasourceHadoopRelation.scala:60)
 at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation(CarbonDatasourceHadoopRelation.scala:58)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.castChildOutput(CarbonAnalysisRules.scala:279)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:271)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:265)
 at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
 at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
 at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:258)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:248)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:265)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:263)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
 at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
 at scala.collection.immutable.List.foldLeft(List.scala:84)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
 at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
 at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
 at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
 at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
 at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
 at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
 at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
 at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
 at org.apache.spark.sql.test.SparkTestQueryExecutor.sql(SparkTestQueryExecutor.scala:36)
 at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:114)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply$mcV$sp(InsertIntoCarbonTableSpark2TestCase.scala:38)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
 at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
 at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
 at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
 at org.scalatest.Transformer.apply(Transformer.scala:22)
 at org.scalatest.Transformer.apply(Transformer.scala:20)
 at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
 at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
 at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
 at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
 at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
 at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
 at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
 at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
 at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
 at org.scalatest.Suite$class.run(Suite.scala:1424)
 at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
 at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableSpark2TestCase.scala:23)
 at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
 at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.run(InsertIntoCarbonTableSpark2TestCase.scala:23)
 at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
 at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
 at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
 at org.scalatest.tools.Runner$.run(Runner.scala:883)
 at org.scalatest.tools.Runner.run(Runner.scala)
 at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
 at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
2020-03-13 14:48:16 ERROR Driver:960 - FAILED: ParseException line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name
org.apache.hadoop.hive.ql.parse.ParseException: line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name
 at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:205)
 at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
 at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:396)
 at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
 at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
 at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
 at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
 at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:718)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:707)
 at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:275)
 at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:213)
 at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:212)
 at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:258)
 at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:707)
 at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:697)
 at org.apache.spark.sql.hive.CarbonSessionCatalogUtil$.alterTable(CarbonSessionCatalogUtil.scala:75)
 at org.apache.spark.sql.secondaryindex.util.CarbonInternalScalaUtil$.addOrModifyTableProperty(CarbonInternalScalaUtil.scala:367)
 at org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore$.refreshIndexInfo(CarbonInternalMetastore.scala:180)
 at org.apache.spark.sql.secondaryindex.events.CreateCarbonRelationEventListener.onEvent(CreateCarbonRelationEventListener.scala:46)
 at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
 at org.apache.spark.sql.hive.CarbonFileMetastore.readCarbonSchema(CarbonFileMetastore.scala:159)
 at org.apache.spark.sql.hive.CarbonFileMetastore.createCarbonRelation(CarbonFileMetastore.scala:139)
 at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation$lzycompute(CarbonDatasourceHadoopRelation.scala:60)
 at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation(CarbonDatasourceHadoopRelation.scala:58)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.castChildOutput(CarbonAnalysisRules.scala:279)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:271)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:265)
 at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
 at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
 at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:258)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
 at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:248)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:265)
 at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:263)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
 at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
 at scala.collection.immutable.List.foldLeft(List.scala:84)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
 at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
 at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
 at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
 at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
 at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
 at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
 at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
 at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
 at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
 at org.apache.spark.sql.test.SparkTestQueryExecutor.sql(SparkTestQueryExecutor.scala:36)
 at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:114)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply$mcV$sp(InsertIntoCarbonTableSpark2TestCase.scala:38)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
 at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
 at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
 at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
 at org.scalatest.Transformer.apply(Transformer.scala:22)
 at org.scalatest.Transformer.apply(Transformer.scala:20)
 at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
 at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
 at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
 at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
 at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
 at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
 at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
 at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
 at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
 at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
 at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
 at org.scalatest.Suite$class.run(Suite.scala:1424)
 at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
 at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
 at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableSpark2TestCase.scala:23)
 at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
 at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
 at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.run(InsertIntoCarbonTableSpark2TestCase.scala:23)
 at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
 at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
 at scala.collection.immutable.List.foreach(List.scala:381)
 at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
 at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
 at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
 at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
 at org.scalatest.tools.Runner$.run(Runner.scala:883)
 at org.scalatest.tools.Runner.run(Runner.scala)
 at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
 at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)

2020-03-13 14:48:16 ERROR HiveClientImpl:70 -
======================
HIVE FAILURE OUTPUT
======================
FAILED: ParseException line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name

======================
END HIVE FAILURE OUTPUT
======================

> Fix ParseException from hive during ALTER SET TBLPROPERTIES if database name starts with Underscore
> --------------------------------------------------------------------------------------------------
>
>                 Key: CARBONDATA-3741
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-3741
>             Project: CarbonData
>          Issue Type: Bug
>            Reporter: Indhumathi Muthumurugesh
>            Priority: Minor
>
> Queries:
> drop database if exists _default cascade;
>  create database _default;
>  create table _default.OneRowTable(col1 string, col2 string, col3 int, col4 double) STORED AS carbondata;
>  insert into _default.OneRowTable select * from _default.OneRowTable;
>  
> Check the logs and find the exception:
> NoViableAltException(13@[192:1: tableName : (db= identifier DOT tab= identifier -> ^( TOK_TABNAME $db $tab) |tab= identifier -> ^( TOK_TABNAME $tab) );])
>  at org.antlr.runtime.DFA.noViableAlt(DFA.java:158)
>  at org.antlr.runtime.DFA.predict(DFA.java:144)
>  at org.apache.hadoop.hive.ql.parse.HiveParser_FromClauseParser.tableName(HiveParser_FromClauseParser.java:4747)
>  at org.apache.hadoop.hive.ql.parse.HiveParser.tableName(HiveParser.java:45920)
>  at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatement(HiveParser.java:7394)
>  at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2685)
>  at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1650)
>  at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1109)
>  at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:202)
>  at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
>  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:396)
>  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
>  at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
>  at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:718)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:707)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:275)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:213)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:212)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:258)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:707)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:697)
>  at org.apache.spark.sql.hive.CarbonSessionCatalogUtil$.alterTable(CarbonSessionCatalogUtil.scala:75)
>  at org.apache.spark.sql.secondaryindex.util.CarbonInternalScalaUtil$.addOrModifyTableProperty(CarbonInternalScalaUtil.scala:367)
>  at org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore$.refreshIndexInfo(CarbonInternalMetastore.scala:180)
>  at org.apache.spark.sql.secondaryindex.events.CreateCarbonRelationEventListener.onEvent(CreateCarbonRelationEventListener.scala:46)
>  at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
>  at org.apache.spark.sql.hive.CarbonFileMetastore.readCarbonSchema(CarbonFileMetastore.scala:159)
>  at org.apache.spark.sql.hive.CarbonFileMetastore.createCarbonRelation(CarbonFileMetastore.scala:139)
>  at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation$lzycompute(CarbonDatasourceHadoopRelation.scala:60)
>  at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation(CarbonDatasourceHadoopRelation.scala:58)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.castChildOutput(CarbonAnalysisRules.scala:279)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:271)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:265)
>  at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
>  at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
>  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
>  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:258)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:248)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:265)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:263)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
>  at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
>  at scala.collection.immutable.List.foldLeft(List.scala:84)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
>  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
>  at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
>  at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
>  at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
>  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
>  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
>  at org.apache.spark.sql.test.SparkTestQueryExecutor.sql(SparkTestQueryExecutor.scala:36)
>  at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:114)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply$mcV$sp(InsertIntoCarbonTableSpark2TestCase.scala:38)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
>  at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
>  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
>  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>  at org.scalatest.Transformer.apply(Transformer.scala:22)
>  at org.scalatest.Transformer.apply(Transformer.scala:20)
>  at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
>  at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
>  at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
>  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>  at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
>  at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
>  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>  at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
>  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
>  at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
>  at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
>  at org.scalatest.Suite$class.run(Suite.scala:1424)
>  at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
>  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>  at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
>  at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableSpark2TestCase.scala:23)
>  at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
>  at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.run(InsertIntoCarbonTableSpark2TestCase.scala:23)
>  at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
>  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
>  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
>  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
>  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
>  at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
>  at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
>  at org.scalatest.tools.Runner$.run(Runner.scala:883)
>  at org.scalatest.tools.Runner.run(Runner.scala)
>  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
>  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> 2020-03-13 14:48:16 ERROR Driver:960 - FAILED: ParseException line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name
> org.apache.hadoop.hive.ql.parse.ParseException: line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name
>  at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:205)
>  at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
>  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:396)
>  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
>  at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
>  at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:718)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$runHive$1.apply(HiveClientImpl.scala:707)
>  at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:275)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:213)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:212)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:258)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.runHive(HiveClientImpl.scala:707)
>  at org.apache.spark.sql.hive.client.HiveClientImpl.runSqlHive(HiveClientImpl.scala:697)
>  at org.apache.spark.sql.hive.CarbonSessionCatalogUtil$.alterTable(CarbonSessionCatalogUtil.scala:75)
>  at org.apache.spark.sql.secondaryindex.util.CarbonInternalScalaUtil$.addOrModifyTableProperty(CarbonInternalScalaUtil.scala:367)
>  at org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore$.refreshIndexInfo(CarbonInternalMetastore.scala:180)
>  at org.apache.spark.sql.secondaryindex.events.CreateCarbonRelationEventListener.onEvent(CreateCarbonRelationEventListener.scala:46)
>  at org.apache.carbondata.events.OperationListenerBus.fireEvent(OperationListenerBus.java:83)
>  at org.apache.spark.sql.hive.CarbonFileMetastore.readCarbonSchema(CarbonFileMetastore.scala:159)
>  at org.apache.spark.sql.hive.CarbonFileMetastore.createCarbonRelation(CarbonFileMetastore.scala:139)
>  at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation$lzycompute(CarbonDatasourceHadoopRelation.scala:60)
>  at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation(CarbonDatasourceHadoopRelation.scala:58)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.castChildOutput(CarbonAnalysisRules.scala:279)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:271)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts$$anonfun$apply$2.applyOrElse(CarbonAnalysisRules.scala:265)
>  at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
>  at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:259)
>  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
>  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:258)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$class.transformDown(AnalysisHelper.scala:149)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
>  at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:248)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:265)
>  at org.apache.spark.sql.hive.CarbonPreInsertionCasts.apply(CarbonAnalysisRules.scala:263)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:87)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:84)
>  at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:124)
>  at scala.collection.immutable.List.foldLeft(List.scala:84)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:84)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:76)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:76)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:127)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:121)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:106)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer$$anonfun$executeAndCheck$1.apply(Analyzer.scala:105)
>  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:201)
>  at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
>  at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
>  at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
>  at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
>  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:78)
>  at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:642)
>  at org.apache.spark.sql.test.SparkTestQueryExecutor.sql(SparkTestQueryExecutor.scala:36)
>  at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:114)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply$mcV$sp(InsertIntoCarbonTableSpark2TestCase.scala:38)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase$$anonfun$2.apply(InsertIntoCarbonTableSpark2TestCase.scala:34)
>  at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
>  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
>  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>  at org.scalatest.Transformer.apply(Transformer.scala:22)
>  at org.scalatest.Transformer.apply(Transformer.scala:20)
>  at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
>  at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
>  at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
>  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>  at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
>  at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
>  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>  at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
>  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
>  at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
>  at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
>  at org.scalatest.Suite$class.run(Suite.scala:1424)
>  at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
>  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>  at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
>  at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableSpark2TestCase.scala:23)
>  at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
>  at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
>  at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableSpark2TestCase.run(InsertIntoCarbonTableSpark2TestCase.scala:23)
>  at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
>  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
>  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
>  at scala.collection.immutable.List.foreach(List.scala:381)
>  at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
>  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
>  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
>  at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
>  at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
>  at org.scalatest.tools.Runner$.run(Runner.scala:883)
>  at org.scalatest.tools.Runner.run(Runner.scala)
>  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
>  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> 2020-03-13 14:48:16 ERROR HiveClientImpl:70 -
> ======================
> HIVE FAILURE OUTPUT
> ======================
> FAILED: ParseException line 1:12 cannot recognize input near '_default._onerowtable' 'SET' 'TBLPROPERTIES' in table name
> ======================
> END HIVE FAILURE OUTPUT
> ======================



--
This message was sent by Atlassian Jira
(v8.3.4#803005)