[jira] [Updated] (CARBONDATA-2912) CSV table can't load csv data with spark2.2



Akash R Nilugal (Jira)

     [ https://issues.apache.org/jira/browse/CARBONDATA-2912?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

xubo245 updated CARBONDATA-2912:
--------------------------------
    Affects Version/s: 1.3.1
          Description:
Test Code:

{code:java}

  test("two insert into: insert into carbon table from csv table and load data") {
    sql("drop table if exists carbon_1")
    sql("drop table if exists carbon_2")
    sql(
      s"""
         | create table carbon_1(name String,age int)
         | using csv
         | options(path "$resourcesPath/cars.csv",header "true")""".stripMargin)
    sql("create table carbon_2(name String,age int) stored by 'carbondata'")
    sql("from carbon_1 insert into carbon_2 select * where age<30 insert into carbon_2 select * where age>35")
    checkAnswer(sql("select * from carbon_1"),
      Seq(Row("Bob", 27), Row("David", 33), Row("Jack", 37)))
    checkAnswer(sql("select * from carbon_2"),
      Seq(Row("Bob", 27), Row("Jack", 37)))
  }
{code}
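
The exception shown below bottoms out in commons-lang3's FastDateFormat (CSVOptions.scala:128 -> FastDateFormat.getInstance), which Spark 2.2 builds for its default CSV timestampFormat "yyyy-MM-dd'T'HH:mm:ss.SSSXXX". The following is only a minimal sketch of that single call, under the assumption that an older commons-lang3 whose FastDatePrinter does not yet understand the "X" pattern letters (support arrived around 3.5) is on the classpath:

{code:java}
// Minimal sketch (assumption: an old commons-lang3, e.g. 3.3.x, on the classpath).
// Spark 2.2's CSVOptions does essentially this for its default timestampFormat;
// a FastDatePrinter that predates "X"-pattern support throws
// java.lang.IllegalArgumentException: Illegal pattern component: XXX here,
// while a newer commons-lang3 (3.5+) accepts the pattern.
import java.util.{Locale, TimeZone}
import org.apache.commons.lang3.time.FastDateFormat

val timestampFormat = FastDateFormat.getInstance(
  "yyyy-MM-dd'T'HH:mm:ss.SSSXXX", TimeZone.getTimeZone("GMT"), Locale.US)
{code}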


Running this test throws the following exception:


{code:java}
18/09/04 01:20:54 AUDIT CarbonDataRDDFactory$: [localhost][xubo][Thread-1]Data load request has been received for table default.carbon_2
18/09/04 01:20:55 ERROR CarbonDataRDDFactory$: ScalaTest-run-running-InsertIntoCarbonTableTestCase load data frame failed
java.lang.IllegalArgumentException: Illegal pattern component: XXX
        at org.apache.commons.lang3.time.FastDatePrinter.parsePattern(FastDatePrinter.java:282)
        at org.apache.commons.lang3.time.FastDatePrinter.init(FastDatePrinter.java:149)
        at org.apache.commons.lang3.time.FastDatePrinter.<init>(FastDatePrinter.java:142)
        at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:384)
        at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:369)
        at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:91)
        at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:88)
        at org.apache.commons.lang3.time.FormatCache.getInstance(FormatCache.java:82)
        at org.apache.commons.lang3.time.FastDateFormat.getInstance(FastDateFormat.java:165)
        at org.apache.spark.sql.execution.datasources.csv.CSVOptions.<init>(CSVOptions.scala:128)
        at org.apache.spark.sql.execution.datasources.csv.CSVOptions.<init>(CSVOptions.scala:39)
        at org.apache.spark.sql.execution.datasources.csv.CSVFileFormat.buildReader(CSVFileFormat.scala:98)
        at org.apache.spark.sql.execution.datasources.FileFormat$class.buildReaderWithPartitionValues(FileFormat.scala:117)
        at org.apache.spark.sql.execution.datasources.TextBasedFileFormat.buildReaderWithPartitionValues(FileFormat.scala:148)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD$lzycompute(DataSourceScanExec.scala:285)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD(DataSourceScanExec.scala:283)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDDs(DataSourceScanExec.scala:303)
        at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:124)
        at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:42)
        at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:386)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.DeserializeToObjectExec.doExecute(objects.scala:95)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
        at org.apache.spark.sql.Dataset.rdd$lzycompute(Dataset.scala:2586)
        at org.apache.spark.sql.Dataset.rdd(Dataset.scala:2583)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadDataFrame(CarbonDataRDDFactory.scala:966)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:349)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:511)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:280)
        at org.apache.spark.sql.execution.command.management.CarbonInsertIntoCommand.processData(CarbonInsertIntoCommand.scala:83)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:92)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
        at scala.collection.AbstractTraversable.map(Traversable.scala:104)
        at org.apache.spark.sql.execution.UnionExec.doExecute(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:228)
        at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:275)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:103)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply$mcV$sp(InsertIntoCarbonTableTestCase.scala:431)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
        at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
18/09/04 01:20:55 ERROR CarbonDataRDDFactory$: ScalaTest-run-running-InsertIntoCarbonTableTestCase
java.lang.IllegalArgumentException: Illegal pattern component: XXX
        at org.apache.commons.lang3.time.FastDatePrinter.parsePattern(FastDatePrinter.java:282)
        at org.apache.commons.lang3.time.FastDatePrinter.init(FastDatePrinter.java:149)
        at org.apache.commons.lang3.time.FastDatePrinter.<init>(FastDatePrinter.java:142)
        at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:384)
        at org.apache.commons.lang3.time.FastDateFormat.<init>(FastDateFormat.java:369)
        at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:91)
        at org.apache.commons.lang3.time.FastDateFormat$1.createInstance(FastDateFormat.java:88)
        at org.apache.commons.lang3.time.FormatCache.getInstance(FormatCache.java:82)
        at org.apache.commons.lang3.time.FastDateFormat.getInstance(FastDateFormat.java:165)
        at org.apache.spark.sql.execution.datasources.csv.CSVOptions.<init>(CSVOptions.scala:128)
        at org.apache.spark.sql.execution.datasources.csv.CSVOptions.<init>(CSVOptions.scala:39)
        at org.apache.spark.sql.execution.datasources.csv.CSVFileFormat.buildReader(CSVFileFormat.scala:98)
        at org.apache.spark.sql.execution.datasources.FileFormat$class.buildReaderWithPartitionValues(FileFormat.scala:117)
        at org.apache.spark.sql.execution.datasources.TextBasedFileFormat.buildReaderWithPartitionValues(FileFormat.scala:148)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD$lzycompute(DataSourceScanExec.scala:285)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDD(DataSourceScanExec.scala:283)
        at org.apache.spark.sql.execution.FileSourceScanExec.inputRDDs(DataSourceScanExec.scala:303)
        at org.apache.spark.sql.execution.FilterExec.inputRDDs(basicPhysicalOperators.scala:124)
        at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:42)
        at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:386)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.DeserializeToObjectExec.doExecute(objects.scala:95)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
        at org.apache.spark.sql.Dataset.rdd$lzycompute(Dataset.scala:2586)
        at org.apache.spark.sql.Dataset.rdd(Dataset.scala:2583)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadDataFrame(CarbonDataRDDFactory.scala:966)
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:349)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:511)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:280)
        at org.apache.spark.sql.execution.command.management.CarbonInsertIntoCommand.processData(CarbonInsertIntoCommand.scala:83)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:92)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
        at scala.collection.AbstractTraversable.map(Traversable.scala:104)
        at org.apache.spark.sql.execution.UnionExec.doExecute(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:228)
        at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:275)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:103)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply$mcV$sp(InsertIntoCarbonTableTestCase.scala:431)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
        at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
18/09/04 01:20:55 AUDIT CarbonDataRDDFactory$: [localhost][xubo][Thread-1]Data load is failed for default.carbon_2
18/09/04 01:20:55 ERROR CarbonLoadDataCommand: ScalaTest-run-running-InsertIntoCarbonTableTestCase
java.lang.Exception: DataLoad failure
        at org.apache.carbondata.spark.rdd.CarbonDataRDDFactory$.loadCarbonData(CarbonDataRDDFactory.scala:470)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.loadData(CarbonLoadDataCommand.scala:511)
        at org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand.processData(CarbonLoadDataCommand.scala:280)
        at org.apache.spark.sql.execution.command.management.CarbonInsertIntoCommand.processData(CarbonInsertIntoCommand.scala:83)
        at org.apache.spark.sql.execution.command.AtomicRunnableCommand.run(package.scala:92)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.UnionExec$$anonfun$doExecute$1.apply(basicPhysicalOperators.scala:556)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
        at scala.collection.AbstractTraversable.map(Traversable.scala:104)
        at org.apache.spark.sql.execution.UnionExec.doExecute(basicPhysicalOperators.scala:556)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
        at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:228)
        at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:275)
        at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:103)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply$mcV$sp(InsertIntoCarbonTableTestCase.scala:431)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
        at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
18/09/04 01:20:55 AUDIT CarbonLoadDataCommand: [localhost][xubo][Thread-1]Dataload failure for default.carbon_2. Please check the logs
18/09/04 01:20:55 ERROR CarbonInsertIntoCommand: ScalaTest-run-running-InsertIntoCarbonTableTestCase Got exception java.lang.Exception: DataLoad failure when processing data. But this command does not support undo yet, skipping the undo part.

{code}
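
A possible workaround, assuming the cause is the commons-lang3 version conflict suggested by the stack trace, is either to put commons-lang3 3.5+ on the classpath or to pass an explicit timestampFormat (without the "XXX" component) to the csv source table, for example:

{code:java}
// Hypothetical workaround sketch (not verified here): give the csv source an
// explicit timestampFormat so CSVOptions never hands "XXX" to FastDateFormat.
sql(
  s"""
     | create table carbon_1(name String, age int)
     | using csv
     | options(path "$resourcesPath/cars.csv", header "true",
     |   timestampFormat "yyyy-MM-dd HH:mm:ss")""".stripMargin)
{code}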

        at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:68)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
        at org.apache.spark.sql.test.Spark2TestQueryExecutor.sql(Spark2TestQueryExecutor.scala:35)
        at org.apache.spark.sql.test.util.QueryTest.sql(QueryTest.scala:103)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply$mcV$sp(InsertIntoCarbonTableTestCase.scala:431)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase$$anonfun$23.apply(InsertIntoCarbonTableTestCase.scala:422)
        at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
        at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.org$scalatest$BeforeAndAfterAll$$super$run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.carbondata.spark.testsuite.allqueries.InsertIntoCarbonTableTestCase.run(InsertIntoCarbonTableTestCase.scala:34)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
18/09/04 01:20:55 AUDIT CarbonLoadDataCommand: [localhost][xubo][Thread-1]Dataload failure for default.carbon_2. Please check the logs
18/09/04 01:20:55 ERROR CarbonInsertIntoCommand: ScalaTest-run-running-InsertIntoCarbonTableTestCase Got exception java.lang.Exception: DataLoad failure when processing data. But this command does not support undo yet, skipping the undo part.

{code}
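The "Illegal pattern component: XXX" raised from commons-lang3 FastDateFormat while Spark 2.2's CSVOptions builds its default timestampFormat (yyyy-MM-dd'T'HH:mm:ss.SSSXXX) suggests an older commons-lang3 (before 3.5, which added support for the "XXX" ISO 8601 zone token) is being picked up on the classpath. This is an assumption to verify against the module's dependency tree, not a confirmed diagnosis; the sketch below (class name is illustrative only) shows which behaviour the resolved commons-lang3 gives:

{code:java}
// Illustrative check only: reproduces the "Illegal pattern component: XXX"
// failure seen above when a commons-lang3 older than 3.5 is on the classpath.
import org.apache.commons.lang3.time.FastDateFormat;

public class TimestampPatternCheck {
  public static void main(String[] args) {
    // Same default timestamp pattern Spark 2.2's CSVOptions hands to FastDateFormat.
    String pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX";
    try {
      FastDateFormat format = FastDateFormat.getInstance(pattern);
      System.out.println("Pattern accepted (commons-lang3 3.5+): " + format.getPattern());
    } catch (IllegalArgumentException e) {
      // commons-lang3 < 3.5 rejects the "XXX" zone token in FastDatePrinter.parsePattern,
      // matching the stack trace in this issue.
      System.out.println("Pattern rejected (older commons-lang3): " + e.getMessage());
    }
  }
}
{code}

If that is indeed the cause, aligning the commons-lang3 version with the one Spark 2.2 expects (3.5 or newer) should let the insert from the CSV-backed table load again.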


> CSV table can't load csv data with spark2.2
> -------------------------------------------
>
>                 Key: CARBONDATA-2912
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-2912
>             Project: CarbonData
>          Issue Type: Bug
>    Affects Versions: 1.3.1
>            Reporter: xubo245
>            Assignee: xubo245
>            Priority: Major
>



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)