Indhumathi27 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r566686354 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala ########## @@ -577,6 +589,59 @@ class TestSIWithSecondaryIndex extends QueryTest with BeforeAndAfterAll { assert(ex.getMessage.contains("Problem loading data while creating secondary index:")) } + test("test SI with carbon.use.local.dir as true") { + CarbonProperties.getInstance() Review comment: @nihal0107 if this property is not used anywhere, then you remove it ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
VenuReddy2103 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r566752898 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala ########## @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.carbondata.spark.testsuite.secondaryindex + +import org.apache.spark.sql.CarbonEnv +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.index.CarbonIndexUtil +import org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore +import org.apache.spark.sql.test.util.QueryTest +import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} + +import org.apache.carbondata.core.metadata.schema.table.CarbonTable + +class TestCarbonInternalMetastore extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach { + + val dbName: String = "test" + val tableName: String = "table1" + val indexName: String = "index1" + var tableIdentifier: TableIdentifier = _ + var parentCarbonTable: CarbonTable = _ + var indexTable: CarbonTable = _ + + override def beforeAll(): Unit = { + sql("drop database if exists test cascade"); + } + + override def beforeEach(): Unit = { + sql("drop database if exists test cascade"); + sql("create database test") + sql("use test") + sql("drop table if exists table1") + sql("create table table1(a string, b string, c string) stored as carbondata") + sql("create index index1 on table1(b) as 'carbondata'") + sql("insert into table1 values('ab','bc','cd')") + } + + def setVariables(indexName: String): Unit = { + tableIdentifier = new TableIdentifier(indexName, Some("test")) + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + indexTable = CarbonEnv.getCarbonTable(Some("test"), "index1")(sqlContext.sparkSession) + } + + test("test delete index silent") { + setVariables("index1") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), false, "index1") + } + + test("test delete index table silently when exception occur") { + setVariables("unknown") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + 
parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), true, "index1") + setVariables("index1") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), false, "index1") + + sql("drop index if exists index1 on table1") + sql("create index index1 on table1(b) as 'carbondata'") + setVariables("index1") + // delete will fail as we are giving indexTable as parentTable. + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + indexTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), true, "index1") + } + + test("test show index when SI were created before the change CARBONDATA-3765") { + val mock = TestSecondaryIndexUtils.mockIndexInfo() + checkExistence(sql("show indexes on table1"), true, "index1") + mock.tearDown() + } + + test("test refresh index with different value of isIndexTableExists") { + setVariables("index1") + sql("create index index2 on table1(b) as 'bloomfilter'") + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + assert(CarbonIndexUtil.isIndexExists(parentCarbonTable).equalsIgnoreCase("true")) + assert(CarbonIndexUtil.isIndexTableExists(parentCarbonTable).equalsIgnoreCase("true")) + CarbonIndexUtil.addOrModifyTableProperty(parentCarbonTable, Map("indexExists" -> "false", + "indextableexists" -> "false"))(sqlContext.sparkSession) + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + assert(CarbonIndexUtil.isIndexExists(parentCarbonTable).equalsIgnoreCase("false")) + assert(CarbonIndexUtil.isIndexTableExists(parentCarbonTable).equalsIgnoreCase("false")) + val mock = TestSecondaryIndexUtils.mockIsIndexTableExists() Review comment: IMO, we don't need this mock. Could have just removed `indextableexists` and/or `indexexists` property appropriately from parent table. 
I mean, can do like below `parentCarbonTable.getTableInfo.getFactTable.getTableProperties.remove("indextableexists")` and/or `parentCarbonTable.getTableInfo.getFactTable.getTableProperties.remove("indexexists")` appropriately. Probably can check all testcases using `mockIsIndexTableExists()` and `mockIsIndexExists()`. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
VenuReddy2103 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r566795460 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala ########## @@ -88,4 +90,16 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll { assert(sql("show indexes on testDrop").collect().isEmpty) sql("drop table if exists testDrop") } + + test("test index drop when SI table is not deleted while main table is deleted") { + sql("drop table if exists testDrop") + sql("create table testDrop (a string, b string, c string) STORED AS carbondata") + sql("create index index11 on table testDrop (c) AS 'carbondata'") + sql("insert into testDrop values('ab', 'cd', 'ef')") + val mock = TestSecondaryIndexUtils.mockSIDrop() Review comment: I think, we should either run this testcase in a non-default db or drop index like any other normal table i.e., add `sql("drop table if exists index11")` before create index(line 97). Because, we have mocked to fail SI deletion. SI table remains in default db thereafter. Hence, if we try to run this testcase multiple times, it fails to create index with `index11` name. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567570962 ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala ########## @@ -363,5 +365,20 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'") checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff"))) sql("drop index if exists parallel_index on parallel_index_load") + val mock = mockIsSchemaModified() + sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'") + checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff"))) + sql("drop index if exists parallel_index on parallel_index_load") + mock.tearDown() Review comment: We don't know the order of execution of testcases in a file. If we remove it from here, it may impact the other testcases. I think it's better to keep the teardown for the mocked function because we may not need to mock the function for the other testcases. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567657351 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala ########## @@ -327,6 +259,35 @@ class CarbonDataFileMergeTestCaseOnSI .queryExecution.sparkPlan assert(getDataFileCount("nonindexmerge_index1", "0") == 100) assert(getDataFileCount("nonindexmerge_index1", "1") == 100) + + // exception is thrown by compaction executor + val mock3 = TestSecondaryIndexUtils.mockCompactionExecutor() + val exception2 = intercept[Exception] { + sql("REFRESH INDEX nonindexmerge_index1 ON TABLE nonindexmerge").collect() + } + mock3.tearDown() + assert(exception2.getMessage.contains("Merge data files Failure in Merger Rdd.")) + df1 = sql("""Select * from nonindexmerge where name='n16000'""") + .queryExecution.sparkPlan + assert(getDataFileCount("nonindexmerge_index1", "0") == 100) + assert(getDataFileCount("nonindexmerge_index1", "1") == 100) + CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, + CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE_DEFAULT) + } + + test("test refresh index command when block need to be sorted") { + CarbonProperties.getInstance() + .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "false") + createTableAndLoadData("100", 2) + sql("CREATE INDEX nonindexmerge_index1 on table nonindexmerge (name) AS 'carbondata'") + val mock = TestSecondaryIndexUtils.mockIsSortRequired(); + sql("REFRESH INDEX nonindexmerge_index1 ON TABLE nonindexmerge").collect() + mock.tearDown() + val df1 = sql("""Select * from nonindexmerge where name='n16000'""") + .queryExecution.sparkPlan + assert(isFilterPushedDownToSI(df1)) + assert(getDataFileCount("nonindexmerge_index1", "0") < 15) + assert(getDataFileCount("nonindexmerge_index1", "1") < 15) Review comment: In beforeall we are setting this property as true. 
That's why it is set to false at the beginning; we make it true at the end of the testcase, and the same changes have also been made for the other testcases. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567670208 ########## File path: integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithColumnMetCacheAndCacheLevelProperty.scala ########## @@ -363,5 +365,20 @@ class TestQueryWithColumnMetCacheAndCacheLevelProperty sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'") checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff"))) sql("drop index if exists parallel_index on parallel_index_load") + val mock = mockIsSchemaModified() + sql("CREATE INDEX parallel_index on parallel_index_load(b) AS 'carbondata'") + checkAnswer(sql("select b from parallel_index"), Seq(Row("bb"), Row("dd"), Row("ff"))) + sql("drop index if exists parallel_index on parallel_index_load") + mock.tearDown() + } + + def mockIsSchemaModified(): MockUp[TableInfo] = { Review comment: moved inside testcase. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567670649 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/mergedata/CarbonDataFileMergeTestCaseOnSI.scala ########## @@ -288,20 +216,24 @@ class CarbonDataFileMergeTestCaseOnSI CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE_DEFAULT) } - test("test verify data file merge when exception occurred in rebuild segment") { - CarbonProperties.getInstance() - .addProperty(CarbonCommonConstants.CARBON_SI_SEGMENT_MERGE, "false") + def createTableAndLoadData(globalSortPartition: String, loadTimes: Int): Unit = { sql("DROP TABLE IF EXISTS nonindexmerge") sql( """ | CREATE TABLE nonindexmerge(id INT, name STRING, city STRING, age INT) | STORED AS carbondata | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT') """.stripMargin) - sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " + - s"'GLOBAL_SORT_PARTITIONS'='100')") - sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE nonindexmerge OPTIONS('header'='false', " + - s"'GLOBAL_SORT_PARTITIONS'='100')") + for (_ <- 0 to loadTimes) { Review comment: yeah wanted to use until load times. Made the change. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567671338 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala ########## @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.carbondata.spark.testsuite.secondaryindex + +import org.apache.spark.sql.CarbonEnv +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.index.CarbonIndexUtil +import org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore +import org.apache.spark.sql.test.util.QueryTest +import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} + +import org.apache.carbondata.core.metadata.schema.table.CarbonTable + +class TestCarbonInternalMetastore extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach { + + val dbName: String = "test" + val tableName: String = "table1" + val indexName: String = "index1" + var tableIdentifier: TableIdentifier = _ + var parentCarbonTable: CarbonTable = _ + var indexTable: CarbonTable = _ + + override def beforeAll(): Unit = { + sql("drop database if exists test cascade"); + } + + override def beforeEach(): Unit = { + sql("drop database if exists test cascade"); Review comment: done the changes. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567674967 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCreateIndexWithLoadAndCompaction.scala ########## @@ -283,19 +285,32 @@ class TestCreateIndexWithLoadAndCompaction extends QueryTest with BeforeAndAfter } sql("ALTER TABLE table1 COMPACT 'CUSTOM' WHERE SEGMENT.ID IN (1,2,3)") - val segments = sql("SHOW SEGMENTS FOR TABLE idx1") - val segInfos = segments.collect().map { each => - ((each.toSeq) (0).toString, (each.toSeq) (1).toString) - } - assert(segInfos.length == 6) + val segInfos = checkSegmentList(6) assert(segInfos.contains(("0", "Success"))) assert(segInfos.contains(("1", "Compacted"))) assert(segInfos.contains(("2", "Compacted"))) assert(segInfos.contains(("3", "Compacted"))) assert(segInfos.contains(("1.1", "Success"))) assert(segInfos.contains(("4", "Success"))) checkAnswer(sql("select * from table1 where c3='b2'"), Seq(Row(3, "a2", "b2"))) - sql("drop table if exists table1") + + // after clean files + val mock = TestSecondaryIndexUtils.mockreadSegmentList() Review comment: We have mocked here to have some invalid segment in case of clean files for SI. Added assert for those scenario also. This testcase will cover the lines for `CleanFilesPostEventListener.cleanUpUnwantedSegmentsOfSIAndUpdateMetadata` ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567675597 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestSIWithSecondaryIndex.scala ########## @@ -577,6 +589,59 @@ class TestSIWithSecondaryIndex extends QueryTest with BeforeAndAfterAll { assert(ex.getMessage.contains("Problem loading data while creating secondary index:")) } + test("test SI with carbon.use.local.dir as true") { + CarbonProperties.getInstance() Review comment: wanted to add for `carbon.use.local.dir` as `false`. Made the changes. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770762628 Build Success with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5389/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770768986 Build Success with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3629/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567759675 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestCarbonInternalMetastore.scala ########## @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.carbondata.spark.testsuite.secondaryindex + +import org.apache.spark.sql.CarbonEnv +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.index.CarbonIndexUtil +import org.apache.spark.sql.secondaryindex.hive.CarbonInternalMetastore +import org.apache.spark.sql.test.util.QueryTest +import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} + +import org.apache.carbondata.core.metadata.schema.table.CarbonTable + +class TestCarbonInternalMetastore extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach { + + val dbName: String = "test" + val tableName: String = "table1" + val indexName: String = "index1" + var tableIdentifier: TableIdentifier = _ + var parentCarbonTable: CarbonTable = _ + var indexTable: CarbonTable = _ + + override def beforeAll(): Unit = { + sql("drop database if exists test cascade"); + } + + override def beforeEach(): Unit = { + sql("drop database if exists test cascade"); + sql("create database test") + sql("use test") + sql("drop table if exists table1") + sql("create table table1(a string, b string, c string) stored as carbondata") + sql("create index index1 on table1(b) as 'carbondata'") + sql("insert into table1 values('ab','bc','cd')") + } + + def setVariables(indexName: String): Unit = { + tableIdentifier = new TableIdentifier(indexName, Some("test")) + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + indexTable = CarbonEnv.getCarbonTable(Some("test"), "index1")(sqlContext.sparkSession) + } + + test("test delete index silent") { + setVariables("index1") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), false, "index1") + } + + test("test delete index table silently when exception occur") { + setVariables("unknown") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + 
parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), true, "index1") + setVariables("index1") + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + parentCarbonTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), false, "index1") + + sql("drop index if exists index1 on table1") + sql("create index index1 on table1(b) as 'carbondata'") + setVariables("index1") + // delete will fail as we are giving indexTable as parentTable. + CarbonInternalMetastore.deleteIndexSilent(tableIdentifier, "", + indexTable)(sqlContext.sparkSession) + checkExistence(sql("show indexes on table1"), true, "index1") + } + + test("test show index when SI were created before the change CARBONDATA-3765") { + val mock = TestSecondaryIndexUtils.mockIndexInfo() + checkExistence(sql("show indexes on table1"), true, "index1") + mock.tearDown() + } + + test("test refresh index with different value of isIndexTableExists") { + setVariables("index1") + sql("create index index2 on table1(b) as 'bloomfilter'") + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + assert(CarbonIndexUtil.isIndexExists(parentCarbonTable).equalsIgnoreCase("true")) + assert(CarbonIndexUtil.isIndexTableExists(parentCarbonTable).equalsIgnoreCase("true")) + CarbonIndexUtil.addOrModifyTableProperty(parentCarbonTable, Map("indexExists" -> "false", + "indextableexists" -> "false"))(sqlContext.sparkSession) + parentCarbonTable = CarbonEnv.getCarbonTable(Some("test"), "table1")(sqlContext.sparkSession) + assert(CarbonIndexUtil.isIndexExists(parentCarbonTable).equalsIgnoreCase("false")) + assert(CarbonIndexUtil.isIndexTableExists(parentCarbonTable).equalsIgnoreCase("false")) + val mock = TestSecondaryIndexUtils.mockIsIndexTableExists() Review comment: removed and changed as suggested. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on a change in pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#discussion_r567759908 ########## File path: index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/DropTableTest.scala ########## @@ -88,4 +90,16 @@ class DropTableTest extends QueryTest with BeforeAndAfterAll { assert(sql("show indexes on testDrop").collect().isEmpty) sql("drop table if exists testDrop") } + + test("test index drop when SI table is not deleted while main table is deleted") { + sql("drop table if exists testDrop") + sql("create table testDrop (a string, b string, c string) STORED AS carbondata") + sql("create index index11 on table testDrop (c) AS 'carbondata'") + sql("insert into testDrop values('ab', 'cd', 'ef')") + val mock = TestSecondaryIndexUtils.mockSIDrop() Review comment: Made the changes to run the testcase in a non-default database. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770844763 Build Failed with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3632/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770849879 Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5393/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770853160 retest this please ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770910752 Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5396/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
CarbonDataQA2 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-770914272 Build Failed with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3636/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
nihal0107 commented on pull request #4071: URL: https://github.com/apache/carbondata/pull/4071#issuecomment-771354445 retest this please ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
Free forum by Nabble | Edit this page |