Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/2061/ --- |
In reply to this post by qiuchenjian-2
Github user manishgupta88 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243163786 --- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala --- @@ -1511,7 +1514,15 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser { } DataTypeInfo("decimal", precision, scale) case _ => - throw new MalformedCarbonCommandException("Data type provided is invalid.") + if (isColumnRename) { + if (dataType.equalsIgnoreCase("decimal")) { --- End diff -- Instead of the nested if-else block, use pattern matching here. --- |
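A minimal, self-contained sketch of the shape this suggestion points at, with `DataTypeInfo` standing in for the parser's own case class and default precision/scale of 0 assumed:

```scala
// Illustrative only: one match on the type name instead of nested if/else.
case class DataTypeInfo(dataType: String, precision: Int = 0, scale: Int = 0)

def parseChangedDataType(dataType: String, precision: Int, scale: Int,
    isColumnRename: Boolean): DataTypeInfo = {
  dataType.toLowerCase match {
    case "decimal" =>
      DataTypeInfo("decimal", precision, scale)
    case other if isColumnRename =>
      // a pure column rename keeps the source data type as-is
      DataTypeInfo(other)
    case _ =>
      throw new IllegalArgumentException("Data type provided is invalid.")
  }
}
```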
In reply to this post by qiuchenjian-2
Github user manishgupta88 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243164028 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala --- @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.schema + +import scala.collection.JavaConverters._ +import scala.collection.mutable + +import org.apache.spark.sql.{CarbonEnv, Row, SparkSession} +import org.apache.spark.sql.execution.command.{AlterTableDataTypeChangeModel, DataTypeInfo, + MetadataCommand} +import org.apache.spark.sql.hive.CarbonSessionCatalog +import org.apache.spark.util.AlterTableUtil + +import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException +import org.apache.carbondata.common.logging.LogServiceFactory +import org.apache.carbondata.core.features.TableOperation +import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage} +import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl +import org.apache.carbondata.core.metadata.datatype.DecimalType +import org.apache.carbondata.core.metadata.schema.table.CarbonTable +import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn +import org.apache.carbondata.events.{AlterTableColRenameAndDataTypeChangePostEvent, + AlterTableColRenameAndDataTypeChangePreEvent, OperationContext, OperationListenerBus} +import org.apache.carbondata.format.{ColumnSchema, SchemaEvolutionEntry, TableInfo} +import org.apache.carbondata.spark.util.DataTypeConverterUtil + +abstract class CarbonAlterTableColumnRenameCommand(oldColumnName: String, newColumnName: String) + extends MetadataCommand { + + protected def validColumnsForRenaming(carbonColumns: mutable.Buffer[CarbonColumn], + oldCarbonColumn: CarbonColumn, + carbonTable: CarbonTable): Unit = { + // check whether new column name is already an existing column name + if (carbonColumns.exists(_.getColName.equalsIgnoreCase(newColumnName))) { + throw new MalformedCarbonCommandException(s"Column Rename Operation failed. New " + + s"column name $newColumnName already exists" + + s" in table ${ carbonTable.getTableName }") + } + + // if the column rename is for complex column, block the operation + if (oldCarbonColumn.isComplex) { + throw new MalformedCarbonCommandException(s"Column Rename Operation failed. 
Rename " + + s"column is unsupported for complex datatype " + + s"column ${ oldCarbonColumn.getColName }") + } + + // if column rename operation is on partition column, then fail the rename operation + if (null != carbonTable.getPartitionInfo) { + val partitionColumns = carbonTable.getPartitionInfo.getColumnSchemaList + partitionColumns.asScala.foreach { + col => + if (col.getColumnName.equalsIgnoreCase(oldColumnName)) { + throw new MalformedCarbonCommandException( + s"Column Rename Operation failed. Renaming " + + s"the partition column $newColumnName is not " + + s"allowed") + } + } + } + + } +} + +private[sql] case class CarbonAlterTableColRenameDataTypeChangeCommand( + alterTableColRenameAndDataTypeChangeModel: AlterTableDataTypeChangeModel) + extends CarbonAlterTableColumnRenameCommand(alterTableColRenameAndDataTypeChangeModel.columnName, + alterTableColRenameAndDataTypeChangeModel.newColumnName) { + + override def processMetadata(sparkSession: SparkSession): Seq[Row] = { + val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) + val tableName = alterTableColRenameAndDataTypeChangeModel.tableName + val dbName = alterTableColRenameAndDataTypeChangeModel.databaseName + .getOrElse(sparkSession.catalog.currentDatabase) + var isDataTypeChange = false + setAuditTable(dbName, tableName) + setAuditInfo(Map( + "column" -> alterTableColRenameAndDataTypeChangeModel.columnName, + "newColumn" -> alterTableColRenameAndDataTypeChangeModel.newColumnName, + "newType" -> alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.dataType)) + val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK) + var locks = List.empty[ICarbonLock] + // get the latest carbon table and check for column existence + var carbonTable: CarbonTable = null + var timeStamp = 0L + try { + locks = AlterTableUtil + .validateTableAndAcquireLock(dbName, tableName, locksToBeAcquired)(sparkSession) + val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore + carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession) + if (!alterTableColRenameAndDataTypeChangeModel.isColumnRename && + !carbonTable.canAllow(carbonTable, TableOperation.ALTER_CHANGE_DATATYPE, + alterTableColRenameAndDataTypeChangeModel.columnName)) { + throw new MalformedCarbonCommandException( + "alter table change datatype is not supported for index datamap") + } + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename && + !carbonTable.canAllow(carbonTable, TableOperation.ALTER_COLUMN_RENAME, + alterTableColRenameAndDataTypeChangeModel.columnName)) { + throw new MalformedCarbonCommandException( + "alter table column rename is not supported for index datamap") + } + val operationContext = new OperationContext + val alterTableColRenameAndDataTypeChangePreEvent = + AlterTableColRenameAndDataTypeChangePreEvent(sparkSession, carbonTable, + alterTableColRenameAndDataTypeChangeModel) + OperationListenerBus.getInstance() + .fireEvent(alterTableColRenameAndDataTypeChangePreEvent, operationContext) + val newColumnName = alterTableColRenameAndDataTypeChangeModel.newColumnName.toLowerCase + val oldColumnName = alterTableColRenameAndDataTypeChangeModel.columnName.toLowerCase + val carbonColumns = carbonTable.getCreateOrderColumn(tableName).asScala.filter(!_.isInvisible) + if (!carbonColumns.exists(_.getColName.equalsIgnoreCase(oldColumnName))) { + throwMetadataException(dbName, tableName, s"Column does not exist: $oldColumnName") + } + + val oldCarbonColumn = 
carbonColumns.filter(_.getColName.equalsIgnoreCase(oldColumnName)) + if (oldCarbonColumn.size != 1) { + throwMetadataException(dbName, tableName, s"Invalid Column: $oldColumnName") + } + val newColumnPrecision = alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.precision + val newColumnScale = alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.scale + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { + // validate the columns to be renamed + validColumnsForRenaming(carbonColumns, oldCarbonColumn.head, carbonTable) + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { --- End diff -- The enclosing if block already checks this same condition. --- |
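A sketch of the deduplicated block, reusing the identifiers from the diff above (not a drop-in patch):

```scala
// The outer guard already established isColumnRename, so the body proceeds
// without re-testing it.
if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) {
  // validate the columns to be renamed
  validColumnsForRenaming(carbonColumns, oldCarbonColumn.head, carbonTable)
  // ...rename/data type handling continues here, with no second isColumnRename check
}
```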
In reply to this post by qiuchenjian-2
Github user akashrn5 commented on the issue:
https://github.com/apache/carbondata/pull/2990 @manishgupta88 handled, please review --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/1865/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/1867/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/1868/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Failed with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/10123/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Failed with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/2076/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/1871/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/2080/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2990 Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/10126/ --- |
In reply to this post by qiuchenjian-2
Github user manishgupta88 commented on the issue:
https://github.com/apache/carbondata/pull/2990 LGTM --- |
In reply to this post by qiuchenjian-2
Github user brijoobopanna commented on the issue:
https://github.com/apache/carbondata/pull/2990 @akashrn5: please get an LGTM from Likun too. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243242285 --- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala --- @@ -1487,16 +1487,19 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser { * @param values * @return */ - def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = { + def parseDataType( + dataType: String, + values: Option[List[(Int, Int)]], + isColumnRename: Boolean): DataTypeInfo = { --- End diff -- Complete the comment starting from line 1486. --- |
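A sketch of what the completed Scaladoc could look like; the parameter descriptions are illustrative, not the project's wording:

```scala
/**
 * Parses the given data type string into a DataTypeInfo.
 *
 * @param dataType       data type name, e.g. "int", "bigint", "decimal"
 * @param values         optional (precision, scale) pairs, only used for decimal
 * @param isColumnRename true when called from an ALTER TABLE column rename, in
 *                       which case the source data type is accepted as-is
 * @return DataTypeInfo describing the parsed data type
 */
def parseDataType(
    dataType: String,
    values: Option[List[(Int, Int)]],
    isColumnRename: Boolean): DataTypeInfo = ???
```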
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243245093 --- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala --- @@ -1511,7 +1514,16 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser { } DataTypeInfo("decimal", precision, scale) case _ => - throw new MalformedCarbonCommandException("Data type provided is invalid.") + if (isColumnRename) { + dataType match { --- End diff -- Why match `dataType` again here when it is already matched at line 1496? It seems this function needs to be refactored. --- |
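One possible refactor direction, sketched with a hypothetical helper so that `dataType` is matched only once; the default branch of the existing match would simply delegate here:

```scala
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException

// Hypothetical helper; DataTypeInfo is the parser's case class, assumed to
// default precision and scale when only the type name is given.
def fallbackDataTypeInfo(dataType: String, isColumnRename: Boolean): DataTypeInfo = {
  if (isColumnRename) {
    // for a column rename, any valid source type is passed through unchanged
    DataTypeInfo(dataType.toLowerCase)
  } else {
    throw new MalformedCarbonCommandException("Data type provided is invalid.")
  }
}
```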
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243247751 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableColRenameDataTypeChangeCommand.scala --- @@ -0,0 +1,324 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.schema + +import scala.collection.JavaConverters._ +import scala.collection.mutable + +import org.apache.spark.sql.{CarbonEnv, Row, SparkSession} +import org.apache.spark.sql.execution.command.{AlterTableDataTypeChangeModel, DataTypeInfo, + MetadataCommand} +import org.apache.spark.sql.hive.CarbonSessionCatalog +import org.apache.spark.util.AlterTableUtil + +import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException +import org.apache.carbondata.common.logging.LogServiceFactory +import org.apache.carbondata.core.features.TableOperation +import org.apache.carbondata.core.locks.{ICarbonLock, LockUsage} +import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl +import org.apache.carbondata.core.metadata.datatype.DecimalType +import org.apache.carbondata.core.metadata.schema.table.CarbonTable +import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn +import org.apache.carbondata.events.{AlterTableColRenameAndDataTypeChangePostEvent, + AlterTableColRenameAndDataTypeChangePreEvent, OperationContext, OperationListenerBus} +import org.apache.carbondata.format.{ColumnSchema, SchemaEvolutionEntry, TableInfo} +import org.apache.carbondata.spark.util.DataTypeConverterUtil + +abstract class CarbonAlterTableColumnRenameCommand(oldColumnName: String, newColumnName: String) + extends MetadataCommand { + + protected def validColumnsForRenaming(carbonColumns: mutable.Buffer[CarbonColumn], + oldCarbonColumn: CarbonColumn, + carbonTable: CarbonTable): Unit = { + // check whether new column name is already an existing column name + if (carbonColumns.exists(_.getColName.equalsIgnoreCase(newColumnName))) { + throw new MalformedCarbonCommandException(s"Column Rename Operation failed. New " + + s"column name $newColumnName already exists" + + s" in table ${ carbonTable.getTableName }") + } + + // if the column rename is for complex column, block the operation + if (oldCarbonColumn.isComplex) { + throw new MalformedCarbonCommandException(s"Column Rename Operation failed. 
Rename " + + s"column is unsupported for complex datatype " + + s"column ${ oldCarbonColumn.getColName }") + } + + // if column rename operation is on partition column, then fail the rename operation + if (null != carbonTable.getPartitionInfo) { + val partitionColumns = carbonTable.getPartitionInfo.getColumnSchemaList + partitionColumns.asScala.foreach { + col => + if (col.getColumnName.equalsIgnoreCase(oldColumnName)) { + throw new MalformedCarbonCommandException( + s"Column Rename Operation failed. Renaming " + + s"the partition column $newColumnName is not " + + s"allowed") + } + } + } + + } +} + +private[sql] case class CarbonAlterTableColRenameDataTypeChangeCommand( + alterTableColRenameAndDataTypeChangeModel: AlterTableDataTypeChangeModel, + childTableColumnRename: Boolean = false) + extends CarbonAlterTableColumnRenameCommand(alterTableColRenameAndDataTypeChangeModel.columnName, + alterTableColRenameAndDataTypeChangeModel.newColumnName) { + + override def processMetadata(sparkSession: SparkSession): Seq[Row] = { + val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) + val tableName = alterTableColRenameAndDataTypeChangeModel.tableName + val dbName = alterTableColRenameAndDataTypeChangeModel.databaseName + .getOrElse(sparkSession.catalog.currentDatabase) + var isDataTypeChange = false + setAuditTable(dbName, tableName) + setAuditInfo(Map( + "column" -> alterTableColRenameAndDataTypeChangeModel.columnName, + "newColumn" -> alterTableColRenameAndDataTypeChangeModel.newColumnName, + "newType" -> alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.dataType)) + val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.COMPACTION_LOCK) + var locks = List.empty[ICarbonLock] + // get the latest carbon table and check for column existence + var carbonTable: CarbonTable = null + var timeStamp = 0L + try { + locks = AlterTableUtil + .validateTableAndAcquireLock(dbName, tableName, locksToBeAcquired)(sparkSession) + val metaStore = CarbonEnv.getInstance(sparkSession).carbonMetaStore + carbonTable = CarbonEnv.getCarbonTable(Some(dbName), tableName)(sparkSession) + if (!alterTableColRenameAndDataTypeChangeModel.isColumnRename && + !carbonTable.canAllow(carbonTable, TableOperation.ALTER_CHANGE_DATATYPE, + alterTableColRenameAndDataTypeChangeModel.columnName)) { + throw new MalformedCarbonCommandException( + "alter table change datatype is not supported for index datamap") + } + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename && + !carbonTable.canAllow(carbonTable, TableOperation.ALTER_COLUMN_RENAME, + alterTableColRenameAndDataTypeChangeModel.columnName)) { + throw new MalformedCarbonCommandException( + "alter table column rename is not supported for index datamap") + } + val operationContext = new OperationContext + operationContext.setProperty("childTableColumnRename", childTableColumnRename) + val alterTableColRenameAndDataTypeChangePreEvent = + AlterTableColRenameAndDataTypeChangePreEvent(sparkSession, carbonTable, + alterTableColRenameAndDataTypeChangeModel) + OperationListenerBus.getInstance() + .fireEvent(alterTableColRenameAndDataTypeChangePreEvent, operationContext) + val newColumnName = alterTableColRenameAndDataTypeChangeModel.newColumnName.toLowerCase + val oldColumnName = alterTableColRenameAndDataTypeChangeModel.columnName.toLowerCase + val carbonColumns = carbonTable.getCreateOrderColumn(tableName).asScala.filter(!_.isInvisible) + if (!carbonColumns.exists(_.getColName.equalsIgnoreCase(oldColumnName))) { + throwMetadataException(dbName, 
tableName, s"Column does not exist: $oldColumnName") + } + + val oldCarbonColumn = carbonColumns.filter(_.getColName.equalsIgnoreCase(oldColumnName)) + if (oldCarbonColumn.size != 1) { + throwMetadataException(dbName, tableName, s"Invalid Column: $oldColumnName") + } + val newColumnPrecision = alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.precision + val newColumnScale = alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.scale + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { + // validate the columns to be renamed + validColumnsForRenaming(carbonColumns, oldCarbonColumn.head, carbonTable) + // if the datatype is source datatype, then it is just a column rename operation, else set + // the isDataTypeChange flag to true + if (oldCarbonColumn.head.getDataType.getName + .equalsIgnoreCase(alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.dataType)) { + val newColumnPrecision = + alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.precision + val newColumnScale = alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.scale + // if the source datatype is decimal and there is change in precision and scale, then + // along with rename, datatype change is also required for the command, so set the + // isDataTypeChange flag to true in this case + if (oldCarbonColumn.head.getDataType.getName.equalsIgnoreCase("decimal") && + (oldCarbonColumn.head.getDataType.asInstanceOf[DecimalType].getPrecision != + newColumnPrecision || + oldCarbonColumn.head.getDataType.asInstanceOf[DecimalType].getScale != + newColumnScale)) { + isDataTypeChange = true + } + } else { + isDataTypeChange = true + } + } else { + isDataTypeChange = true + } + if (isDataTypeChange) { + validateColumnDataType(alterTableColRenameAndDataTypeChangeModel.dataTypeInfo, + oldCarbonColumn.head) + } + // read the latest schema file + val tableInfo: TableInfo = + metaStore.getThriftTableInfo(carbonTable) + // maintain the added column for schema evolution history + var addColumnSchema: ColumnSchema = null + var deletedColumnSchema: ColumnSchema = null + val schemaEvolutionEntry: SchemaEvolutionEntry = null + val columnSchemaList = tableInfo.fact_table.table_columns.asScala.filter(!_.isInvisible) + + columnSchemaList.foreach { columnSchema => + if (columnSchema.column_name.equalsIgnoreCase(oldColumnName)) { + deletedColumnSchema = columnSchema.deepCopy() + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { + // if only column rename, just get the column schema and rename, make a + // schemaEvolutionEntry + columnSchema.setColumn_name(newColumnName) + } + // if the column rename is false,it will be just datatype change only, then change the + // datatype and make an evolution entry, If both the operations are happening, then rename + // change datatype and make an evolution entry + if (isDataTypeChange) { + // if only datatype change, just get the column schema and change datatype, make a + // schemaEvolutionEntry + columnSchema.setData_type( + DataTypeConverterUtil.convertToThriftDataType( + alterTableColRenameAndDataTypeChangeModel.dataTypeInfo.dataType)) + columnSchema + .setPrecision(newColumnPrecision) + columnSchema.setScale(newColumnScale) + } + addColumnSchema = columnSchema + timeStamp = System.currentTimeMillis() + // make a new schema evolution entry after column rename or datatype change + AlterTableUtil + .addNewSchemaEvolutionEntry(schemaEvolutionEntry, timeStamp, addColumnSchema, + deletedColumnSchema) + } + } + + // modify the table Properties with new column name if column rename 
happened + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { + AlterTableUtil + .modifyTablePropertiesAfterColumnRename(tableInfo.fact_table.tableProperties.asScala, + oldColumnName, newColumnName) + } + updateSchemaAndRefreshTable(sparkSession, + carbonTable, + tableInfo, + addColumnSchema, + schemaEvolutionEntry) + val alterTableColRenameAndDataTypeChangePostEvent + : AlterTableColRenameAndDataTypeChangePostEvent = + AlterTableColRenameAndDataTypeChangePostEvent(sparkSession, carbonTable, + alterTableColRenameAndDataTypeChangeModel) + OperationListenerBus.getInstance + .fireEvent(alterTableColRenameAndDataTypeChangePostEvent, operationContext) + if (isDataTypeChange) { + LOGGER + .info(s"Alter table for column rename or data type change is successful for table " + + s"$dbName.$tableName") + } + if (alterTableColRenameAndDataTypeChangeModel.isColumnRename) { + LOGGER.info(s"Alter table for column rename is successful for table $dbName.$tableName") + } + } catch { + case e: Exception => + if (carbonTable != null) { + AlterTableUtil + .revertColumnRenameAndDataTypeChanges(dbName, tableName, timeStamp)(sparkSession) + } + if (isDataTypeChange) { + throwMetadataException(dbName, tableName, + s"Alter table data type change operation failed: ${ e.getMessage }") + } else { + throwMetadataException(dbName, tableName, + s"Alter table data type change or column rename operation failed: ${ e.getMessage }") + } + } finally { + // release lock after command execution completion + AlterTableUtil.releaseLocks(locks) + } + Seq.empty + } + + /** + * This method update the schema info and refresh the table + * + * @param sparkSession + * @param carbonTable carbonTable + * @param tableInfo tableInfo + * @param addColumnSchema added column schema + * @param schemaEvolutionEntryList new SchemaEvolutionEntry + */ + private def updateSchemaAndRefreshTable(sparkSession: SparkSession, + carbonTable: CarbonTable, + tableInfo: TableInfo, + addColumnSchema: ColumnSchema, + schemaEvolutionEntry: SchemaEvolutionEntry): Unit = { + val schemaConverter = new ThriftWrapperSchemaConverterImpl + val a = List(schemaConverter.fromExternalToWrapperColumnSchema(addColumnSchema)) + val (tableIdentifier, schemaParts, cols) = AlterTableUtil.updateSchemaInfo( + carbonTable, schemaEvolutionEntry, tableInfo, Some(a))(sparkSession) + sparkSession.sessionState.catalog.asInstanceOf[CarbonSessionCatalog] + .alterColumnChangeDataType(tableIdentifier, schemaParts, cols) + sparkSession.catalog.refreshTable(tableIdentifier.quotedString) + } + + /** + * This method will validate a column for its data type and check whether the column data type + * can be modified and update if conditions are met. + */ + private def validateColumnDataType( + dataTypeInfo: DataTypeInfo, + carbonColumn: CarbonColumn): Unit = { + carbonColumn.getDataType.getName match { --- End diff -- it is better to if check on `carbonColumn.getDataType` instead of matching strings --- |
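A hedged sketch of what branching on the DataType object (instead of its name string) could look like; `DataTypes.INT` and `DataTypes.isDecimal` are assumed to be the relevant core constants and helpers, and the allowed conversions shown are only illustrative:

```scala
import org.apache.carbondata.core.metadata.datatype.{DataType, DataTypes}

// Compare DataType objects rather than matching name strings.
def isSupportedTypeChange(srcType: DataType, targetTypeName: String): Boolean = {
  if (srcType == DataTypes.INT) {
    // int may only be widened to bigint/long
    targetTypeName.equalsIgnoreCase("bigint") || targetTypeName.equalsIgnoreCase("long")
  } else if (DataTypes.isDecimal(srcType)) {
    // decimal may only change to another decimal (precision/scale checked elsewhere)
    targetTypeName.equalsIgnoreCase("decimal")
  } else {
    false
  }
}
```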
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243248408 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/StreamingTableStrategy.scala --- @@ -54,10 +54,10 @@ private[sql] class StreamingTableStrategy(sparkSession: SparkSession) extends Sp new TableIdentifier(model.tableName, model.databaseName), "Alter table drop column") Nil - case CarbonAlterTableDataTypeChangeCommand(model) => + case CarbonAlterTableColRenameDataTypeChangeCommand(model, _) => rejectIfStreamingTable( new TableIdentifier(model.tableName, model.databaseName), - "Alter table change datatype") + "Alter table change datatype or column rename") --- End diff -- I think you can tell whether it is a rename from `CarbonAlterTableColRenameDataTypeChangeCommand(model, rename)`, so you can use a different message for each case. --- |
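A sketch of a rename-aware rejection for this case branch; it reads the flag from the model's isColumnRename field, and the message text is illustrative:

```scala
case CarbonAlterTableColRenameDataTypeChangeCommand(model, _) =>
  // pick the rejection message based on whether this is a rename or a datatype change
  val operation =
    if (model.isColumnRename) "Alter table column rename"
    else "Alter table change datatype"
  rejectIfStreamingTable(
    new TableIdentifier(model.tableName, model.databaseName), operation)
  Nil
```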
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243248566 --- Diff: integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala --- @@ -269,12 +269,50 @@ object AlterTableUtil { } } } - metastore + metaStore .revertTableSchemaInAlterFailure(carbonTable.getCarbonTableIdentifier, thriftTable, carbonTable.getAbsoluteTableIdentifier)(sparkSession) } } + /** + * This method modifies the table properties if column rename happened + * + * @param tableProperties --- End diff -- Add comments for all parameters. --- |
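A sketch of the documented signature this comment asks for; the parameter descriptions and the mutable.Map type are inferred from the call site in the command and are illustrative:

```scala
import scala.collection.mutable

/**
 * This method modifies the table properties if a column rename happened.
 *
 * @param tableProperties table properties of the table being altered
 * @param oldColumnName   column name before the rename
 * @param newColumnName   column name after the rename
 */
def modifyTablePropertiesAfterColumnRename(
    tableProperties: mutable.Map[String, String],
    oldColumnName: String,
    newColumnName: String): Unit = ???
```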
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2990#discussion_r243250452 --- Diff: integration/spark2/src/test/scala/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapSuite.scala --- @@ -548,11 +548,16 @@ class BloomCoarseGrainDataMapSuite extends QueryTest with BeforeAndAfterAll with | USING 'bloomfilter' | DMProperties( 'INDEX_COLUMNS'='city,id', 'BLOOM_SIZE'='640000') """.stripMargin) - val exception: MalformedCarbonCommandException = intercept[MalformedCarbonCommandException] { + val exception1: MalformedCarbonCommandException = intercept[MalformedCarbonCommandException] { --- End diff -- Use a more descriptive name. --- |