Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1595
Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1792/
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155427034

--- Diff: integration/spark2/src/main/spark2.2/CarbonSessionState.scala ---
@@ -256,6 +292,57 @@ class CarbonSqlAstBuilder(conf: SQLConf, parser: CarbonSpark2SqlParser) extends
    }
  }
+  override def visitChangeColumn(ctx: ChangeColumnContext): LogicalPlan = {
+
+    val newColumn = visitColType(ctx.colType)
+    if (!ctx.identifier.getText.equalsIgnoreCase(newColumn.name)) {
+      throw new MalformedCarbonCommandException(
+        "Column names provided are different. Both the column names should be same")
+    }
+
+    val (typeString, values) : (String, Option[List[(Int, Int)]]) = newColumn.dataType match {
+      case d:DecimalType => ("decimal", Some(List((d.precision, d.scale))))
+      case _ => (newColumn.dataType.typeName.toLowerCase, None)
+    }
+
+    val alterTableChangeDataTypeModel =
+      AlterTableDataTypeChangeModel(new CarbonSpark2SqlParser().parseDataType(typeString, values),
+        new CarbonSpark2SqlParser()
+          .convertDbNameToLowerCase(Option(ctx.tableIdentifier().db).map(_.getText)),
+        ctx.tableIdentifier().table.getText.toLowerCase,
+        ctx.identifier.getText.toLowerCase,
+        newColumn.name.toLowerCase)
+
+    CarbonAlterTableDataTypeChangeCommand(alterTableChangeDataTypeModel)
+  }
+
+
+  override def visitAddTableColumns(ctx: AddTableColumnsContext): LogicalPlan = {
--- End diff --

We can catch the command in the strategy, but AlterTableChangeColumnCommand is only added in Spark 2.2, and since our strategy is common to Spark 2.2 and Spark 2.1 it would be difficult to match the case of AlterTableAddColumnsCommand there. The SparkSqlAstBuilder is already separated for 2.1 and 2.2 in CarbonSessionState.scala, so it was easier to place the handling there.
---
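To make the constraint concrete (this is not code from the PR): a strategy that matched the 2.2-only command directly would look roughly like the sketch below, and such a match cannot even compile against Spark 2.1 because AlterTableAddColumnsCommand does not exist there. The object name is hypothetical, and a real strategy would translate the command into Carbon's own alter-table command rather than re-execute it as-is.

```
import org.apache.spark.sql.Strategy
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, ExecutedCommandExec}

// Hypothetical sketch: compiles only against Spark 2.2, because
// AlterTableAddColumnsCommand is absent from Spark 2.1. This is why the PR keeps
// the handling in the version-specific CarbonSqlAstBuilder instead of the shared strategy.
object AddColumnsStrategySketch extends Strategy {
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
    case cmd: AlterTableAddColumnsCommand =>
      // Placeholder: a real strategy would build the corresponding Carbon command here.
      ExecutedCommandExec(cmd) :: Nil
    case _ => Nil
  }
}
```
---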
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155427042

--- Diff: integration/spark2/src/main/spark2.2/CarbonSessionState.scala ---
@@ -256,6 +292,57 @@ class CarbonSqlAstBuilder(conf: SQLConf, parser: CarbonSpark2SqlParser) extends
    }
  }
+  override def visitChangeColumn(ctx: ChangeColumnContext): LogicalPlan = {
--- End diff --

We can catch the command in the strategy, but AlterTableAddColumnsCommand is only added in Spark 2.2, and since our strategy is common to Spark 2.2 and Spark 2.1 it would be difficult to match the case of AlterTableAddColumnsCommand there. The SparkSqlAstBuilder is already separated for 2.1 and 2.2 in CarbonSessionState.scala, so it was easier to place the handling there.
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155430622

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonPreAggregateRules.scala ---
@@ -1223,5 +1224,63 @@ case class CarbonPreInsertionCasts(sparkSession: SparkSession) extends Rule[Logi
    }
  }
+  /**
+   * Transform the logical plan with average(col1) aggregation type to sum(col1) and count(col1).
+   *
+   * @param logicalPlan
+   * @return
+   */
+  private def transformAggregatePlan(logicalPlan: LogicalPlan): LogicalPlan = {
--- End diff --

Removed. Not needed.
---
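For background on what the removed method's doc described: avg(col) can be rewritten as sum(col) / count(col). A minimal, hypothetical Catalyst sketch of that rewrite (casting to double for simplicity; this is not the PR's implementation) could look like:

```
import org.apache.spark.sql.catalyst.expressions.{Cast, Divide, Expression}
import org.apache.spark.sql.catalyst.expressions.aggregate.{Average, Count, Sum}
import org.apache.spark.sql.types.DoubleType

object AverageRewriteSketch {
  // Rewrites every avg(x) in an expression tree into sum(x) / count(x).
  def rewriteAverage(expr: Expression): Expression = expr.transform {
    case Average(child) =>
      Divide(
        Cast(Sum(child).toAggregateExpression(), DoubleType),
        Cast(Count(child).toAggregateExpression(), DoubleType))
  }
}
```
---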
Github user sounakr commented on the issue:
https://github.com/apache/carbondata/pull/1595
Retest this please
---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155438257

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -49,10 +49,23 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
+    val tableRelation = if (sparkSession.version.startsWith("2.1")) {
+      relation
+    } else if (sparkSession.version.startsWith("2.2")) {
+      alias match {
+        case Some(a) =>
+          CarbonReflectionUtils.getSubqueryAlias(sparkSession, alias, relation
+            , Some(table.tableIdentifier))
--- End diff --

Format properly:

```
CarbonReflectionUtils.getSubqueryAlias(
  sparkSession,
  alias,
  relation,
  Some(table.tableIdentifier))
```
---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155438305

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -152,7 +165,19 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
     // include tuple id in subquery
-    Project(projList, relation)
+    if (sparkSession.version.startsWith("2.1")) {
+      Project(projList, relation)
+    } else if (sparkSession.version.startsWith("2.2")) {
+      alias match {
+        case Some(a) =>
+          val subqueryAlias = CarbonReflectionUtils.getSubqueryAlias(sparkSession, alias
--- End diff --

Format properly:

```
CarbonReflectionUtils.getSubqueryAlias(
  sparkSession,
  alias,
  relation,
  Some(table.tableIdentifier))
```
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1595
Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/548/
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155442391

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -152,7 +165,19 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
     // include tuple id in subquery
-    Project(projList, relation)
+    if (sparkSession.version.startsWith("2.1")) {
+      Project(projList, relation)
+    } else if (sparkSession.version.startsWith("2.2")) {
+      alias match {
+        case Some(a) =>
+          val subqueryAlias = CarbonReflectionUtils.getSubqueryAlias(sparkSession, alias
--- End diff --

Done
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155442404

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -49,10 +49,23 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
+    val tableRelation = if (sparkSession.version.startsWith("2.1")) {
+      relation
+    } else if (sparkSession.version.startsWith("2.2")) {
+      alias match {
+        case Some(a) =>
+          CarbonReflectionUtils.getSubqueryAlias(sparkSession, alias, relation
+            , Some(table.tableIdentifier))
--- End diff --

Done
---
Github user sounakr commented on the issue:
https://github.com/apache/carbondata/pull/1595
Retest this please.
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1595
Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/552/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1595
Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/556/
---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1595
SDV Build Success, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/2175/
---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155458074

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala ---
@@ -1168,7 +1168,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
    * @param values
    * @return
    */
-  protected def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
+  def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
--- End diff --

Seems like wrong indentation.
---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155458141

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala ---
@@ -413,19 +414,35 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
       relation: BaseRelation,
       predicates: Seq[Expression]): (Seq[Expression], Seq[Filter]) = {
+    def isComplexAttribute(attribute: Attribute) = attribute.dataType match {
--- End diff --

Should be private.
---
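As a rough illustration of the helper under discussion (the exact set of types Carbon treats as complex here is an assumption, not taken from the diff), the check might look like:

```
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.types.{ArrayType, MapType, StructType}

object ComplexAttributeSketch {
  // Returns true when the attribute's data type is a nested/complex type, so callers
  // can keep predicates on such columns out of filter push-down. Inside
  // CarbonLateDecodeStrategy this helper should be marked private, per the review.
  def isComplexAttribute(attribute: Attribute): Boolean = attribute.dataType match {
    case _: ArrayType | _: StructType | _: MapType => true
    case _ => false
  }
}
```
---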
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155458976

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -49,10 +49,26 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
+    val tableRelation = if (sparkSession.version.startsWith("2.1")) {
--- End diff --

Use org.apache.spark.SPARK_VERSION instead of using the sparkSession object.
---
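The suggestion refers to the version constant exposed by the Spark package object. A minimal sketch of the suggested check (the object and method names are illustrative only, not from the PR):

```
import org.apache.spark.SPARK_VERSION

object SparkVersionSketch {
  // Branch on the compile-time constant instead of sparkSession.version,
  // so the check does not require a SparkSession instance.
  def isSpark21: Boolean = SPARK_VERSION.startsWith("2.1")
  def isSpark22: Boolean = SPARK_VERSION.startsWith("2.2")
}
```
---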
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155463016

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala ---
@@ -1168,7 +1168,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
    * @param values
    * @return
    */
-  protected def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
+  def parseDataType(dataType: String, values: Option[List[(Int, Int)]]): DataTypeInfo = {
--- End diff --

Done
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155463910

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala ---
@@ -49,10 +49,26 @@ case class CarbonIUDAnalysisRule(sparkSession: SparkSession) extends Rule[Logica
     val projList = Seq(UnresolvedAlias(UnresolvedStar(alias.map(Seq(_)))), tupleId)
+    val tableRelation = if (sparkSession.version.startsWith("2.1")) {
--- End diff --

Ok.
---
Github user sounakr commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1595#discussion_r155463937

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala ---
@@ -413,19 +414,35 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
       relation: BaseRelation,
       predicates: Seq[Expression]): (Seq[Expression], Seq[Filter]) = {
+    def isComplexAttribute(attribute: Attribute) = attribute.dataType match {
--- End diff --

Done
---