[GitHub] [carbondata] qiuchenjian commented on a change in pull request #3150: [CARBONDATA-3309] MV datamap supports Spark 2.1

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

[GitHub] [carbondata] qiuchenjian commented on a change in pull request #3150: [CARBONDATA-3309] MV datamap supports Spark 2.1

GitBox
qiuchenjian commented on a change in pull request #3150: [CARBONDATA-3309] MV datamap supports Spark 2.1
URL: https://github.com/apache/carbondata/pull/3150#discussion_r283696608
 
 

 ##########
 File path: integration/spark-common/src/main/scala/org/apache/spark/sql/util/SparkSQLUtil.scala
 ##########
 @@ -51,6 +55,100 @@ object SparkSQLUtil {
     }
   }
 
+  /**
+   * Normalizes the expression ids of `r` against the attributes in `input`.
+   *
+   * On Spark 2.2 and above this reflectively calls
+   * `org.apache.spark.sql.catalyst.plans.QueryPlan.normalizeExprId`, which does not
+   * exist in Spark 2.1; on older versions the expression is returned unchanged.
+   *
+   * @param r     the named expression whose exprIds should be normalized
+   * @param input the attribute sequence used as the normalization reference
+   * @return the normalized expression on Spark 2.2+, otherwise `r` itself
+   */
+  def invokeQueryPlannormalizeExprId(r: NamedExpression, input: AttributeSeq)
+      : NamedExpression = {
+    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+      val clazz = Utils.classForName("org.apache.spark.sql.catalyst.plans.QueryPlan")
+      // NOTE(review): normalizeExprId is declared on QueryPlan with a bounded type
+      // parameter and looks like a protected instance method; confirm that
+      // getDeclaredMethod with classOf[Any] matches the erased signature and that
+      // invoke(null, ...) (static-style receiver) plus accessibility actually work
+      // against the targeted Spark 2.2+ release.
+      clazz.getDeclaredMethod("normalizeExprId", classOf[Any], classOf[AttributeSeq]).
+        invoke(null, r, input).asInstanceOf[NamedExpression]
+    } else {
+      r
+    }
+  }
+
+  /**
+   * Builds a catalyst `Statistics` object whose per-column statistics are re-keyed
+   * from the leaf-table attributes of `plan` onto `outputList` (optionally remapped
+   * through `aliasMap`).
+   *
+   * The `Statistics` case class differs between Spark versions, so the instance is
+   * created reflectively: on Spark 2.2+ column stats are keyed by `Attribute` (field
+   * name "attributeStats") and a "hints" field is carried over; on Spark 2.1 they are
+   * keyed by column name (field name "colStats") and there is no hints field.
+   *
+   * @param outputList expressions whose attributes/names become the new stat keys
+   * @param plan       logical plan supplying the original leaf-node output attributes
+   * @param stats      the source statistics whose fields are read reflectively
+   * @param aliasMap   optional attribute aliasing applied after the rewrite
+   * @return a new `Statistics` with re-keyed column statistics
+   */
+  def getStatisticsObj(outputList: Seq[NamedExpression],
+                       plan: LogicalPlan, stats: Statistics,
+                       aliasMap: Option[AttributeMap[Attribute]] = None)
+  : Statistics = {
+    val className = "org.apache.spark.sql.catalyst.plans.logical.Statistics"
+    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+      val output = outputList.map(_.toAttribute)
+      val mapSeq = plan.collect { case n: logical.LeafNode => n }.map {
+        table => AttributeMap(table.output.zip(output))
+      }
+      // NOTE(review): mapSeq.head throws on a plan with no LeafNode, and only the
+      // first leaf's mapping is used — confirm callers guarantee exactly one leaf.
+      val rewrites = mapSeq.head
+      val attributes : AttributeMap[ColumnStat] = CarbonReflectionUtils.
+        getField("attributeStats", stats).asInstanceOf[AttributeMap[ColumnStat]]
+      // Re-key each column stat from the leaf attribute to the output attribute.
+      var attributeStats = AttributeMap(attributes.iterator
+        .map { pair => (rewrites(pair._1), pair._2) }.toSeq)
+      if (aliasMap.isDefined) {
+        attributeStats = AttributeMap(
+          attributeStats.map(pair => (aliasMap.get(pair._1), pair._2)).toSeq)
+      }
+      val hints = CarbonReflectionUtils.getField("hints", stats).asInstanceOf[Object]
+      CarbonReflectionUtils.createObject(className, stats.sizeInBytes,
+        stats.rowCount, attributeStats, hints).asInstanceOf[Statistics]
+    } else {
+      // Spark 2.1 path: column stats are keyed by name rather than by Attribute.
+      val output = outputList.map(_.name)
+      val mapSeq = plan.collect { case n: logical.LeafNode => n }.map {
+        table => table.output.map(_.name).zip(output).toMap
+      }
+      // NOTE(review): same single-leaf assumption as the 2.2+ branch above.
+      val rewrites = mapSeq.head
+      val colStats = CarbonReflectionUtils.getField("colStats", stats)
+        .asInstanceOf[Map[String, ColumnStat]]
+      var attributeStats = colStats.iterator
+        .map { pair => (rewrites(pair._1), pair._2) }.toMap
+      if (aliasMap.isDefined) {
+        // Alias mapping is applied by name here; names not present in the alias map
+        // are kept unchanged.
+        val aliasMapName = aliasMap.get.map(x => (x._1.name, x._2.name))
+        attributeStats =
+          attributeStats.map(pair => (aliasMapName.getOrElse(pair._1, pair._1)
+            , pair._2))
+      }
+      CarbonReflectionUtils.createObject(className, stats.sizeInBytes,
+        stats.rowCount, attributeStats).asInstanceOf[Statistics]
+    }
+  }
+
+  /**
+   * Creates a copy of `oriCast` with a new child expression, preserving the original
+   * target data type.
+   *
+   * `Cast`'s constructor gained a `timeZoneId` parameter in Spark 2.2, so the copy is
+   * built reflectively: on 2.2+ the original cast's timeZoneId field is read and
+   * passed through; on 2.1 the two-argument constructor is used.
+   *
+   * @param child   the replacement child expression
+   * @param oriCast the cast whose data type (and, on 2.2+, time zone) is reused
+   * @return a new `Cast` over `child`
+   */
+  def getCastObj(child: Expression, oriCast: Cast) : Cast = {
+    val className = "org.apache.spark.sql.catalyst.expressions.Cast"
+    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+      val timeZoneId = CarbonReflectionUtils.getField("timeZoneId",
+        oriCast).asInstanceOf[Object]
+      CarbonReflectionUtils.createObject(className, child, oriCast.dataType, timeZoneId).
+        _1.asInstanceOf[Cast]
+    } else {
+      CarbonReflectionUtils.createObject(className, child, oriCast.dataType).
+        _1.asInstanceOf[Cast]
+    }
+  }
+
+  /**
+   * Returns the `EliminateView` analysis rule when running on Spark 2.2+, where that
+   * rule exists; on Spark 2.1 a no-op `EmptyRule` is returned instead.
+   *
+   * @param conf the session SQLConf — currently unused; presumably kept so all
+   *             version-shim rule factories share the same signature (verify)
+   * @return the `EliminateView` rule on 2.2+, otherwise `EmptyRule`
+   */
+  def getEliminateViewObj(conf: SQLConf): Rule[LogicalPlan] = {
+    if (SparkUtil.isSparkVersionXandAbove("2.2")) {
+      val className = "org.apache.spark.sql.catalyst.analysis.EliminateView"
+      CarbonReflectionUtils.createSingleObject(className).asInstanceOf[Rule[LogicalPlan]]
+    } else {
+      EmptyRule
+    }
+  }
+
+  def getPullupCorrelatedPredicatesObj(conf: SQLConf): Rule[LogicalPlan] = {
 
 Review comment:
   done

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[hidden email]


With regards,
Apache Git Services