Github user zzcclp commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r152730693

--- Diff: integration/spark2/src/main/spark2.1/CarbonSessionState.scala ---
@@ -107,15 +110,14 @@ class CarbonSessionCatalog(
       carbonEnv.carbonMetastore.
         checkSchemasModifiedTimeAndReloadTables(storePath)
-      val tableMeta = carbonEnv.carbonMetastore
-        .getTableFromMetadataCache(carbonDatasourceHadoopRelation.carbonTable.getDatabaseName,
-          carbonDatasourceHadoopRelation.carbonTable.getFactTableName)
-      if (tableMeta.isEmpty || (tableMeta.isDefined &&
-        tableMeta.get.carbonTable.getTableLastUpdatedTime !=
-        carbonDatasourceHadoopRelation.carbonTable.getTableLastUpdatedTime)) {
+      val table = carbonEnv.carbonMetastore.getTableFromMetadataCache(
+        carbonDatasourceHadoopRelation.carbonTable.getDatabaseName,
+        carbonDatasourceHadoopRelation.carbonTable.getFactTableName)
+      if (table.isEmpty || (table.isDefined &&
+        table.get.carbonTable.getTableLastUpdatedTime !=
--- End diff --

Wrong indent.

---
Github user sounakr commented on the issue:
https://github.com/apache/carbondata/pull/1469 Retest this please ---
Github user zzcclp commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r152921148

--- Diff: integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CarbonV1toV3CompatabilityTestCase.scala ---
@@ -49,8 +49,10 @@ class CarbonV1toV3CompatabilityTestCase extends QueryTest with BeforeAndAfterAll
       .getOrCreateCarbonSession(storeLocation, metaLocation).asInstanceOf[CarbonSession]
     println("store path from env : " + CarbonEnv.getInstance(localspark).storePath)
     localspark.sparkContext.setLogLevel("WARN")
-    localspark.sessionState.asInstanceOf[CarbonSessionState].metadataHive
-      .runSqlHive(
+    localspark.asInstanceOf[CarbonSession].asInstanceOf[CarbonSession].sharedState.externalCatalog
+      .asInstanceOf[HiveExternalCatalog].client.runSqlHive(
--- End diff --

I just commented out this line temporarily and use spark.run to run this SQL, because the class HiveExternalCatalog cannot be accessed from a package outside org.apache.spark.

---
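One possible workaround, sketched under the assumption that HiveExternalCatalog, SharedState and HiveClient keep their Spark 2.1/2.2 visibility, is to put a tiny helper inside the org.apache.spark.sql.hive package, where the package-private class is visible. The object name CarbonHiveSqlRunner is hypothetical and not part of this PR:

package org.apache.spark.sql.hive

import org.apache.spark.sql.SparkSession

// Hypothetical helper, placed under org.apache.spark.sql.hive so that the
// package-private HiveExternalCatalog (and SparkSession.sharedState) are visible.
object CarbonHiveSqlRunner {

  // Runs a statement directly on the Hive client backing the external catalog.
  // Assumes a Hive-enabled session, so externalCatalog is a HiveExternalCatalog.
  def runSqlHive(spark: SparkSession, sql: String): Seq[String] = {
    spark.sharedState.externalCatalog
      .asInstanceOf[HiveExternalCatalog]
      .client
      .runSqlHive(sql)
  }
}

A test outside the Spark packages could then call CarbonHiveSqlRunner.runSqlHive(localspark, ...) instead of reaching into the catalog directly.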
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Failed with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/376/ ---
Github user zzcclp commented on the issue:
https://github.com/apache/carbondata/pull/1469 @sounakr @ravipesala @chenliang613 I have committed code to fix some minor issues, please review. ---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1469 SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1867/ ---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1448/ ---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1451/ ---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1469 SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1870/ ---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1469 SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1871/ ---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1454/ ---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1469 SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1873/ ---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1455/ ---
Github user sounakr commented on the issue:
https://github.com/apache/carbondata/pull/1469 Retest this please ---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1469 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1457/ ---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153060899

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/optimizer/CarbonDecoderOptimizerHelper.scala ---
@@ -59,6 +62,17 @@ case class CarbonDictionaryTempDecoder(
 class CarbonDecoderProcessor {
+  val rm = universe.runtimeMirror(getClass.getClassLoader)
+
+  def getField[T: TypeTag: reflect.ClassTag]( name: String, obj: T): Any = {
--- End diff --

Remove this method, as it seems to be unused.

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153060970

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/optimizer/CarbonDecoderOptimizerHelper.scala ---
@@ -84,7 +98,16 @@ class CarbonDecoderProcessor {
         }
         nodeList.add(ArrayCarbonNode(nodeListSeq))
       case e: UnaryNode => process(e.child, nodeList)
-      case i: InsertIntoTable => process(i.child, nodeList)
+      case i: InsertIntoTable =>
+        var sparkVersion21: Boolean = false
+
+        sparkVersion21 = !CarbonReflectionUtils.hasField("query", InsertIntoTable)
--- End diff --

Check the Spark version based on the version information Spark itself provides, instead of depending on whether the field exists.

---
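For illustration, a minimal sketch of checking the version from Spark itself rather than probing InsertIntoTable for a field. The object name is hypothetical, and the assumption that the relevant field layout differs between the Spark 2.1.x and 2.2.x lines is mine, not something stated in this thread:

import org.apache.spark.SPARK_VERSION

// Hypothetical helper: decide behaviour from the running Spark version rather
// than from reflection over InsertIntoTable's fields.
object SparkVersionUtil {
  // True when the running Spark belongs to the 2.1.x line.
  def isSpark21: Boolean = SPARK_VERSION.startsWith("2.1")
}

// The pattern match above could then read, for example:
// val sparkVersion21: Boolean = SparkVersionUtil.isSpark21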
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153061017

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala ---
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.util
+
+import java.util
+
+import scala.reflect.runtime._
+import scala.reflect.runtime.universe._
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+
+/**
+ * Reflection APIs
+ */
+
+object CarbonReflectionUtils {
+
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  private val rm = universe.runtimeMirror(getClass.getClassLoader)
+
+  /**
+   * Returns the field val from a object through reflection.
+   * @param name - name of the field being retrieved.
+   * @param obj - Object from which the field has to be retrieved.
+   * @tparam T
+   * @return
+   */
+  def getField[T: TypeTag: reflect.ClassTag](name: String, obj: T): Any = {
+    val im = rm.reflect(obj)
+
+    im.symbol.typeSignature.members.find(
+      _.name.toString.equals(name)).map(
+      l => im.reflectField(l.asTerm).get.asInstanceOf[LogicalPlan]
+    ).getOrElse(null)
+  }
+
+  def hasField[T: TypeTag: reflect.ClassTag](name: String, obj: T): Boolean = {
--- End diff --

Remove this method, as the version check should come from Spark itself.

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153061021

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala ---
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.util
+
+import java.util
+
+import scala.reflect.runtime._
+import scala.reflect.runtime.universe._
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+
+/**
+ * Reflection APIs
+ */
+
+object CarbonReflectionUtils {
+
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  private val rm = universe.runtimeMirror(getClass.getClassLoader)
+
+  /**
+   * Returns the field val from a object through reflection.
+   * @param name - name of the field being retrieved.
+   * @param obj - Object from which the field has to be retrieved.
+   * @tparam T
+   * @return
+   */
+  def getField[T: TypeTag: reflect.ClassTag](name: String, obj: T): Any = {
+    val im = rm.reflect(obj)
+
+    im.symbol.typeSignature.members.find(
+      _.name.toString.equals(name)).map(
+      l => im.reflectField(l.asTerm).get.asInstanceOf[LogicalPlan]
--- End diff --

No need to type cast.

---
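As a rough sketch of the suggestion, the reflected value can simply be returned as Any without the cast to LogicalPlan. The names mirror the diff above, but this is illustrative rather than the final code:

import scala.reflect.ClassTag
import scala.reflect.runtime.universe

// Illustrative variant of getField without the asInstanceOf[LogicalPlan] cast;
// the declared return type Any already covers whatever the field holds.
object ReflectionSketch {
  private val rm = universe.runtimeMirror(getClass.getClassLoader)

  def getField[T: universe.TypeTag: ClassTag](name: String, obj: T): Any = {
    val im = rm.reflect(obj)
    im.symbol.typeSignature.members
      .find(_.name.toString.equals(name))
      .map(sym => im.reflectField(sym.asTerm).get) // no type cast needed
      .getOrElse(null)
  }
}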
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1469#discussion_r153061030

--- Diff: integration/spark-common/src/main/scala/org/apache/spark/util/CarbonReflectionUtils.scala ---
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.util
+
+import java.util
+
+import scala.reflect.runtime._
+import scala.reflect.runtime.universe._
+
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+
+/**
+ * Reflection APIs
+ */
+
+object CarbonReflectionUtils {
+
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  private val rm = universe.runtimeMirror(getClass.getClassLoader)
+
+  /**
+   * Returns the field val from a object through reflection.
+   * @param name - name of the field being retrieved.
+   * @param obj - Object from which the field has to be retrieved.
+   * @tparam T
+   * @return
+   */
+  def getField[T: TypeTag: reflect.ClassTag](name: String, obj: T): Any = {
+    val im = rm.reflect(obj)
+
+    im.symbol.typeSignature.members.find(
+      _.name.toString.equals(name)).map(
--- End diff --

Move this line up.

---