Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150248692 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDropTableCommand.scala --- @@ -80,10 +88,21 @@ case class CarbonDropTableCommand( override def processData(sparkSession: SparkSession): Seq[Row] = { // delete the table folder val dbName = GetDB.getDatabaseName(databaseNameOp, sparkSession) - val tableIdentifier = - AbsoluteTableIdentifier.from(CarbonEnv.getInstance(sparkSession).storePath, dbName, tableName) + // get the absolute table identifier to drop the table. + val carbonEnv = CarbonEnv.getInstance(sparkSession) + val catalog = carbonEnv.carbonMetastore + val metadataCache = catalog.getTableFromMetadataCache(dbName, tableName) + val absoluteTableIdentifier = metadataCache match { + case Some(tableMeta) => + tableMeta.carbonTable.getAbsoluteTableIdentifier + case None => + val storePath = GetDB.getDatabaseLocation(dbName, sparkSession, --- End diff -- Fixed --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/974/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1583/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/982/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/987/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1588/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/989/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1594/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1598/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1606/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1016/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1634/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1017/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1635/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/1018/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1636/ --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150442055 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala --- @@ -399,16 +396,31 @@ class CarbonFileMetastore extends CarbonMetaStore { FileFactory.isFileExist(tablePath, fileType) } catch { case e: Exception => - false + // this is to identify the stale tables, table physically present but removed from hive + try { --- End diff -- fixed --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150442218 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala --- @@ -93,6 +96,22 @@ private[sql] case class CarbonAlterTableRenameCommand( timeStamp = System.currentTimeMillis() schemaEvolutionEntry.setTime_stamp(timeStamp) renameBadRecords(oldTableName, newTableName, oldDatabaseName) + --- End diff -- if hive and carbon both the tables point to the same location, and we do folder rename first then the hive alter command will fail. --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150442280 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala --- @@ -48,7 +48,8 @@ case class CarbonDatasourceHadoopRelation( isSubquery: ArrayBuffer[Boolean] = new ArrayBuffer[Boolean]()) extends BaseRelation with InsertableRelation { - lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.fromTablePath(paths.head) + lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.from(paths.head, + parameters("dbname"), parameters("tablename")) --- End diff -- the map parameters is case insensitive --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150442899 --- Diff: integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala --- @@ -839,14 +841,56 @@ object CommonUtil { */ def mergeIndexFiles(sparkContext: SparkContext, segmentIds: Seq[String], - storePath: String, + tablePath: String, carbonTable: CarbonTable): Unit = { if (CarbonProperties.getInstance().getProperty( CarbonCommonConstants.CARBON_MERGE_INDEX_IN_SEGMENT, CarbonCommonConstants.CARBON_MERGE_INDEX_IN_SEGMENT_DEFAULT).toBoolean) { - new CarbonMergeFilesRDD(sparkContext, AbsoluteTableIdentifier.from(storePath, + new CarbonMergeFilesRDD(sparkContext, AbsoluteTableIdentifier.from(tablePath, carbonTable.getDatabaseName, carbonTable.getFactTableName).getTablePath, segmentIds).collect() } } + + /** + * The default database and databases ending with .db will point to + * either "carbon.storeLocation" or "spark.sql.warehouse.dir". + * + * @param storeLocation + * @param databaseName + * @param fixedStorePath + * @return + */ + def getValidStorePath(storeLocation: String, databaseName: String, + fixedStorePath: String): String = { --- End diff -- fixed --- |
Free forum by Nabble | Edit this page |