Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149993949 --- Diff: integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala --- @@ -47,10 +47,11 @@ object TestQueryExecutor { private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) val projectPath = new File(this.getClass.getResource("/").getPath + "../../../..") - .getCanonicalPath + .getCanonicalPath.replaceAll("\\\\", "/") --- End diff -- Don't change if not required --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149994202 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCatalystOperators.scala --- @@ -70,6 +71,14 @@ object GetDB { dbName.getOrElse( sparkSession.sessionState.catalog.asInstanceOf[HiveSessionCatalog].getCurrentDatabase) } + + def getDatabaseLocation(dbName: String, sparkSession: SparkSession, --- End diff -- Add some comment --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149995554 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala --- @@ -48,7 +48,8 @@ case class CarbonDatasourceHadoopRelation( isSubquery: ArrayBuffer[Boolean] = new ArrayBuffer[Boolean]()) extends BaseRelation with InsertableRelation { - lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.fromTablePath(paths.head) + lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.from(paths.head, + parameters("dbname"), parameters("tablename")) --- End diff -- Check the camel case of parameters, please debug and verify once --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149996665 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala --- @@ -37,11 +37,13 @@ case class CarbonCreateTableCommand( } override def processSchema(sparkSession: SparkSession): Seq[Row] = { - val storePath = CarbonEnv.getInstance(sparkSession).storePath + var storePath = CarbonEnv.getInstance(sparkSession).storePath CarbonEnv.getInstance(sparkSession).carbonMetastore. - checkSchemasModifiedTimeAndReloadTables(storePath) + checkSchemasModifiedTimeAndReloadTables() val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) cm.databaseName = GetDB.getDatabaseName(cm.databaseNameOp, sparkSession) + storePath = GetDB.getDatabaseLocation(cm.databaseName, sparkSession, storePath) --- End diff -- assign to another variable with proper name --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149997215 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDropTableCommand.scala --- @@ -43,24 +44,31 @@ case class CarbonDropTableCommand( override def processSchema(sparkSession: SparkSession): Seq[Row] = { val LOGGER: LogService = LogServiceFactory.getLogService(this.getClass.getCanonicalName) val dbName = GetDB.getDatabaseName(databaseNameOp, sparkSession) - val identifier = TableIdentifier(tableName, Option(dbName)) val carbonTableIdentifier = new CarbonTableIdentifier(dbName, tableName, "") val locksToBeAcquired = List(LockUsage.METADATA_LOCK, LockUsage.DROP_TABLE_LOCK) val carbonEnv = CarbonEnv.getInstance(sparkSession) val catalog = carbonEnv.carbonMetastore - val tableIdentifier = - AbsoluteTableIdentifier.from(CarbonEnv.getInstance(sparkSession).storePath, - dbName.toLowerCase, tableName.toLowerCase) - catalog.checkSchemasModifiedTimeAndReloadTables(tableIdentifier.getStorePath) + // get the absolute table identifier to drop the table. + val metadataCache = catalog.getTableFromMetadataCache(dbName, tableName) + val absoluteTableIdentifier = metadataCache match { + case Some(tableMeta) => + tableMeta.carbonTable.getAbsoluteTableIdentifier + case None => + val storePath = GetDB.getDatabaseLocation(dbName, sparkSession, --- End diff -- rename to dbpath --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149997376 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonDropTableCommand.scala --- @@ -80,10 +88,21 @@ case class CarbonDropTableCommand( override def processData(sparkSession: SparkSession): Seq[Row] = { // delete the table folder val dbName = GetDB.getDatabaseName(databaseNameOp, sparkSession) - val tableIdentifier = - AbsoluteTableIdentifier.from(CarbonEnv.getInstance(sparkSession).storePath, dbName, tableName) + // get the absolute table identifier to drop the table. + val carbonEnv = CarbonEnv.getInstance(sparkSession) + val catalog = carbonEnv.carbonMetastore + val metadataCache = catalog.getTableFromMetadataCache(dbName, tableName) + val absoluteTableIdentifier = metadataCache match { + case Some(tableMeta) => + tableMeta.carbonTable.getAbsoluteTableIdentifier + case None => + val storePath = GetDB.getDatabaseLocation(dbName, sparkSession, --- End diff -- Change name to dbpath in all places --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r149998981 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableRenameCommand.scala --- @@ -93,6 +96,22 @@ private[sql] case class CarbonAlterTableRenameCommand( timeStamp = System.currentTimeMillis() schemaEvolutionEntry.setTime_stamp(timeStamp) renameBadRecords(oldTableName, newTableName, oldDatabaseName) + --- End diff -- Why the order changed? --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1536/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150004203 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala --- @@ -399,16 +396,31 @@ class CarbonFileMetastore extends CarbonMetaStore { FileFactory.isFileExist(tablePath, fileType) } catch { case e: Exception => - false + // this is to identify stale tables, i.e. tables physically present but removed from hive + try { --- End diff -- remove and try whether all tests pass --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1540/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/952/ --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150171389 --- Diff: core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java --- @@ -1377,6 +1377,10 @@ public static final String CARBON_USE_BLOCKLET_DISTRIBUTION_DEFAULT = "true"; + public static final String CARBON_UPDATE_SYNC_FOLDER = "carbon.update.sync.folder"; --- End diff -- Fixed --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150171431 --- Diff: core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java --- @@ -1377,6 +1377,10 @@ public static final String CARBON_USE_BLOCKLET_DISTRIBUTION_DEFAULT = "true"; + public static final String CARBON_UPDATE_SYNC_FOLDER = "carbon.update.sync.folder"; + + public static final String CARBON_UPDATE_SYNC_FOLDER_DEFAULT = "/tmp/carbondata"; --- End diff -- fixed --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150172471 --- Diff: core/src/main/java/org/apache/carbondata/core/locks/HdfsFileLock.java --- @@ -47,26 +44,12 @@ private static String tmpPath; - static { --- End diff -- fixed --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150173260 --- Diff: core/src/main/java/org/apache/carbondata/core/locks/LocalFileLock.java --- @@ -90,8 +90,9 @@ public LocalFileLock(String lockFileLocation, String lockFile) { * @param tableIdentifier * @param lockFile */ - public LocalFileLock(CarbonTableIdentifier tableIdentifier, String lockFile) { - this(tableIdentifier.getDatabaseName() + CarbonCommonConstants.FILE_SEPARATOR + tableIdentifier + public LocalFileLock(AbsoluteTableIdentifier tableIdentifier, String lockFile) { + this(tableIdentifier.getCarbonTableIdentifier().getDatabaseName() --- End diff -- Fixed --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1567/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/962/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1418 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/1576/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1418 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/969/ --- |
In reply to this post by qiuchenjian-2
Github user mohammadshahidkhan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1418#discussion_r150248529 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/CarbonCreateTableCommand.scala --- @@ -37,11 +37,13 @@ case class CarbonCreateTableCommand( } override def processSchema(sparkSession: SparkSession): Seq[Row] = { - val storePath = CarbonEnv.getInstance(sparkSession).storePath + var storePath = CarbonEnv.getInstance(sparkSession).storePath CarbonEnv.getInstance(sparkSession).carbonMetastore. - checkSchemasModifiedTimeAndReloadTables(storePath) + checkSchemasModifiedTimeAndReloadTables() val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName) cm.databaseName = GetDB.getDatabaseName(cm.databaseNameOp, sparkSession) + storePath = GetDB.getDatabaseLocation(cm.databaseName, sparkSession, storePath) --- End diff -- fixed --- |
Free forum by Nabble | Edit this page |