QiangCai opened a new pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867

### Why is this PR needed?
There are many typos in the carbondata-spark module.

### What changes were proposed in this PR?
Clean up code typos in the carbondata-spark module.

### Does this PR introduce any user interface change?
- No, it does not change any interface names.

### Is any new testcase added?
- No, it does not impact functionality.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[hidden email]
kevinjmh commented on a change in pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867#discussion_r462058720

##########
File path: integration/spark/src/main/scala/org/apache/carbondata/view/MVRefresher.scala
##########
@@ -68,19 +68,19 @@ object MVRefresher {
     // Clean up the old invalid segment data before creating a new entry for new load.
     SegmentStatusManager.deleteLoadsAndUpdateMetadata(viewTable, false, null)
     val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(viewTableIdentifier)
-    // Acquire table status lock to handle concurrent dataloading
+    // Acquire table status lock to handle concurrent data loading
     val lock: ICarbonLock = segmentStatusManager.getTableStatusLock
     val segmentMapping: util.Map[String, util.List[String]] =
       new util.HashMap[String, util.List[String]]
     val viewManager = MVManagerInSpark.get(session)
     try if (lock.lockWithRetries) {
-      LOGGER.info("Acquired lock for mv " + viewIdentifier + " for table status updation")
+      LOGGER.info("Acquired lock for mv " + viewIdentifier + " for table status te")

Review comment:
       check this

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
##########
@@ -50,7 +50,7 @@ import org.apache.carbondata.events.{IndexServerLoadEvent, OperationContext, Ope
 import org.apache.carbondata.hadoop.api.{CarbonInputFormat, CarbonTableInputFormat}
 import org.apache.carbondata.processing.exception.MultipleMatchingException
 import org.apache.carbondata.processing.loading.FailureCauses
-import org.apache.carbondata.spark.DeleteDelataResultImpl
+import org.apache.carbondata.spark.DeleteDelateResultImpl

Review comment:
       delta

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
##########
@@ -266,9 +266,9 @@ object DeleteExecution {
         CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.BLOCK_ID) +
           CarbonCommonConstants.FACT_FILE_EXT)
     }
-    val deleteDeletaPath = CarbonUpdateUtil
+    val deleteDeletePath = CarbonUpdateUtil

Review comment:
       ditto

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala
##########
@@ -693,10 +693,10 @@ class CarbonSecondaryIndexOptimizer(sparkSession: SparkSession) {
       case sort@Sort(order, global, plan) =>
         addProjection = true
         (sort, true)
-      case filter@Filter(condition, logicalRelation@MatchIndexableRelation(indexableRelation))
+      case filter@Filter(condition, logicalRelation@MatchIndexTableRelation(indexTableRelation))

Review comment:
       rename is changing meaning. And actual code is to match CarbonDatasourceHadoopRelation
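A side note on the last review comment, for readers less familiar with Scala pattern matching: MatchIndexableRelation is a custom extractor object, and in a pattern like Filter(condition, logicalRelation@MatchIndexableRelation(...)) the extractor's name is the main documentation of what its unapply accepts. According to the reviewer, the real extractor only succeeds for a CarbonDatasourceHadoopRelation (the main-table relation), so renaming the pattern to MatchIndexTableRelation would suggest it matches an index table instead. The sketch below is a minimal, self-contained illustration of that extractor shape; the types here are hypothetical stand-ins, not the actual Spark or CarbonData classes.

// Self-contained sketch (no Spark dependency): hypothetical stand-in types
// that only mimic the shape of the real plan nodes.
trait BaseRelation
case class CarbonDatasourceHadoopRelation(tableName: String) extends BaseRelation
case class LogicalRelation(relation: BaseRelation)

// A custom extractor object. Its name is what tells the reader which relation
// the unapply accepts: because it only succeeds for a
// CarbonDatasourceHadoopRelation, renaming it to MatchIndexTableRelation
// would change the apparent meaning of every pattern that uses it.
object MatchIndexableRelation {
  def unapply(plan: LogicalRelation): Option[CarbonDatasourceHadoopRelation] =
    plan.relation match {
      case carbonRelation: CarbonDatasourceHadoopRelation => Some(carbonRelation)
      case _ => None
    }
}

object ExtractorDemo extends App {
  val plan = LogicalRelation(CarbonDatasourceHadoopRelation("fact_table"))
  plan match {
    case MatchIndexableRelation(relation) =>
      println("matched Carbon relation: " + relation.tableName)
    case _ =>
      println("no match")
  }
}

In other words, the objection is not about spelling but about keeping an extractor's name aligned with the type its unapply actually matches.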
CarbonDataQA1 commented on pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867#issuecomment-665157764
CarbonDataQA1 commented on pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867#issuecomment-666136864
kevinjmh commented on pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867#issuecomment-666153130

LGTM
QiangCai commented on a change in pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867#discussion_r462713015

##########
File path: integration/spark/src/main/scala/org/apache/carbondata/view/MVRefresher.scala
##########
@@ -68,19 +68,19 @@ object MVRefresher {
     // Clean up the old invalid segment data before creating a new entry for new load.
     SegmentStatusManager.deleteLoadsAndUpdateMetadata(viewTable, false, null)
     val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(viewTableIdentifier)
-    // Acquire table status lock to handle concurrent dataloading
+    // Acquire table status lock to handle concurrent data loading
     val lock: ICarbonLock = segmentStatusManager.getTableStatusLock
     val segmentMapping: util.Map[String, util.List[String]] =
       new util.HashMap[String, util.List[String]]
     val viewManager = MVManagerInSpark.get(session)
     try if (lock.lockWithRetries) {
-      LOGGER.info("Acquired lock for mv " + viewIdentifier + " for table status updation")
+      LOGGER.info("Acquired lock for mv " + viewIdentifier + " for table status te")

Review comment:
       done, change to 'update'

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
##########
@@ -50,7 +50,7 @@ import org.apache.carbondata.events.{IndexServerLoadEvent, OperationContext, Ope
 import org.apache.carbondata.hadoop.api.{CarbonInputFormat, CarbonTableInputFormat}
 import org.apache.carbondata.processing.exception.MultipleMatchingException
 import org.apache.carbondata.processing.loading.FailureCauses
-import org.apache.carbondata.spark.DeleteDelataResultImpl
+import org.apache.carbondata.spark.DeleteDelateResultImpl

Review comment:
       done

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
##########
@@ -266,9 +266,9 @@ object DeleteExecution {
         CarbonUpdateUtil.getRequiredFieldFromTID(TID, TupleIdEnum.BLOCK_ID) +
           CarbonCommonConstants.FACT_FILE_EXT)
     }
-    val deleteDeletaPath = CarbonUpdateUtil
+    val deleteDeletePath = CarbonUpdateUtil

Review comment:
       done, change to 'delta'

##########
File path: integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/optimizer/CarbonSecondaryIndexOptimizer.scala
##########
@@ -693,10 +693,10 @@ class CarbonSecondaryIndexOptimizer(sparkSession: SparkSession) {
      case sort@Sort(order, global, plan) =>
        addProjection = true
        (sort, true)
-      case filter@Filter(condition, logicalRelation@MatchIndexableRelation(indexableRelation))
+      case filter@Filter(condition, logicalRelation@MatchIndexTableRelation(indexTableRelation))

Review comment:
       Reverted the change.
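One more aside, on the MVRefresher hunk quoted in both review rounds: the code it touches follows the usual acquire-with-retries / release-in-finally discipline around the table status metadata, which is why the comment talks about handling concurrent data loading. Below is a generic sketch of that pattern; StatusLock and its methods are hypothetical stand-ins, not CarbonData's ICarbonLock/SegmentStatusManager API.

// Hypothetical stand-in for a table status lock; the real code obtains an
// ICarbonLock from SegmentStatusManager.getTableStatusLock.
class StatusLock {
  def lockWithRetries(): Boolean = true   // pretend the lock was acquired
  def unlock(): Unit = ()
}

object TableStatusUpdateDemo extends App {
  val lock = new StatusLock
  // Acquire the lock before touching shared table status metadata so that
  // concurrent data loads do not corrupt it.
  if (lock.lockWithRetries()) {
    try {
      println("Acquired lock for table status update")
      // ... update table status here ...
    } finally {
      // Always release the lock, even if the update throws.
      lock.unlock()
    }
  } else {
    println("Could not acquire table status lock; another load may be running")
  }
}

The retry-style acquisition (suggested by the name lockWithRetries) lets a caller give up gracefully when another operation holds the lock instead of blocking indefinitely, which is the concurrent-load situation the comment in the diff describes.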
asfgit closed pull request #3867:
URL: https://github.com/apache/carbondata/pull/3867