Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157675931 --- Diff: core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java --- @@ -42,6 +42,17 @@ */ List<Blocklet> prune(FilterResolverIntf filterExp); + // TODO Move this method to Abstract class --- End diff -- In FG implementation I already created Abstract classes , so when we merge it we can move there --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157676422 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java --- @@ -102,18 +105,26 @@ public BlockletDataMap get(TableBlockIndexUniqueIdentifier identifier) } if (missedIdentifiers.size() > 0) { Map<String, SegmentIndexFileStore> segmentIndexFileStoreMap = new HashMap<>(); + Map<String, PartitionMapFileStore> partitionFileStoreMap = new HashMap<>(); for (TableBlockIndexUniqueIdentifier identifier: missedIdentifiers) { SegmentIndexFileStore indexFileStore = segmentIndexFileStoreMap.get(identifier.getSegmentId()); + PartitionMapFileStore partitionFileStore = + partitionFileStoreMap.get(identifier.getSegmentId()); + String segmentPath = CarbonTablePath --- End diff -- change like: ``` String segmentPath = CarbonTablePath.getSegmentPath( identifier.getAbsoluteTableIdentifier().getTablePath(), identifier.getSegmentId()); ``` --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157677188 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -379,10 +397,21 @@ private void createSchema(SegmentProperties segmentProperties) throws MemoryExce new UnsafeMemoryDMStore(indexSchemas.toArray(new CarbonRowSchema[indexSchemas.size()])); } - private void createTaskMinMaxSchema(SegmentProperties segmentProperties) throws MemoryException { + private void createSummarySchema(SegmentProperties segmentProperties, List<String> partitions) --- End diff -- provide comment for this function --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157677564 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -491,6 +520,23 @@ public boolean isScanRequired(FilterResolverIntf filterExp) { return blocklets; } + @Override public List<Blocklet> prune(FilterResolverIntf filterExp, List<String> partitions) { + List<String> storedPartitions = getPartitions(); --- End diff -- provide comments for the logic inside this function --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157678533 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapModel.java --- @@ -16,18 +16,35 @@ */ package org.apache.carbondata.core.indexstore.blockletindex; +import java.util.List; + import org.apache.carbondata.core.datamap.dev.DataMapModel; public class BlockletDataMapModel extends DataMapModel { --- End diff -- Please provide comment for this class --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/892/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1674 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/2403/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/2118/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157741064 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java --- @@ -102,18 +105,26 @@ public BlockletDataMap get(TableBlockIndexUniqueIdentifier identifier) } if (missedIdentifiers.size() > 0) { Map<String, SegmentIndexFileStore> segmentIndexFileStoreMap = new HashMap<>(); + Map<String, PartitionMapFileStore> partitionFileStoreMap = new HashMap<>(); for (TableBlockIndexUniqueIdentifier identifier: missedIdentifiers) { SegmentIndexFileStore indexFileStore = segmentIndexFileStoreMap.get(identifier.getSegmentId()); + PartitionMapFileStore partitionFileStore = + partitionFileStoreMap.get(identifier.getSegmentId()); + String segmentPath = CarbonTablePath --- End diff -- ok --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157740999 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java --- @@ -71,12 +72,14 @@ public BlockletDataMap get(TableBlockIndexUniqueIdentifier identifier) BlockletDataMap dataMap = (BlockletDataMap) lruCache.get(lruCacheKey); if (dataMap == null) { try { + String segmentPath = CarbonTablePath --- End diff -- ok --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157741759 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -379,10 +397,21 @@ private void createSchema(SegmentProperties segmentProperties) throws MemoryExce new UnsafeMemoryDMStore(indexSchemas.toArray(new CarbonRowSchema[indexSchemas.size()])); } - private void createTaskMinMaxSchema(SegmentProperties segmentProperties) throws MemoryException { + private void createSummarySchema(SegmentProperties segmentProperties, List<String> partitions) --- End diff -- ok --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157742667 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -491,6 +520,23 @@ public boolean isScanRequired(FilterResolverIntf filterExp) { return blocklets; } + @Override public List<Blocklet> prune(FilterResolverIntf filterExp, List<String> partitions) { + List<String> storedPartitions = getPartitions(); --- End diff -- ok, added --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157742911 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapModel.java --- @@ -16,18 +16,35 @@ */ package org.apache.carbondata.core.indexstore.blockletindex; +import java.util.List; + import org.apache.carbondata.core.datamap.dev.DataMapModel; public class BlockletDataMapModel extends DataMapModel { --- End diff -- ok --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/917/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1674 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/2419/ --- |
In reply to this post by qiuchenjian-2
Github user gvramana commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1674#discussion_r157829858 --- Diff: core/src/main/java/org/apache/carbondata/core/metadata/PartitionMapFileStore.java --- @@ -176,13 +179,87 @@ public PartitionMapper readPartitionMap(String partitionMapPath) { public void readAllPartitionsOfSegment(String segmentPath) { CarbonFile[] partitionFiles = getPartitionFiles(segmentPath); if (partitionFiles != null && partitionFiles.length > 0) { + partionedSegment = true; for (CarbonFile file : partitionFiles) { PartitionMapper partitionMapper = readPartitionMap(file.getAbsolutePath()); partitionMap.putAll(partitionMapper.getPartitionMap()); } } } + public boolean isPartionedSegment() { + return partionedSegment; + } + + /** + * Drops the partitions from the partition mapper file of the segment and writes to a new file. + * @param segmentPath + * @param partitionsToDrop + * @param uniqueId + * @throws IOException + */ + public void dropPartitions(String segmentPath, List<String> partitionsToDrop, String uniqueId) + throws IOException { + readAllPartitionsOfSegment(segmentPath); + List<String> indexesToDrop = new ArrayList<>(); + for (Map.Entry<String, List<String>> entry: partitionMap.entrySet()) { + for (String partition: partitionsToDrop) { + if (entry.getValue().contains(partition)) { + indexesToDrop.add(entry.getKey()); + } + } + } + if (indexesToDrop.size() > 0) { + // Remove the indexes from partition map + for (String indexToDrop : indexesToDrop) { + partitionMap.remove(indexToDrop); + } + PartitionMapper mapper = new PartitionMapper(); + mapper.setPartitionMap(partitionMap); + String path = segmentPath + "/" + uniqueId + CarbonTablePath.PARTITION_MAP_EXT; + writePartitionFile(mapper, path); + } + } + + /** + * It deletes the old partition mapper files in case of success. And in case of failure it removes + * the old new file. 
+ * @param segmentPath + * @param uniqueId + * @param success + */ + public void commitPartitions(String segmentPath, final String uniqueId, boolean success) { + CarbonFile carbonFile = FileFactory.getCarbonFile(segmentPath); --- End diff -- Locks are not taken while dropping partitions in the Hive metastore — don't we need to take them? --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1674 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/2433/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/2149/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Success with Spark 2.2.0, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/937/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1674 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/2166/ --- |
Free forum by Nabble | Edit this page |