Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2197#discussion_r183286495 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -654,6 +656,7 @@ public boolean isScanRequired(FilterResolverIntf filterExp) { startIndex++; } } + ExplainCollector.setTotalBlocklets(numBlocklets); --- End diff -- This is just for one datamap blocklets, where are you summing all blocklets? --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2197#discussion_r183286607 --- Diff: core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java --- @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.core.profiler; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.carbondata.common.annotations.InterfaceAudience; +import org.apache.carbondata.core.metadata.schema.table.DataMapSchema; + +/** + * An information collector used for EXPLAIN command, to print out + * SQL rewrite and pruning information + */ +@InterfaceAudience.Internal +public class ExplainCollector { + + private static final ThreadLocal<ExplainCollector> explainProfiler = new ThreadLocal<>(); + + private List<String> olapDataMapProviders = new ArrayList<>(); + private List<String> olapDataMapNames = new ArrayList<>(); + + // mapping of table name to pruning info + private Map<String, TablePruningInfo> scans = new ConcurrentHashMap<>(); + + public void recordMatchedOlapDataMap(String dataMapProvider, String dataMapName) { + 
Objects.requireNonNull(dataMapProvider); + Objects.requireNonNull(dataMapName); + olapDataMapProviders.add(dataMapProvider); + olapDataMapNames.add(dataMapName); + } + + public static boolean enabled() { + return explainProfiler.get() != null; + } + + public static void setup() { + explainProfiler.set(new ExplainCollector()); + } + + public static ExplainCollector get() { + return explainProfiler.get(); + } + + public static void addPruningInfo(String tableName) { + if (enabled()) { + ExplainCollector profiler = get(); + if (!profiler.scans.containsKey(tableName)) { + profiler.scans.put(tableName, new TablePruningInfo()); + } + } + } + + public static void setFilterStatement(String filterStatement) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setFilterStatement(filterStatement); + } + } + + public static void recordDefaultDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterDefaultPruning(dataMapSchema, numBlocklets); + } + } + + public static void recordCGDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterCGPruning(dataMapSchema, numBlocklets); + } + } + + public static void recordFGDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterFGPruning(dataMapSchema, numBlocklets); + } + } + + public static void setTotalBlocklets(int totalBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setTotalBlocklets(totalBlocklets); + } + } + + /** + * Return the current TablePruningInfo (It is the last one in the map, since it is in + * single thread) + */ + private static TablePruningInfo getCurrentTablePruningInfo() { --- End diff -- ok, I will add CarbonTable 
parameter in all functions in this class --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2197#discussion_r183286723 --- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java --- @@ -654,6 +656,7 @@ public boolean isScanRequired(FilterResolverIntf filterExp) { startIndex++; } } + ExplainCollector.setTotalBlocklets(numBlocklets); --- End diff -- ok. I will change setTotalBlocklets to sum it --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2197#discussion_r183287439 --- Diff: core/src/main/java/org/apache/carbondata/core/profiler/ExplainCollector.java --- @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.core.profiler; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.carbondata.common.annotations.InterfaceAudience; +import org.apache.carbondata.core.metadata.schema.table.DataMapSchema; + +/** + * An information collector used for EXPLAIN command, to print out + * SQL rewrite and pruning information + */ +@InterfaceAudience.Internal +public class ExplainCollector { + + private static final ThreadLocal<ExplainCollector> explainProfiler = new ThreadLocal<>(); + + private List<String> olapDataMapProviders = new ArrayList<>(); + private List<String> olapDataMapNames = new ArrayList<>(); + + // mapping of table name to pruning info + private Map<String, TablePruningInfo> scans = new ConcurrentHashMap<>(); + + public void recordMatchedOlapDataMap(String dataMapProvider, String dataMapName) { + 
Objects.requireNonNull(dataMapProvider); + Objects.requireNonNull(dataMapName); + olapDataMapProviders.add(dataMapProvider); + olapDataMapNames.add(dataMapName); + } + + public static boolean enabled() { + return explainProfiler.get() != null; + } + + public static void setup() { + explainProfiler.set(new ExplainCollector()); + } + + public static ExplainCollector get() { + return explainProfiler.get(); + } + + public static void addPruningInfo(String tableName) { + if (enabled()) { + ExplainCollector profiler = get(); + if (!profiler.scans.containsKey(tableName)) { + profiler.scans.put(tableName, new TablePruningInfo()); + } + } + } + + public static void setFilterStatement(String filterStatement) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setFilterStatement(filterStatement); + } + } + + public static void recordDefaultDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterDefaultPruning(dataMapSchema, numBlocklets); + } + } + + public static void recordCGDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterCGPruning(dataMapSchema, numBlocklets); + } + } + + public static void recordFGDataMapPruning(DataMapSchema dataMapSchema, int numBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setNumBlockletsAfterFGPruning(dataMapSchema, numBlocklets); + } + } + + public static void setTotalBlocklets(int totalBlocklets) { + if (enabled()) { + TablePruningInfo scan = getCurrentTablePruningInfo(); + scan.setTotalBlocklets(totalBlocklets); + } + } + + /** + * Return the current TablePruningInfo (It is the last one in the map, since it is in + * single thread) + */ + private static TablePruningInfo getCurrentTablePruningInfo() { --- End diff -- Because this ExplainCollector 
is used when `queryExecution.toRdd.partitions` is invoked in CarbonExplainCommand. It will process relation one by one, so it is like a Stack. The last TablePruningInfo is for current table. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/5317/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4154/ --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on the issue:
https://github.com/apache/carbondata/pull/2197 retest this please --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/5334/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4166/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4178/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2197 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4495/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2197 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4496/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/5355/ --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on the issue:
https://github.com/apache/carbondata/pull/2197 retest this please --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4234/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/5401/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4258/ --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2197 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4565/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/5435/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2197 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/4274/ --- |
Free forum by Nabble | Edit this page |