Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126060644 --- Diff: processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java --- @@ -84,19 +112,28 @@ measurePage = new ColumnPage[model.getMeasureCount()]; DataType[] dataTypes = model.getMeasureDataType(); for (int i = 0; i < measurePage.length; i++) { - measurePage[i] = ColumnPage.newPage(dataTypes[i], pageSize); + ColumnPage page = ColumnPage.newPage(dataTypes[i], pageSize); + page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize)); + measurePage[i] = page; } + boolean hasNoDictionary = noDictDimensionPage.length > 0; + this.key = new TablePageKey(pageSize, model.getMDKeyGenerator(), model.getSegmentProperties(), + hasNoDictionary); } /** - * Add one row to the internal store, it will be converted into columnar layout + * Add one row to the internal store * * @param rowId Id of the input row * @param row row object */ public void addRow(int rowId, CarbonRow row) throws KeyGenException { - // convert each column category + // convert each column category, update key and stats + convertToColumnar(rowId, row); + key.update(rowId, row); --- End diff -- There are 2 times mdk is generated, one inside `key.update` and also in `convertToColumnar`. The cost of mdk generation is high so please generate it only once --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126061178 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java --- @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.core.datastore.page.encoding; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.carbondata.core.datastore.compression.Compressor; +import org.apache.carbondata.core.datastore.compression.CompressorFactory; +import org.apache.carbondata.core.datastore.page.EncodedTablePage; +import org.apache.carbondata.core.metadata.ValueEncoderMeta; +import org.apache.carbondata.core.util.CarbonMetadataUtil; +import org.apache.carbondata.format.BlockletMinMaxIndex; +import org.apache.carbondata.format.DataChunk2; +import org.apache.carbondata.format.DataChunk3; +import org.apache.carbondata.format.Encoding; +import org.apache.carbondata.format.PresenceMeta; + +/** + * Encoded measure page that include data and statistics + */ +public class EncodedMeasurePage extends EncodedColumnPage { + + private ValueEncoderMeta metaData; + + public EncodedMeasurePage(int pageSize, byte[] 
encodedData, ValueEncoderMeta metaData) { + super(pageSize, encodedData); + this.metaData = metaData; + this.dataChunk2 = buildDataChunk2(); + } + + @Override + public DataChunk2 buildDataChunk2() { + DataChunk2 dataChunk = new DataChunk2(); + dataChunk.min_max = new BlockletMinMaxIndex(); + dataChunk.setChunk_meta(CarbonMetadataUtil.getSnappyChunkCompressionMeta()); + dataChunk.setNumberOfRowsInpage(pageSize); + dataChunk.setData_page_length(encodedData.length); + dataChunk.setRowMajor(false); + // TODO : Change as per this encoders. + List<Encoding> encodings = new ArrayList<Encoding>(); + encodings.add(Encoding.DELTA); + dataChunk.setEncoders(encodings); + PresenceMeta presenceMeta = new PresenceMeta(); + presenceMeta.setPresent_bit_streamIsSet(true); + Compressor compressor = CompressorFactory.getInstance().getCompressor(); + presenceMeta.setPresent_bit_stream( + compressor.compressByte(metaData.getNullBitSet().toByteArray())); + dataChunk.setPresence(presenceMeta); + List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>(); + encoderMetaList.add( + ByteBuffer.wrap(metaData.serialize())); + dataChunk.setEncoder_meta(encoderMetaList); + dataChunk.min_max.addToMax_values(ByteBuffer.wrap(metaData.getMaxAsBytes())); + dataChunk.min_max.addToMin_values(ByteBuffer.wrap(metaData.getMinAsBytes())); + return dataChunk; + } + + public static DataChunk3 getDataChunk3(List<EncodedTablePage> encodedTablePageList, --- End diff -- Better to move this static method to some utility class from this implementation class --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126061470 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java --- @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.core.datastore.page.encoding; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.carbondata.core.datastore.compression.Compressor; +import org.apache.carbondata.core.datastore.compression.CompressorFactory; +import org.apache.carbondata.core.datastore.page.EncodedTablePage; +import org.apache.carbondata.core.metadata.ValueEncoderMeta; +import org.apache.carbondata.core.util.CarbonMetadataUtil; +import org.apache.carbondata.format.BlockletMinMaxIndex; +import org.apache.carbondata.format.DataChunk2; +import org.apache.carbondata.format.DataChunk3; +import org.apache.carbondata.format.Encoding; +import org.apache.carbondata.format.PresenceMeta; + +/** + * Encoded measure page that include data and statistics + */ +public class EncodedMeasurePage extends EncodedColumnPage { + + private ValueEncoderMeta metaData; + + public EncodedMeasurePage(int pageSize, byte[] 
encodedData, ValueEncoderMeta metaData) { + super(pageSize, encodedData); + this.metaData = metaData; + this.dataChunk2 = buildDataChunk2(); + } + + @Override + public DataChunk2 buildDataChunk2() { + DataChunk2 dataChunk = new DataChunk2(); + dataChunk.min_max = new BlockletMinMaxIndex(); + dataChunk.setChunk_meta(CarbonMetadataUtil.getSnappyChunkCompressionMeta()); + dataChunk.setNumberOfRowsInpage(pageSize); + dataChunk.setData_page_length(encodedData.length); + dataChunk.setRowMajor(false); + // TODO : Change as per this encoders. + List<Encoding> encodings = new ArrayList<Encoding>(); + encodings.add(Encoding.DELTA); + dataChunk.setEncoders(encodings); + PresenceMeta presenceMeta = new PresenceMeta(); + presenceMeta.setPresent_bit_streamIsSet(true); + Compressor compressor = CompressorFactory.getInstance().getCompressor(); + presenceMeta.setPresent_bit_stream( + compressor.compressByte(metaData.getNullBitSet().toByteArray())); + dataChunk.setPresence(presenceMeta); + List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>(); + encoderMetaList.add( + ByteBuffer.wrap(metaData.serialize())); + dataChunk.setEncoder_meta(encoderMetaList); + dataChunk.min_max.addToMax_values(ByteBuffer.wrap(metaData.getMaxAsBytes())); + dataChunk.min_max.addToMin_values(ByteBuffer.wrap(metaData.getMinAsBytes())); + return dataChunk; + } + + public static DataChunk3 getDataChunk3(List<EncodedTablePage> encodedTablePageList, + int columnIndex) throws IOException { + List<DataChunk2> dataChunksList = new ArrayList<>(encodedTablePageList.size()); + for (EncodedTablePage encodedTablePage : encodedTablePageList) { + dataChunksList.add(encodedTablePage.getMeasure(columnIndex).getDataChunk2()); + } --- End diff -- It seems code is duplicated in EncodedDimensionPage as well, please move it to some common utility --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. 
If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126426968 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java --- @@ -15,9 +15,25 @@ * limitations under the License. */ -package org.apache.carbondata.core.datastore.page.compression; +package org.apache.carbondata.core.datastore.page.statistics; -public interface Compression { - byte[] compress(byte[] input); - byte[] decompress(byte[] input); +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; + +public interface SimpleStatsResult { + + byte[] getMinAsBytes(); + + byte[] getMaxAsBytes(); + + Object getMin(); + + Object getMax(); + + BitSet getNullBits(); + + int getDecimal(); --- End diff -- ok --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126427005 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsVO.java --- @@ -64,15 +63,6 @@ public ColumnPageStatsVO(DataType dataType) { decimal = 0; } - public static ColumnPageStatsVO copyFrom(ValueEncoderMeta meta) { --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126427558 --- Diff: processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java --- @@ -84,19 +112,28 @@ measurePage = new ColumnPage[model.getMeasureCount()]; DataType[] dataTypes = model.getMeasureDataType(); for (int i = 0; i < measurePage.length; i++) { - measurePage[i] = ColumnPage.newPage(dataTypes[i], pageSize); + ColumnPage page = ColumnPage.newPage(dataTypes[i], pageSize); + page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize)); + measurePage[i] = page; } + boolean hasNoDictionary = noDictDimensionPage.length > 0; + this.key = new TablePageKey(pageSize, model.getMDKeyGenerator(), model.getSegmentProperties(), + hasNoDictionary); } /** - * Add one row to the internal store, it will be converted into columnar layout + * Add one row to the internal store * * @param rowId Id of the input row * @param row row object */ public void addRow(int rowId, CarbonRow row) throws KeyGenException { - // convert each column category + // convert each column category, update key and stats + convertToColumnar(rowId, row); --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126428058 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java --- @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.core.datastore.page.encoding; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.carbondata.core.datastore.compression.Compressor; +import org.apache.carbondata.core.datastore.compression.CompressorFactory; +import org.apache.carbondata.core.datastore.page.EncodedTablePage; +import org.apache.carbondata.core.metadata.ValueEncoderMeta; +import org.apache.carbondata.core.util.CarbonMetadataUtil; +import org.apache.carbondata.format.BlockletMinMaxIndex; +import org.apache.carbondata.format.DataChunk2; +import org.apache.carbondata.format.DataChunk3; +import org.apache.carbondata.format.Encoding; +import org.apache.carbondata.format.PresenceMeta; + +/** + * Encoded measure page that include data and statistics + */ +public class EncodedMeasurePage extends EncodedColumnPage { + + private ValueEncoderMeta metaData; + + public EncodedMeasurePage(int pageSize, byte[] 
encodedData, ValueEncoderMeta metaData) { + super(pageSize, encodedData); + this.metaData = metaData; + this.dataChunk2 = buildDataChunk2(); + } + + @Override + public DataChunk2 buildDataChunk2() { + DataChunk2 dataChunk = new DataChunk2(); + dataChunk.min_max = new BlockletMinMaxIndex(); + dataChunk.setChunk_meta(CarbonMetadataUtil.getSnappyChunkCompressionMeta()); + dataChunk.setNumberOfRowsInpage(pageSize); + dataChunk.setData_page_length(encodedData.length); + dataChunk.setRowMajor(false); + // TODO : Change as per this encoders. + List<Encoding> encodings = new ArrayList<Encoding>(); + encodings.add(Encoding.DELTA); + dataChunk.setEncoders(encodings); + PresenceMeta presenceMeta = new PresenceMeta(); + presenceMeta.setPresent_bit_streamIsSet(true); + Compressor compressor = CompressorFactory.getInstance().getCompressor(); + presenceMeta.setPresent_bit_stream( + compressor.compressByte(metaData.getNullBitSet().toByteArray())); + dataChunk.setPresence(presenceMeta); + List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>(); + encoderMetaList.add( + ByteBuffer.wrap(metaData.serialize())); + dataChunk.setEncoder_meta(encoderMetaList); + dataChunk.min_max.addToMax_values(ByteBuffer.wrap(metaData.getMaxAsBytes())); + dataChunk.min_max.addToMin_values(ByteBuffer.wrap(metaData.getMinAsBytes())); + return dataChunk; + } + + public static DataChunk3 getDataChunk3(List<EncodedTablePage> encodedTablePageList, --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126428076 --- Diff: processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java --- @@ -84,19 +112,28 @@ measurePage = new ColumnPage[model.getMeasureCount()]; DataType[] dataTypes = model.getMeasureDataType(); for (int i = 0; i < measurePage.length; i++) { - measurePage[i] = ColumnPage.newPage(dataTypes[i], pageSize); + ColumnPage page = ColumnPage.newPage(dataTypes[i], pageSize); + page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize)); + measurePage[i] = page; } + boolean hasNoDictionary = noDictDimensionPage.length > 0; + this.key = new TablePageKey(pageSize, model.getMDKeyGenerator(), model.getSegmentProperties(), + hasNoDictionary); } /** - * Add one row to the internal store, it will be converted into columnar layout + * Add one row to the internal store * * @param rowId Id of the input row * @param row row object */ public void addRow(int rowId, CarbonRow row) throws KeyGenException { - // convert each column category + // convert each column category, update key and stats + convertToColumnar(rowId, row); + key.update(rowId, row); --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder/3000/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder/3002/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Success with Spark 1.6, Please check CI http://144.76.159.231:8080/job/ApacheCarbonPRBuilder/413/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Success with Spark 1.6, Please check CI http://144.76.159.231:8080/job/ApacheCarbonPRBuilder/414/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126616780 --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java --- @@ -15,9 +15,25 @@ * limitations under the License. */ -package org.apache.carbondata.core.datastore.page.compression; +package org.apache.carbondata.core.datastore.page.statistics; -public interface Compression { - byte[] compress(byte[] input); - byte[] decompress(byte[] input); +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; + +public interface SimpleStatsResult { + + byte[] getMinAsBytes(); + + byte[] getMaxAsBytes(); + + Object getMin(); + + Object getMax(); + + BitSet getNullBits(); --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1102#discussion_r126616762 --- Diff: core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java --- @@ -29,26 +32,50 @@ */ public class ValueEncoderMeta implements Serializable { - /** - * maxValue - */ + private BitSet nullBitSet; --- End diff -- fixed --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder/3014/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Failed with Spark 1.6, Please check CI http://144.76.159.231:8080/job/ApacheCarbonPRBuilder/427/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Failed with Spark 1.6, Please check CI http://144.76.159.231:8080/job/ApacheCarbonPRBuilder/429/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder/3017/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder/3043/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1102 Build Success with Spark 1.6, Please check CI http://144.76.159.231:8080/job/ApacheCarbonPRBuilder/455/ --- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at [hidden email] or file a JIRA ticket with INFRA. --- |
Free forum by Nabble | Edit this page |