ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476426320 ########## File path: core/src/main/java/org/apache/carbondata/core/scan/collector/impl/DictionaryBasedVectorResultCollector.java ########## @@ -98,6 +98,14 @@ void prepareDimensionAndMeasureColumnVectors() { columnVectorInfo.dimension = queryDimensions[i]; columnVectorInfo.ordinal = queryDimensions[i].getDimension().getOrdinal(); allColumnInfo[queryDimensions[i].getOrdinal()] = columnVectorInfo; + } else if (queryDimensions[i].getDimension().isComplex()) { Review comment: If you see carefully, line 109 is queryDimensions[i].getDimension().getDataType() != DataTypes.DATE, complex is also !=Date. so it was never entering the expected branch ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476427767 ########## File path: integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ComplexTypeStreamReader.java ########## @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.carbondata.presto.readers; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import io.prestosql.spi.block.ArrayBlock; +import io.prestosql.spi.block.RowBlock; +import io.prestosql.spi.type.*; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.metadata.datatype.DataTypes; +import org.apache.carbondata.core.metadata.datatype.StructField; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; + +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; + +import org.apache.carbondata.presto.CarbonVectorBatch; +import org.apache.carbondata.presto.ColumnarVectorWrapperDirect; + +/** + * Class to read the complex type Stream [array/struct/map] + */ + +public class ComplexTypeStreamReader extends CarbonColumnVectorImpl + implements PrestoVectorBlockBuilder { + + protected int batchSize; + + protected Type type; + protected BlockBuilder builder; + + public ComplexTypeStreamReader(int batchSize, StructField field) { + super(batchSize, field.getDataType()); + this.batchSize = batchSize; + this.type = getType(field); + ArrayList<CarbonColumnVector> childrenList = new ArrayList<>(); + for (StructField child : field.getChildren()) { + childrenList.add(new ColumnarVectorWrapperDirect( + CarbonVectorBatch.createDirectStreamReader(this.batchSize, child.getDataType(), child))); + } + setChildrenVector(childrenList); + this.builder = type.createBlockBuilder(null, batchSize); + } + + Type getType(StructField field) { + DataType dataType = field.getDataType(); + if (dataType == DataTypes.STRING || dataType == DataTypes.VARCHAR) { + return VarcharType.VARCHAR; + } else if (dataType == DataTypes.SHORT) { + return SmallintType.SMALLINT; + } else if (dataType == DataTypes.INT) { + return IntegerType.INTEGER; + } else if (dataType == DataTypes.LONG) { + 
return BigintType.BIGINT; + } else if (dataType == DataTypes.DOUBLE) { + return DoubleType.DOUBLE; + } else if (dataType == DataTypes.FLOAT) { + return RealType.REAL; + } else if (dataType == DataTypes.BOOLEAN) { + return BooleanType.BOOLEAN; + } else if (dataType == DataTypes.BINARY) { + return VarbinaryType.VARBINARY; + } else if (dataType == DataTypes.DATE) { + return DateType.DATE; + } else if (dataType == DataTypes.TIMESTAMP) { + return TimestampType.TIMESTAMP; + } else if (dataType == DataTypes.BYTE) { + return TinyintType.TINYINT; + } else if (DataTypes.isDecimal(dataType)) { + org.apache.carbondata.core.metadata.datatype.DecimalType decimal = + (org.apache.carbondata.core.metadata.datatype.DecimalType) dataType; + return DecimalType.createDecimalType(decimal.getPrecision(), decimal.getScale()); + } else if (DataTypes.isArrayType(dataType)) { + return new ArrayType(getType(field.getChildren().get(0))); + } else if (DataTypes.isStructType(dataType)) { + List<RowType.Field> children = new ArrayList<>(); + for (StructField child : field.getChildren()) { + children.add(new RowType.Field(Optional.of(child.getFieldName()), getType(child))); + } + return RowType.from(children); + } else { + throw new UnsupportedOperationException("Unsupported type: " + dataType); + } + } + + @Override public Block buildBlock() { + return builder.build(); + } + + @Override public void setBatchSize(int batchSize) { + this.batchSize = batchSize; + } + + @Override public void putObject(int rowId, Object value) { + if (value == null) { + putNull(rowId); + } else { + getChildrenVector().get(0).putObject(rowId, value); Review comment: This is can be unsupported error, as we use only putComplexObject ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476428856 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: This is java, so here the logic is to assign multiple variables, hence it is already optimized to fill those info in util class. here is just assignment. You can try and let me know ! ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476429529 ########## File path: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/CarbonColumnVectorImpl.java ########## @@ -102,6 +109,57 @@ public CarbonColumnVectorImpl(int batchSize, DataType dataType) { } + @Override + public List<CarbonColumnVector> getChildrenVector() { + return childrenVector; + } + + public void setChildrenVector(ArrayList<CarbonColumnVector> childrenVector) { + this.childrenVector = childrenVector; + } + + public ArrayList<Integer> getNumberOfChildrenElementsInEachRow() { + return childElementsForEachRow; + } + + public void setNumberOfChildrenElementsInEachRow(ArrayList<Integer> childrenElements) { + this.childElementsForEachRow = childrenElements; + } + + public void setNumberOfChildrenElementsForArray(byte[] childPageData, int pageSize) { + // for complex array type, go through parent page to get the child information + ByteBuffer childInfoBuffer = ByteBuffer.wrap(childPageData); + ArrayList<Integer> childElementsForEachRow = new ArrayList<>(); + // offset will be an INT size and value will be another INT size, hence 2 * INT size Review comment: Need to have write flow understanding for this. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476433166 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveCodec.java ########## @@ -260,4 +265,52 @@ protected String debugInfo() { return this.toString(); } + // Utility class to update current vector to child vector in case of complex type handling + public static class VectorUtil { + private ColumnVectorInfo vectorInfo; + private int pageSize; + private CarbonColumnVector vector; + private DataType vectorDataType; + + public VectorUtil(ColumnVectorInfo vectorInfo, int pageSize, CarbonColumnVector vector, + DataType vectorDataType) { + this.vectorInfo = vectorInfo; + this.pageSize = pageSize; + this.vector = vector; + this.vectorDataType = vectorDataType; + } + + public int getPageSize() { + return pageSize; + } + + public CarbonColumnVector getVector() { + return vector; + } + + public DataType getVectorDataType() { + return vectorDataType; + } + + public VectorUtil checkAndUpdateToChildVector() { + Stack<CarbonColumnVector> vectorStack = vectorInfo.getVectorStack(); + if (vectorStack != null && vectorStack.peek() != null && vectorDataType.isComplexType()) { + if (vectorDataType.getName().equals("ARRAY")) { Review comment: can we can DataTypes.isArrayType(vectordatatype) ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476433166 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveCodec.java ########## @@ -260,4 +265,52 @@ protected String debugInfo() { return this.toString(); } + // Utility class to update current vector to child vector in case of complex type handling + public static class VectorUtil { + private ColumnVectorInfo vectorInfo; + private int pageSize; + private CarbonColumnVector vector; + private DataType vectorDataType; + + public VectorUtil(ColumnVectorInfo vectorInfo, int pageSize, CarbonColumnVector vector, + DataType vectorDataType) { + this.vectorInfo = vectorInfo; + this.pageSize = pageSize; + this.vector = vector; + this.vectorDataType = vectorDataType; + } + + public int getPageSize() { + return pageSize; + } + + public CarbonColumnVector getVector() { + return vector; + } + + public DataType getVectorDataType() { + return vectorDataType; + } + + public VectorUtil checkAndUpdateToChildVector() { + Stack<CarbonColumnVector> vectorStack = vectorInfo.getVectorStack(); + if (vectorStack != null && vectorStack.peek() != null && vectorDataType.isComplexType()) { + if (vectorDataType.getName().equals("ARRAY")) { Review comment: can we use DataTypes.isArrayType(vectordatatype)? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
akashrn5 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476438157 ########## File path: integration/presto/src/main/prestosql/org/apache/carbondata/presto/readers/ComplexTypeStreamReader.java ########## @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.carbondata.presto.readers; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import io.prestosql.spi.block.ArrayBlock; +import io.prestosql.spi.block.RowBlock; +import io.prestosql.spi.type.*; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.metadata.datatype.DataTypes; +import org.apache.carbondata.core.metadata.datatype.StructField; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; + +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; + +import org.apache.carbondata.presto.CarbonVectorBatch; +import org.apache.carbondata.presto.ColumnarVectorWrapperDirect; + +/** + * Class to read the complex type Stream [array/struct/map] + */ + +public class ComplexTypeStreamReader extends CarbonColumnVectorImpl + implements PrestoVectorBlockBuilder { + + protected int batchSize; + + protected Type type; + protected BlockBuilder builder; + + public ComplexTypeStreamReader(int batchSize, StructField field) { + super(batchSize, field.getDataType()); + this.batchSize = batchSize; + this.type = getType(field); + ArrayList<CarbonColumnVector> childrenList = new ArrayList<>(); + for (StructField child : field.getChildren()) { + childrenList.add(new ColumnarVectorWrapperDirect( + CarbonVectorBatch.createDirectStreamReader(this.batchSize, child.getDataType(), child))); + } + setChildrenVector(childrenList); + this.builder = type.createBlockBuilder(null, batchSize); + } + + Type getType(StructField field) { + DataType dataType = field.getDataType(); + if (dataType == DataTypes.STRING || dataType == DataTypes.VARCHAR) { + return VarcharType.VARCHAR; + } else if (dataType == DataTypes.SHORT) { + return SmallintType.SMALLINT; + } else if (dataType == DataTypes.INT) { + return IntegerType.INTEGER; + } else if (dataType == DataTypes.LONG) { + 
return BigintType.BIGINT; + } else if (dataType == DataTypes.DOUBLE) { + return DoubleType.DOUBLE; + } else if (dataType == DataTypes.FLOAT) { + return RealType.REAL; + } else if (dataType == DataTypes.BOOLEAN) { + return BooleanType.BOOLEAN; + } else if (dataType == DataTypes.BINARY) { + return VarbinaryType.VARBINARY; + } else if (dataType == DataTypes.DATE) { + return DateType.DATE; + } else if (dataType == DataTypes.TIMESTAMP) { + return TimestampType.TIMESTAMP; + } else if (dataType == DataTypes.BYTE) { + return TinyintType.TINYINT; + } else if (DataTypes.isDecimal(dataType)) { + org.apache.carbondata.core.metadata.datatype.DecimalType decimal = + (org.apache.carbondata.core.metadata.datatype.DecimalType) dataType; + return DecimalType.createDecimalType(decimal.getPrecision(), decimal.getScale()); + } else if (DataTypes.isArrayType(dataType)) { + return new ArrayType(getType(field.getChildren().get(0))); + } else if (DataTypes.isStructType(dataType)) { + List<RowType.Field> children = new ArrayList<>(); + for (StructField child : field.getChildren()) { + children.add(new RowType.Field(Optional.of(child.getFieldName()), getType(child))); + } + return RowType.from(children); + } else { + throw new UnsupportedOperationException("Unsupported type: " + dataType); + } + } + + @Override public Block buildBlock() { + return builder.build(); + } + + @Override public void setBatchSize(int batchSize) { + this.batchSize = batchSize; + } + + @Override public void putObject(int rowId, Object value) { + if (value == null) { + putNull(rowId); + } else { + getChildrenVector().get(0).putObject(rowId, value); Review comment: ok, if we use `putComplexObject` , then better no need of any implementation for this method, as you said can be changed to unsupported error. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476439065 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveCodec.java ########## @@ -260,4 +265,52 @@ protected String debugInfo() { return this.toString(); } + // Utility class to update current vector to child vector in case of complex type handling + public static class VectorUtil { + private ColumnVectorInfo vectorInfo; + private int pageSize; + private CarbonColumnVector vector; + private DataType vectorDataType; + + public VectorUtil(ColumnVectorInfo vectorInfo, int pageSize, CarbonColumnVector vector, + DataType vectorDataType) { + this.vectorInfo = vectorInfo; + this.pageSize = pageSize; + this.vector = vector; + this.vectorDataType = vectorDataType; + } + + public int getPageSize() { + return pageSize; + } + + public CarbonColumnVector getVector() { + return vector; + } + + public DataType getVectorDataType() { + return vectorDataType; + } + + public VectorUtil checkAndUpdateToChildVector() { Review comment: change this to static and pass all the fields required to build this class as a params and create instance of this class here in this method, constructor make it private ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
akashrn5 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476439569 ########## File path: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/CarbonColumnVectorImpl.java ########## @@ -102,6 +109,57 @@ public CarbonColumnVectorImpl(int batchSize, DataType dataType) { } + @Override + public List<CarbonColumnVector> getChildrenVector() { + return childrenVector; + } + + public void setChildrenVector(ArrayList<CarbonColumnVector> childrenVector) { + this.childrenVector = childrenVector; + } + + public ArrayList<Integer> getNumberOfChildrenElementsInEachRow() { + return childElementsForEachRow; + } + + public void setNumberOfChildrenElementsInEachRow(ArrayList<Integer> childrenElements) { + this.childElementsForEachRow = childrenElements; + } + + public void setNumberOfChildrenElementsForArray(byte[] childPageData, int pageSize) { + // for complex array type, go through parent page to get the child information + ByteBuffer childInfoBuffer = ByteBuffer.wrap(childPageData); + ArrayList<Integer> childElementsForEachRow = new ArrayList<>(); + // osset will be an INT size and value will be another INT size, hence 2 * INT size Review comment: yeah, i agree, but its for developer purpose, whoever has write flow understanding, it will be easy for them to understand, as we have many such places with very good comments written by @kumarvishal09 and others. I suggest to add here.! ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
akashrn5 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476440493 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: yeah, correct, and no need to try from my side..! ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476448290 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -282,6 +288,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) { vector.putFloat(rowId++, (max - ByteUtil.toIntLittleEndian(pageData, i)) / floatFactor); } + } else if (pageDataType == DataTypes.LONG) { Review comment: Why we are handling long here for float data type, for float type page data type cannot be long. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476450315 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: can we handle this in called method itself, handling in each and class its not correct, pls change this in other classes too ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476450601 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -282,6 +288,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) { vector.putFloat(rowId++, (max - ByteUtil.toIntLittleEndian(pageData, i)) / floatFactor); } + } else if (pageDataType == DataTypes.LONG) { Review comment: For AdaptiveFloating codec, float data was coming as long for complex primitive. So added for delta flow also. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476450315 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: can we handle this in caller method itself, handling in each and class its not correct, pls change this in other classes too ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476450315 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: can we handle this in caller method itself, handling in each and every class its not correct, pls change this in other classes too ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476544468 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: Direct compress needed a different handling because of decimal. So, had to update in each ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
ajantha-bhat commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476544468 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -255,6 +255,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi CarbonColumnVector vector = vectorInfo.vector; BitSet deletedRows = vectorInfo.deletedRows; DataType vectorDataType = vector.getType(); + VectorUtil vectorUtil = new VectorUtil(vectorInfo, pageSize, vector, vectorDataType) Review comment: Direct compress needed a different handling because of decimal. So, couldn't handle in decodeDimensionByMeta ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476547595 ########## File path: core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/adaptive/AdaptiveDeltaFloatingCodec.java ########## @@ -282,6 +288,12 @@ public void decodeAndFillVector(byte[] pageData, ColumnVectorInfo vectorInfo, Bi for (int i = 0; i < size; i += DataTypes.INT.getSizeInBytes()) { vector.putFloat(rowId++, (max - ByteUtil.toIntLittleEndian(pageData, i)) / floatFactor); } + } else if (pageDataType == DataTypes.LONG) { Review comment: this means, in some scenarios it's storing float data as Long [4 bytes to 8 bytes]? Can you please check the test once ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476550903 ########## File path: core/src/main/java/org/apache/carbondata/core/scan/result/vector/CarbonColumnVector.java ########## @@ -114,4 +115,20 @@ void setLazyPage(LazyPageLoader lazyPage); + // Add default implementation for interface, + // to avoid implementing presto required functions for spark or core module. + default List<CarbonColumnVector> getChildrenVector() { + return null; Review comment: return empty list from here ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
In reply to this post by GitBox
kumarvishal09 commented on a change in pull request #3887: URL: https://github.com/apache/carbondata/pull/3887#discussion_r476552867 ########## File path: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/CarbonColumnVectorImpl.java ########## @@ -70,10 +73,14 @@ private CarbonColumnVector dictionaryVector; + private List<CarbonColumnVector> childrenVector; + private LazyPageLoader lazyPage; private boolean loaded; + private ArrayList<Integer> childElementsForEachRow = null; Review comment: change this to List<Integer> and null assignment is not required ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
Free forum by Nabble | Edit this page |