Indhumathi27 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r562546331 ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -141,100 +165,116 @@ public void fillVector(Object valuesToBeConverted, int size, // for string, varchar, binary, date, decimal types lengthStoredInBytes = shortSizeInBytes; } - byte[] data = (byte[]) valuesToBeConverted; - if (pageType == DataTypes.BYTE) { - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal.valueOf(data[i], scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); - } - vector.putDecimal(i, value, precision); - } - } - } else if (pageType == DataTypes.SHORT) { + if (this instanceof DecimalUnscaledConverter && scale < newMeasureScale) { + scale = newMeasureScale; + } + + if (valuesToBeConverted instanceof byte[][]) { + byte[][] data = (byte[][]) valuesToBeConverted; for (int i = 0; i < size; i++) { if (nullBitSet.get(i)) { vector.putNull(i); } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.toShortLittleEndian(data, i * shortSizeInBytes), - scale); + BigInteger bigInteger = new BigInteger(data[i]); + BigDecimal value = new BigDecimal(bigInteger, scale); if (value.scale() < newMeasureScale) { value = value.setScale(newMeasureScale); } vector.putDecimal(i, value, precision); } } - } else if (pageType == DataTypes.SHORT_INT) { - int shortIntSizeInBytes = DataTypes.SHORT_INT.getSizeInBytes(); - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.valueOf3Bytes(data, i * shortIntSizeInBytes), - scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); + } else if (valuesToBeConverted instanceof byte[]) { + byte[] data = (byte[]) valuesToBeConverted; + if (pageType == DataTypes.BYTE) { + for (int i = 0; i < size; i++) { + if (nullBitSet.get(i)) { + vector.putNull(i); + } else { + BigDecimal value = BigDecimal.valueOf(data[i], scale); Review comment: I think, we can still refactor the code for Cases: Byte, Short, Short_Int, Int and Long, As only difference is getting Bigdecimal value based on page data type ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
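For illustration, the refactor suggested in the comment above could keep one shared fill loop and isolate the only per-type difference — reading the unscaled value for the current page data type — behind a small helper. The sketch below is purely illustrative: the helper name readAsBigDecimal is hypothetical, variables such as data, vector, precision, scale and newMeasureScale are assumed to be in scope as in the existing fillVector method, and the INT/LONG readers are left as a comment rather than guessing at ByteUtil method names.

    // Sketch only: isolate the per-page-type unscaled-value read behind one helper
    // so BYTE/SHORT/SHORT_INT/INT/LONG can share a single fill loop.
    private BigDecimal readAsBigDecimal(byte[] data, int i, DataType pageType, int scale) {
      if (pageType == DataTypes.BYTE) {
        return BigDecimal.valueOf(data[i], scale);
      } else if (pageType == DataTypes.SHORT) {
        return BigDecimal.valueOf(
            ByteUtil.toShortLittleEndian(data, i * DataTypes.SHORT.getSizeInBytes()), scale);
      } else if (pageType == DataTypes.SHORT_INT) {
        return BigDecimal.valueOf(
            ByteUtil.valueOf3Bytes(data, i * DataTypes.SHORT_INT.getSizeInBytes()), scale);
      } else {
        // INT and LONG would follow the same pattern with their ByteUtil readers;
        // anything else is rejected explicitly instead of falling through silently.
        throw new UnsupportedOperationException("Unsupported decimal page type: " + pageType);
      }
    }

    // Shared loop, identical for every page data type:
    for (int i = 0; i < size; i++) {
      if (nullBitSet.get(i)) {
        vector.putNull(i);
      } else {
        BigDecimal value = readAsBigDecimal(data, i, pageType, scale);
        if (value.scale() < newMeasureScale) {
          value = value.setScale(newMeasureScale);
        }
        vector.putDecimal(i, value, precision);
      }
    }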
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-766553537 Build Failed with Spark 2.4.5, Please check CI http://121.244.95.60:12545/job/ApacheCarbon_PR_Builder_2.4.5/3293/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-766567465 Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12545/job/ApacheCarbonPRBuilder2.3/5051/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
akkio-97 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r565244018 ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -141,100 +165,116 @@ public void fillVector(Object valuesToBeConverted, int size, // for string, varchar, binary, date, decimal types lengthStoredInBytes = shortSizeInBytes; } - byte[] data = (byte[]) valuesToBeConverted; - if (pageType == DataTypes.BYTE) { - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal.valueOf(data[i], scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); - } - vector.putDecimal(i, value, precision); - } - } - } else if (pageType == DataTypes.SHORT) { + if (this instanceof DecimalUnscaledConverter && scale < newMeasureScale) { + scale = newMeasureScale; + } + + if (valuesToBeConverted instanceof byte[][]) { + byte[][] data = (byte[][]) valuesToBeConverted; for (int i = 0; i < size; i++) { if (nullBitSet.get(i)) { vector.putNull(i); } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.toShortLittleEndian(data, i * shortSizeInBytes), - scale); + BigInteger bigInteger = new BigInteger(data[i]); + BigDecimal value = new BigDecimal(bigInteger, scale); if (value.scale() < newMeasureScale) { value = value.setScale(newMeasureScale); } vector.putDecimal(i, value, precision); } } - } else if (pageType == DataTypes.SHORT_INT) { - int shortIntSizeInBytes = DataTypes.SHORT_INT.getSizeInBytes(); - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.valueOf3Bytes(data, i * shortIntSizeInBytes), - scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); + } else if (valuesToBeConverted instanceof byte[]) { + byte[] data = (byte[]) valuesToBeConverted; + if (pageType == DataTypes.BYTE) { + for (int i = 0; i < size; i++) { + if (nullBitSet.get(i)) { + vector.putNull(i); + } else { + BigDecimal value = BigDecimal.valueOf(data[i], scale); Review comment: Doing that might lead to NPE if Bigdecimal value is not initialized in any of the mentioned cases ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
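The NPE concern presumably refers to a shape like the sketch below (illustrative only, not code from the PR), where the BigDecimal is declared once and assigned only inside a matching page-type branch; an unhandled page type would leave it null and the later calls would dereference it.

    BigDecimal value = null;
    if (pageType == DataTypes.BYTE) {
      value = BigDecimal.valueOf(data[i], scale);
    } else if (pageType == DataTypes.SHORT) {
      value = BigDecimal.valueOf(
          ByteUtil.toShortLittleEndian(data, i * shortSizeInBytes), scale);
    } // ... SHORT_INT, INT, LONG branches omitted here
    if (value.scale() < newMeasureScale) {  // NullPointerException if no branch matched
      value = value.setScale(newMeasureScale);
    }
    vector.putDecimal(i, value, precision);

Indhumathi27's later reply argues this cannot occur as long as every page data type that reaches this code has a branch.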
akkio-97 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r565258047 ########## File path: integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/SparkStoreCreatorForPresto.scala ########## @@ -365,6 +366,15 @@ class SparkStoreCreatorForPresto extends QueryTest with BeforeAndAfterAll{ sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO TABLE streaming_table""") } + test("Test decimal unscaled converter") { + sql("drop table if exists array_decimal") + sql( + "CREATE TABLE IF NOT EXISTS array_decimal (salary array<decimal(20,3)>) STORED AS " + Review comment: We can add it, but it is of little use, as the flow is common for both array and struct. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
Indhumathi27 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r565273686 ########## File path: integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/SparkStoreCreatorForPresto.scala ########## @@ -365,6 +366,15 @@ class SparkStoreCreatorForPresto extends QueryTest with BeforeAndAfterAll{ sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO TABLE streaming_table""") } + test("Test decimal unscaled converter") { + sql("drop table if exists array_decimal") + sql( + "CREATE TABLE IF NOT EXISTS array_decimal (salary array<decimal(20,3)>) STORED AS " + Review comment: Better to add it, as it is a new scenario without a test case. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
Indhumathi27 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r565277742 ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -141,100 +165,116 @@ public void fillVector(Object valuesToBeConverted, int size, // for string, varchar, binary, date, decimal types lengthStoredInBytes = shortSizeInBytes; } - byte[] data = (byte[]) valuesToBeConverted; - if (pageType == DataTypes.BYTE) { - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal.valueOf(data[i], scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); - } - vector.putDecimal(i, value, precision); - } - } - } else if (pageType == DataTypes.SHORT) { + if (this instanceof DecimalUnscaledConverter && scale < newMeasureScale) { + scale = newMeasureScale; + } + + if (valuesToBeConverted instanceof byte[][]) { + byte[][] data = (byte[][]) valuesToBeConverted; for (int i = 0; i < size; i++) { if (nullBitSet.get(i)) { vector.putNull(i); } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.toShortLittleEndian(data, i * shortSizeInBytes), - scale); + BigInteger bigInteger = new BigInteger(data[i]); + BigDecimal value = new BigDecimal(bigInteger, scale); if (value.scale() < newMeasureScale) { value = value.setScale(newMeasureScale); } vector.putDecimal(i, value, precision); } } - } else if (pageType == DataTypes.SHORT_INT) { - int shortIntSizeInBytes = DataTypes.SHORT_INT.getSizeInBytes(); - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.valueOf3Bytes(data, i * shortIntSizeInBytes), - scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); + } else if (valuesToBeConverted instanceof byte[]) { + byte[] data = (byte[]) valuesToBeConverted; + if (pageType == DataTypes.BYTE) { + for (int i = 0; i < size; i++) { + if (nullBitSet.get(i)) { + vector.putNull(i); + } else { + BigDecimal value = BigDecimal.valueOf(data[i], scale); Review comment: i think, it will not lead to NPE, if all cases are handled. Please check ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768309765 Build Success with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3604/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768310088 Build Success with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5364/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768406401 Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5365/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768413049 Build Failed with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3605/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768591890 Build Success with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5367/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768592460 Build Failed with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3607/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768895232 Build Success with Spark 2.3.4, Please check CI http://121.244.95.60:12444/job/ApacheCarbonPRBuilder2.3/5368/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
CarbonDataQA2 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-768895460 Build Success with Spark 2.4.5, Please check CI http://121.244.95.60:12444/job/ApacheCarbon_PR_Builder_2.4.5/3608/ ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
akkio-97 commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r565936260 ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -141,100 +165,116 @@ public void fillVector(Object valuesToBeConverted, int size, // for string, varchar, binary, date, decimal types lengthStoredInBytes = shortSizeInBytes; } - byte[] data = (byte[]) valuesToBeConverted; - if (pageType == DataTypes.BYTE) { - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal.valueOf(data[i], scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); - } - vector.putDecimal(i, value, precision); - } - } - } else if (pageType == DataTypes.SHORT) { + if (this instanceof DecimalUnscaledConverter && scale < newMeasureScale) { + scale = newMeasureScale; + } + + if (valuesToBeConverted instanceof byte[][]) { + byte[][] data = (byte[][]) valuesToBeConverted; for (int i = 0; i < size; i++) { if (nullBitSet.get(i)) { vector.putNull(i); } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.toShortLittleEndian(data, i * shortSizeInBytes), - scale); + BigInteger bigInteger = new BigInteger(data[i]); + BigDecimal value = new BigDecimal(bigInteger, scale); if (value.scale() < newMeasureScale) { value = value.setScale(newMeasureScale); } vector.putDecimal(i, value, precision); } } - } else if (pageType == DataTypes.SHORT_INT) { - int shortIntSizeInBytes = DataTypes.SHORT_INT.getSizeInBytes(); - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigDecimal value = BigDecimal - .valueOf(ByteUtil.valueOf3Bytes(data, i * shortIntSizeInBytes), - scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); + } else if (valuesToBeConverted instanceof byte[]) { + byte[] data = (byte[]) valuesToBeConverted; + if (pageType == DataTypes.BYTE) { + for (int i = 0; i < size; i++) { + if (nullBitSet.get(i)) { + vector.putNull(i); + } else { + BigDecimal value = BigDecimal.valueOf(data[i], scale); Review comment: done ########## File path: integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/SparkStoreCreatorForPresto.scala ########## @@ -365,6 +366,15 @@ class SparkStoreCreatorForPresto extends QueryTest with BeforeAndAfterAll{ sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/IUD/dest.csv' INTO TABLE streaming_table""") } + test("Test decimal unscaled converter") { + sql("drop table if exists array_decimal") + sql( + "CREATE TABLE IF NOT EXISTS array_decimal (salary array<decimal(20,3)>) STORED AS " + Review comment: okay ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -88,11 +88,34 @@ void fillVector(Object valuesToBeConverted, int size, ColumnVectorInfo info, Bit DecimalConverterType getDecimalConverterType(); + default int getPrecisionForDimension(CarbonColumnVector vector) { Review comment: okay ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -328,9 +328,29 @@ public BigDecimal getDecimal(Object valueToBeConverted) { public void fillVector(Object valuesToBeConverted, int size, ColumnVectorInfo vectorInfo, BitSet nullBitSet, DataType pageType) { CarbonColumnVector vector = getCarbonColumnVector(vectorInfo, nullBitSet); - //TODO handle complex child - 
int precision = vectorInfo.measure.getMeasure().getPrecision(); - int newMeasureScale = vectorInfo.measure.getMeasure().getScale(); + int precision; + int newMeasureScale; + if (vectorInfo.measure == null) { + // complex primitive decimal flow comes as dimension + precision = ((DecimalType) vector.getType()).getPrecision(); + newMeasureScale = ((DecimalType) vector.getType()).getScale(); + size = ColumnVectorInfo.getUpdatedPageSizeForChildVector(vectorInfo, size); + } else { + precision = vectorInfo.measure.getMeasure().getPrecision(); + newMeasureScale = vectorInfo.measure.getMeasure().getScale(); + } Review comment: done ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
Indhumathi27 commented on pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#issuecomment-769577390 LGTM ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
ajantha-bhat commented on a change in pull request #4073: URL: https://github.com/apache/carbondata/pull/4073#discussion_r566594717 ########## File path: core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalConverterFactory.java ########## @@ -324,33 +311,6 @@ public BigDecimal getDecimal(Object valueToBeConverted) { return new BigDecimal(bigInteger, scale); } - @Override - public void fillVector(Object valuesToBeConverted, int size, - ColumnVectorInfo vectorInfo, BitSet nullBitSet, DataType pageType) { - CarbonColumnVector vector = getCarbonColumnVector(vectorInfo, nullBitSet); - //TODO handle complex child - int precision = vectorInfo.measure.getMeasure().getPrecision(); - int newMeasureScale = vectorInfo.measure.getMeasure().getScale(); - if (scale < newMeasureScale) { - scale = newMeasureScale; - } - if (valuesToBeConverted instanceof byte[][]) { - byte[][] data = (byte[][]) valuesToBeConverted; - for (int i = 0; i < size; i++) { - if (nullBitSet.get(i)) { - vector.putNull(i); - } else { - BigInteger bigInteger = new BigInteger(data[i]); - BigDecimal value = new BigDecimal(bigInteger, scale); - if (value.scale() < newMeasureScale) { - value = value.setScale(newMeasureScale); - } - vector.putDecimal(i, value, precision); - } - } - } Review comment: Add an else check and call super.fillVector if it is not byte[][], instead of copying this code into the super class and changing its implementation; the super class was meant only for the one-dimensional byte[] case. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [hidden email] |
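A rough sketch of the shape that suggestion implies — assuming DecimalUnscaledConverter can extend the converter that handles one-dimensional byte[] pages, so that super.fillVector is callable; the byte[][] branch below simply reuses the body of the removed method shown in the diff above.

    @Override
    public void fillVector(Object valuesToBeConverted, int size,
        ColumnVectorInfo vectorInfo, BitSet nullBitSet, DataType pageType) {
      if (valuesToBeConverted instanceof byte[][]) {
        // keep only the unscaled byte[][] decoding in the subclass
        CarbonColumnVector vector = getCarbonColumnVector(vectorInfo, nullBitSet);
        int precision = vectorInfo.measure.getMeasure().getPrecision();
        int newMeasureScale = vectorInfo.measure.getMeasure().getScale();
        if (scale < newMeasureScale) {
          scale = newMeasureScale;
        }
        byte[][] data = (byte[][]) valuesToBeConverted;
        for (int i = 0; i < size; i++) {
          if (nullBitSet.get(i)) {
            vector.putNull(i);
          } else {
            BigDecimal value = new BigDecimal(new BigInteger(data[i]), scale);
            if (value.scale() < newMeasureScale) {
              value = value.setScale(newMeasureScale);
            }
            vector.putDecimal(i, value, precision);
          }
        }
      } else {
        // one-dimensional byte[] pages: delegate to the parent implementation
        // instead of duplicating its per-page-type branches here
        super.fillVector(valuesToBeConverted, size, vectorInfo, nullBitSet, pageType);
      }
    }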