Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/1896/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/9944/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/9948/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Failed with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/1899/
---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2978

retest this please
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/1691/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/9951/
---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2978

Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/1901/
---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240519199

--- Diff: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java ---
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.scan.result.vector.impl.directread;
+
+import java.util.BitSet;
+
+/**
+ * It is sort of a marker interface to let execution engine know that it is appendable/sequential
+ * data adding vector. It means we cannot add random rowids to it.
+ */
+public interface SequentialFill {
--- End diff --

Please add @InterfaceAudience annotation
---
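For readers following along, here is a hedged sketch of the requested change. It assumes CarbonData's @InterfaceAudience.Internal annotation (from org.apache.carbondata.common.annotations) is the one meant, and it infers a single setNullBits(BitSet) method from the BitSet import above and the override in ColumnarVectorWrapperDirect later in this thread; neither detail is confirmed here.

package org.apache.carbondata.core.scan.result.vector.impl.directread;

import java.util.BitSet;

import org.apache.carbondata.common.annotations.InterfaceAudience;

/**
 * Marker-style interface that tells the execution engine the vector is an
 * appendable/sequential data-adding vector, i.e. rows cannot be added at random rowIds.
 */
@InterfaceAudience.Internal
public interface SequentialFill {

  /**
   * Null bits for the rows that will be appended sequentially (assumed signature).
   */
  void setNullBits(BitSet nullBits);
}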
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240524219

--- Diff: core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/AbstractNonDictionaryVectorFiller.java ---
@@ -83,20 +84,37 @@ public StringVectorFiller(int numberOfRows, int actualDataLength) {
   @Override public void fillVector(byte[] data, CarbonColumnVector vector) {
     // start position will be used to store the current data position
+    boolean invertedIndex = vector instanceof ColumnarVectorWrapperDirectWithInvertedIndex
+        || vector instanceof SequentialFill;
--- End diff --

It seems SequentialFill is not a subclass of CarbonColumnVector?
---
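The check compiles because a concrete vector class can implement both CarbonColumnVector and the SequentialFill marker even though the two types are unrelated. A self-contained sketch with hypothetical stand-in names illustrating that pattern:

// Hypothetical stand-ins for CarbonColumnVector and SequentialFill; the point is only
// that instanceof against an unrelated marker interface is legal and resolves at runtime.
interface ColumnVector {
  void putInt(int rowId, int value);
}

interface SequentialOnly {
  // marker: rows must be appended in order, not at arbitrary rowIds
}

class DirectVector implements ColumnVector, SequentialOnly {
  @Override public void putInt(int rowId, int value) {
    // append-only fill would go here
  }
}

public class MarkerCheckDemo {
  public static void main(String[] args) {
    ColumnVector v = new DirectVector();
    // Prints true: the runtime class implements the marker even though the declared type does not.
    System.out.println(v instanceof SequentialOnly);
  }
}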
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240524512

--- Diff: core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/AbstractNonDictionaryVectorFiller.java ---
@@ -83,20 +84,37 @@ public StringVectorFiller(int numberOfRows, int actualDataLength) {
   @Override public void fillVector(byte[] data, CarbonColumnVector vector) {
     // start position will be used to store the current data position
+    boolean invertedIndex = vector instanceof ColumnarVectorWrapperDirectWithInvertedIndex
+        || vector instanceof SequentialFill;
+
     int localOffset = 0;
     ByteUtil.UnsafeComparer comparator = ByteUtil.UnsafeComparer.INSTANCE;
-    for (int i = 0; i < numberOfRows; i++) {
-      int length = (((data[localOffset] & 0xFF) << 8) | (data[localOffset + 1] & 0xFF));
-      localOffset += 2;
-      if (comparator.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, 0,
-          CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY.length, data, localOffset, length)) {
-        vector.putNull(i);
-      } else {
-        vector.putArray(i, localOffset, length);
+    if (invertedIndex) {
--- End diff --

can you add a comment for this if-else branch
---
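For context while reading this request, the plain (non-inverted-index, non-sequential) path visible in the quoted diff does roughly the following. The sketch restates it with the kind of explanatory comments being asked for; the method wrapper, its name and the trailing localOffset advance are inferred rather than quoted, so treat it as a reading aid, not the merged implementation.

// Sketch of the else-branch behaviour (no inverted index), annotated as the review asks.
private void fillDirectly(byte[] data, CarbonColumnVector vector, int numberOfRows) {
  int localOffset = 0;
  ByteUtil.UnsafeComparer comparator = ByteUtil.UnsafeComparer.INSTANCE;
  for (int i = 0; i < numberOfRows; i++) {
    // Values are laid out as [2-byte big-endian length][value bytes]; read the length first.
    int length = (((data[localOffset] & 0xFF) << 8) | (data[localOffset + 1] & 0xFF));
    localOffset += 2;
    // A value equal to the member-default marker represents NULL.
    if (comparator.equals(CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY, 0,
        CarbonCommonConstants.MEMBER_DEFAULT_VAL_ARRAY.length, data, localOffset, length)) {
      vector.putNull(i);
    } else {
      // No inverted index, so row i is written straight at position i while scanning the page.
      vector.putArray(i, localOffset, length);
    }
    // Move past the value bytes to the next [length][value] pair (inferred step).
    localOffset += length;
  }
}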
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240526032

--- Diff: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/CarbonColumnVectorImpl.java ---
@@ -367,7 +378,14 @@ public void setBlockDataType(DataType blockDataType) {
   }

   @Override public void setLazyPage(LazyPageLoader lazyPage) {
-    lazyPage.loadPage();
+    this.lazyPage = lazyPage;
+  }
+
+  public void loadPage() {
+    if (lazyPage != null) {
+      lazyPage.loadPage();
+    }
+    loaded = true;
--- End diff --

if lazyPage is null, should it still be set to true?
---
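A sketch of the alternative the question implies — flipping the flag only when a lazy page was actually loaded. Whether this or the original behaviour (marking the vector loaded unconditionally) is intended is for the author to confirm.

public void loadPage() {
  // Mark the vector as loaded only after a real lazy page load; when no lazy page was set,
  // leave the flag untouched so callers can distinguish "nothing to load" from "loaded".
  if (lazyPage != null) {
    lazyPage.loadPage();
    loaded = true;
  }
}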
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240528010 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + + /** + * It is adapter class of complete ColumnarBatch. + */ + protected CarbonColumnVectorImpl columnVector; + + private DataType blockDataType; --- End diff -- it is column's datatype? --- |
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240528169 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { --- End diff -- add space after ',' --- |
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240528243 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + --- End diff -- remove extra empty line --- |
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240530272 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + + /** + * It is adapter class of complete ColumnarBatch. + */ + protected CarbonColumnVectorImpl columnVector; + + private DataType blockDataType; + + private CarbonColumnVector dictionaryVector; + + private BitSet nullBitset; + + ColumnarVectorWrapperDirect(CarbonColumnVectorImpl columnVector) { + this.columnVector = columnVector; + this.dictionaryVector = columnVector.getDictionaryVector(); + this.nullBitset = new BitSet(); + } + + @Override public void setNullBits(BitSet nullBits) { + this.nullBitset = nullBits; + } + + @Override public void putBoolean(int rowId, boolean value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putBoolean(rowId, value); + } + } + + @Override public void putFloat(int rowId, float value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putFloat(rowId, value); + } + } + + @Override public void putShort(int rowId, short value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + } + + @Override public void putShorts(int rowId, int count, short value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + rowId++; + } + + } + + @Override public void putInt(int rowId, int value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putInt(rowId, value); + } + } + + @Override public void putInts(int rowId, int count, int value) { + columnVector.putInts(rowId, count, value); + } + + @Override public void putLong(int rowId, long value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putLong(rowId, value); + } + } + + @Override public void putLongs(int rowId, 
int count, long value) { + columnVector.putLongs(rowId, count, value); + } + + @Override public void putDecimal(int rowId, BigDecimal value, int precision) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + } + + @Override public void putDecimals(int rowId, int count, BigDecimal value, int precision) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + rowId++; + } + } + + @Override public void putDouble(int rowId, double value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDouble(rowId, value); + } + } + + @Override public void putDoubles(int rowId, int count, double value) { + columnVector.putDoubles(rowId, count, value); + } + + @Override public void putByteArray(int rowId, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + } + + @Override + public void putBytes(int rowId, int count, byte[] value) { --- End diff -- It seems this is only used for filling default value, can we change its signature and avoid the null bit check inside? --- |
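A sketch of what the suggested signature change could look like, assuming putBytes really is only used to fill a default value; the method name and the dropped per-row null check are illustrative, not the actual follow-up commit.

// Hypothetical replacement per the review suggestion: a default-value fill does not need
// to consult the null bitset for every row, so the check is dropped entirely.
public void putDefaultBytes(int rowId, int count, byte[] value) {
  for (int i = 0; i < count; i++) {
    columnVector.putByteArray(rowId + i, value);
  }
}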
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240530928 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + + /** + * It is adapter class of complete ColumnarBatch. + */ + protected CarbonColumnVectorImpl columnVector; + + private DataType blockDataType; + + private CarbonColumnVector dictionaryVector; + + private BitSet nullBitset; + + ColumnarVectorWrapperDirect(CarbonColumnVectorImpl columnVector) { + this.columnVector = columnVector; + this.dictionaryVector = columnVector.getDictionaryVector(); + this.nullBitset = new BitSet(); + } + + @Override public void setNullBits(BitSet nullBits) { + this.nullBitset = nullBits; + } + + @Override public void putBoolean(int rowId, boolean value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putBoolean(rowId, value); + } + } + + @Override public void putFloat(int rowId, float value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putFloat(rowId, value); + } + } + + @Override public void putShort(int rowId, short value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + } + + @Override public void putShorts(int rowId, int count, short value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + rowId++; + } + + } + + @Override public void putInt(int rowId, int value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putInt(rowId, value); + } + } + + @Override public void putInts(int rowId, int count, int value) { + columnVector.putInts(rowId, count, value); + } + + @Override public void putLong(int rowId, long value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putLong(rowId, value); + } + } + + @Override public void putLongs(int rowId, 
int count, long value) { + columnVector.putLongs(rowId, count, value); + } + + @Override public void putDecimal(int rowId, BigDecimal value, int precision) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + } + + @Override public void putDecimals(int rowId, int count, BigDecimal value, int precision) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + rowId++; + } + } + + @Override public void putDouble(int rowId, double value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDouble(rowId, value); + } + } + + @Override public void putDoubles(int rowId, int count, double value) { + columnVector.putDoubles(rowId, count, value); + } + + @Override public void putByteArray(int rowId, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + } + + @Override + public void putBytes(int rowId, int count, byte[] value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + rowId++; + } + } + + @Override public void putByteArray(int rowId, int offset, int length, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, offset, length, value); + } + } + + @Override public void putNull(int rowId) { + columnVector.putNull(rowId); + } + + @Override public void putNulls(int rowId, int count) { + columnVector.putNulls(rowId, count); + } + + @Override public void putNotNull(int rowId) { + columnVector.putNotNull(rowId); + } + + @Override public void putNotNull(int rowId, int count) { + } + + @Override public boolean isNull(int rowId) { + return columnVector.isNullAt(rowId); + } + + @Override public void putObject(int rowId, Object obj) { + //TODO handle complex types --- End diff -- should throw UnsupportedOperationException --- |
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240530952 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + + /** + * It is adapter class of complete ColumnarBatch. + */ + protected CarbonColumnVectorImpl columnVector; + + private DataType blockDataType; + + private CarbonColumnVector dictionaryVector; + + private BitSet nullBitset; + + ColumnarVectorWrapperDirect(CarbonColumnVectorImpl columnVector) { + this.columnVector = columnVector; + this.dictionaryVector = columnVector.getDictionaryVector(); + this.nullBitset = new BitSet(); + } + + @Override public void setNullBits(BitSet nullBits) { + this.nullBitset = nullBits; + } + + @Override public void putBoolean(int rowId, boolean value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putBoolean(rowId, value); + } + } + + @Override public void putFloat(int rowId, float value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putFloat(rowId, value); + } + } + + @Override public void putShort(int rowId, short value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + } + + @Override public void putShorts(int rowId, int count, short value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + rowId++; + } + + } + + @Override public void putInt(int rowId, int value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putInt(rowId, value); + } + } + + @Override public void putInts(int rowId, int count, int value) { + columnVector.putInts(rowId, count, value); + } + + @Override public void putLong(int rowId, long value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putLong(rowId, value); + } + } + + @Override public void putLongs(int rowId, 
int count, long value) { + columnVector.putLongs(rowId, count, value); + } + + @Override public void putDecimal(int rowId, BigDecimal value, int precision) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + } + + @Override public void putDecimals(int rowId, int count, BigDecimal value, int precision) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + rowId++; + } + } + + @Override public void putDouble(int rowId, double value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDouble(rowId, value); + } + } + + @Override public void putDoubles(int rowId, int count, double value) { + columnVector.putDoubles(rowId, count, value); + } + + @Override public void putByteArray(int rowId, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + } + + @Override + public void putBytes(int rowId, int count, byte[] value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + rowId++; + } + } + + @Override public void putByteArray(int rowId, int offset, int length, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, offset, length, value); + } + } + + @Override public void putNull(int rowId) { + columnVector.putNull(rowId); + } + + @Override public void putNulls(int rowId, int count) { + columnVector.putNulls(rowId, count); + } + + @Override public void putNotNull(int rowId) { + columnVector.putNotNull(rowId); + } + + @Override public void putNotNull(int rowId, int count) { + } + + @Override public boolean isNull(int rowId) { + return columnVector.isNullAt(rowId); + } + + @Override public void putObject(int rowId, Object obj) { + //TODO handle complex types + } + + @Override public Object getData(int rowId) { + //TODO handle complex types + return null; --- End diff -- should throw UnsupportedOperationException --- |
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240531041 --- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/ColumnarVectorWrapperDirect.java --- @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.presto; + +import java.math.BigDecimal; +import java.util.BitSet; + +import org.apache.carbondata.core.metadata.datatype.DataType; +import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector; +import org.apache.carbondata.core.scan.result.vector.CarbonDictionary; +import org.apache.carbondata.core.scan.result.vector.impl.CarbonColumnVectorImpl; +import org.apache.carbondata.core.scan.result.vector.impl.directread.SequentialFill; +import org.apache.carbondata.core.scan.scanner.LazyPageLoader; + +/** + * Fills the vector directly with out considering any deleted rows. + */ +class ColumnarVectorWrapperDirect implements CarbonColumnVector,SequentialFill { + + + /** + * It is adapter class of complete ColumnarBatch. + */ + protected CarbonColumnVectorImpl columnVector; + + private DataType blockDataType; + + private CarbonColumnVector dictionaryVector; + + private BitSet nullBitset; + + ColumnarVectorWrapperDirect(CarbonColumnVectorImpl columnVector) { + this.columnVector = columnVector; + this.dictionaryVector = columnVector.getDictionaryVector(); + this.nullBitset = new BitSet(); + } + + @Override public void setNullBits(BitSet nullBits) { + this.nullBitset = nullBits; + } + + @Override public void putBoolean(int rowId, boolean value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putBoolean(rowId, value); + } + } + + @Override public void putFloat(int rowId, float value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putFloat(rowId, value); + } + } + + @Override public void putShort(int rowId, short value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + } + + @Override public void putShorts(int rowId, int count, short value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putShort(rowId, value); + } + rowId++; + } + + } + + @Override public void putInt(int rowId, int value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putInt(rowId, value); + } + } + + @Override public void putInts(int rowId, int count, int value) { + columnVector.putInts(rowId, count, value); + } + + @Override public void putLong(int rowId, long value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putLong(rowId, value); + } + } + + @Override public void putLongs(int rowId, 
int count, long value) { + columnVector.putLongs(rowId, count, value); + } + + @Override public void putDecimal(int rowId, BigDecimal value, int precision) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + } + + @Override public void putDecimals(int rowId, int count, BigDecimal value, int precision) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDecimal(rowId, value, precision); + } + rowId++; + } + } + + @Override public void putDouble(int rowId, double value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putDouble(rowId, value); + } + } + + @Override public void putDoubles(int rowId, int count, double value) { + columnVector.putDoubles(rowId, count, value); + } + + @Override public void putByteArray(int rowId, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + } + + @Override + public void putBytes(int rowId, int count, byte[] value) { + for (int i = 0; i < count; i++) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, value); + } + rowId++; + } + } + + @Override public void putByteArray(int rowId, int offset, int length, byte[] value) { + if (nullBitset.get(rowId)) { + columnVector.putNull(rowId); + } else { + columnVector.putByteArray(rowId, offset, length, value); + } + } + + @Override public void putNull(int rowId) { + columnVector.putNull(rowId); + } + + @Override public void putNulls(int rowId, int count) { + columnVector.putNulls(rowId, count); + } + + @Override public void putNotNull(int rowId) { + columnVector.putNotNull(rowId); + } + + @Override public void putNotNull(int rowId, int count) { + } + + @Override public boolean isNull(int rowId) { + return columnVector.isNullAt(rowId); + } + + @Override public void putObject(int rowId, Object obj) { + //TODO handle complex types + } + + @Override public Object getData(int rowId) { + //TODO handle complex types + return null; + } + + @Override public void reset() { + if (null != dictionaryVector) { + dictionaryVector.reset(); + } + } + + @Override public DataType getType() { + return columnVector.getType(); + } + + @Override public DataType getBlockDataType() { + return blockDataType; + } + + @Override public void setBlockDataType(DataType blockDataType) { + this.blockDataType = blockDataType; + } + + @Override public void setDictionary(CarbonDictionary dictionary) { + columnVector.setDictionary(dictionary); + } + + @Override public boolean hasDictionary() { + return columnVector.hasDictionary(); + } + + + @Override public CarbonColumnVector getDictionaryVector() { + return dictionaryVector; + } + + @Override public void putByte(int rowId, byte value) { + columnVector.putByte(rowId, value); + } + + @Override public void setFilteredRowsExist(boolean filteredRowsExist) { + --- End diff -- should throw UnsupportedOperationException --- |
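The same fix is being asked for in the last three comments (putObject, getData and setFilteredRowsExist). A sketch of the pattern for one of them; the message text is an illustrative choice.

@Override public void setFilteredRowsExist(boolean filteredRowsExist) {
  // Direct (sequential) fill does not support filtered rows; fail fast rather than silently ignore.
  throw new UnsupportedOperationException(
      "setFilteredRowsExist is not supported by ColumnarVectorWrapperDirect");
}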
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2978#discussion_r240551915

--- Diff: core/src/main/java/org/apache/carbondata/core/scan/result/vector/impl/directread/SequentialFill.java ---
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.scan.result.vector.impl.directread;
+
+import java.util.BitSet;
+
+/**
+ * It is sort of a marker interface to let execution engine know that it is appendable/sequential
+ * data adding vector. It means we cannot add random rowids to it.
+ */
+public interface SequentialFill {
--- End diff --

ok
---