Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244254924

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java ---
@@ -78,32 +72,33 @@
   private static final String HIVE_DEFAULT_DYNAMIC_PARTITION = "__HIVE_DEFAULT_PARTITION__";

   /**
-   * @param carbondataColumnHandle
+   * @param columnHandle
    * @return
    */
-  private static DataType spi2CarbondataTypeMapper(CarbondataColumnHandle carbondataColumnHandle) {
-    Type colType = carbondataColumnHandle.getColumnType();
-    if (colType == BooleanType.BOOLEAN) {
+  private static DataType spi2CarbondataTypeMapper(HiveColumnHandle columnHandle) {
+    HiveType colType = columnHandle.getHiveType();
+    if (colType.equals(HiveType.HIVE_BOOLEAN)) {
       return DataTypes.BOOLEAN;
-    } else if (colType == SmallintType.SMALLINT) {
+    } else if (colType.equals(HiveType.HIVE_SHORT)) {
       return DataTypes.SHORT;
-    } else if (colType == IntegerType.INTEGER) {
+    } else if (colType.equals(HiveType.HIVE_INT)) {
       return DataTypes.INT;
-    } else if (colType == BigintType.BIGINT) {
+    } else if (colType.equals(HiveType.HIVE_LONG)) {
       return DataTypes.LONG;
-    } else if (colType == DoubleType.DOUBLE) {
+    } else if (colType.equals(HiveType.HIVE_DOUBLE)) {
       return DataTypes.DOUBLE;
-    } else if (colType == VarcharType.VARCHAR) {
+    } else if (colType.equals(HiveType.HIVE_STRING)) {
       return DataTypes.STRING;
-    } else if (colType == DateType.DATE) {
+    } else if (colType.equals(HiveType.HIVE_DATE)) {
       return DataTypes.DATE;
-    } else if (colType == TimestampType.TIMESTAMP) {
+    } else if (colType.equals(HiveType.HIVE_TIMESTAMP)) {
       return DataTypes.TIMESTAMP;
-    } else if (colType.equals(DecimalType.createDecimalType(carbondataColumnHandle.getPrecision(),
-        carbondataColumnHandle.getScale()))) {
-      return DataTypes.createDecimalType(carbondataColumnHandle.getPrecision(),
-          carbondataColumnHandle.getScale());
-    } else {
+    }
+    else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
+      DecimalTypeInfo typeInfo = (DecimalTypeInfo) colType.getTypeInfo();
+      return DataTypes.createDecimalType(typeInfo.getPrecision(),typeInfo.getScale());
+    }
+    else {
--- End diff --

move up

---
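The "move up" remark targets the brace style the patch introduces: the new else if and else keywords sit on their own lines below the closing brace instead of beside it. A minimal sketch of the layout being requested follows; it is illustrative only, and the body of the final else branch is cut off in the quoted diff, so it stays elided here:

    } else if (colType.equals(HiveType.HIVE_TIMESTAMP)) {
      return DataTypes.TIMESTAMP;
    } else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
      DecimalTypeInfo typeInfo = (DecimalTypeInfo) colType.getTypeInfo();
      return DataTypes.createDecimalType(typeInfo.getPrecision(), typeInfo.getScale());
    } else {
      // fallback branch as in the original file, unchanged by this comment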
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/10305/

---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244269435

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java ---
@@ -17,62 +17,150 @@
 package org.apache.carbondata.presto;

-import javax.inject.Inject;
+import java.util.function.Supplier;

 import static java.util.Objects.requireNonNull;

-import org.apache.carbondata.presto.impl.CarbonTableConfig;
 import org.apache.carbondata.presto.impl.CarbonTableReader;

+import com.facebook.presto.hive.CoercionPolicy;
+import com.facebook.presto.hive.DirectoryLister;
+import com.facebook.presto.hive.FileFormatDataSourceStats;
+import com.facebook.presto.hive.GenericHiveRecordCursorProvider;
+import com.facebook.presto.hive.HadoopDirectoryLister;
+import com.facebook.presto.hive.HdfsConfiguration;
+import com.facebook.presto.hive.HdfsConfigurationUpdater;
+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveClientModule;
+import com.facebook.presto.hive.HiveCoercionPolicy;
+import com.facebook.presto.hive.HiveConnectorId;
+import com.facebook.presto.hive.HiveEventClient;
+import com.facebook.presto.hive.HiveFileWriterFactory;
+import com.facebook.presto.hive.HiveHdfsConfiguration;
+import com.facebook.presto.hive.HiveLocationService;
+import com.facebook.presto.hive.HiveMetadataFactory;
+import com.facebook.presto.hive.HiveNodePartitioningProvider;
+import com.facebook.presto.hive.HivePageSinkProvider;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePartitionManager;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSessionProperties;
+import com.facebook.presto.hive.HiveSplitManager;
+import com.facebook.presto.hive.HiveTableProperties;
+import com.facebook.presto.hive.HiveTransactionManager;
+import com.facebook.presto.hive.HiveTypeTranslator;
+import com.facebook.presto.hive.HiveWriterStats;
+import com.facebook.presto.hive.LocationService;
+import com.facebook.presto.hive.NamenodeStats;
+import com.facebook.presto.hive.OrcFileWriterConfig;
+import com.facebook.presto.hive.OrcFileWriterFactory;
+import com.facebook.presto.hive.PartitionUpdate;
+import com.facebook.presto.hive.RcFileFileWriterFactory;
+import com.facebook.presto.hive.TableParameterCodec;
+import com.facebook.presto.hive.TransactionalMetadata;
+import com.facebook.presto.hive.TypeTranslator;
+import com.facebook.presto.hive.orc.DwrfPageSourceFactory;
+import com.facebook.presto.hive.orc.OrcPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetPageSourceFactory;
+import com.facebook.presto.hive.parquet.ParquetRecordCursorProvider;
+import com.facebook.presto.hive.rcfile.RcFilePageSourceFactory;
+import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider;
+import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
 import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
 import com.facebook.presto.spi.connector.ConnectorSplitManager;
-import com.facebook.presto.spi.type.Type;
-import com.facebook.presto.spi.type.TypeManager;
-import com.fasterxml.jackson.databind.DeserializationContext;
-import com.fasterxml.jackson.databind.deser.std.FromStringDeserializer;
 import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.google.inject.Scopes;
+import com.google.inject.TypeLiteral;
+import com.google.inject.multibindings.Multibinder;
+import io.airlift.event.client.EventClient;

-import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
-import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.inject.multibindings.Multibinder.newSetBinder;
 import static io.airlift.configuration.ConfigBinder.configBinder;
+import static io.airlift.json.JsonCodecBinder.jsonCodecBinder;

-public class CarbondataModule implements Module {
+import static org.weakref.jmx.ObjectNames.generatedNameOf;
+import static org.weakref.jmx.guice.ExportBinder.newExporter;
+
+public class CarbondataModule extends HiveClientModule {
--- End diff --

Please add comment for this class

---
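The diff itself carries no class-level comment. One plausible wording for the Javadoc the reviewer is asking for, inferred from the bindings visible in the imports (the author's final text may differ):

    /**
     * Guice module for the CarbonData Presto connector. It extends Presto's
     * HiveClientModule so the connector can reuse the Hive connector's
     * infrastructure (HDFS environment, page source factories, session
     * properties, writer factories) while binding CarbonData-specific
     * implementations such as CarbonTableReader and the CarbonData split
     * manager and page source provider.
     */
    public class CarbondataModule extends HiveClientModule {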
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244269478

--- Diff: core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java ---
@@ -369,6 +369,24 @@ public static boolean createNewLockFile(String filePath, FileType fileType) thro
     LOCAL, HDFS, ALLUXIO, VIEWFS, S3
   }

+  public static String addSchemeIfNotExists(String filePath) {
--- End diff --

add comment

---
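The hunk shows only the signature of the new method. Below is a hedged sketch of both the comment being requested and a likely body, prefixing a scheme when a path has none; the actual body in the PR may differ, and the getFileType helper and "file://" default used here are assumptions:

    /**
     * Prefixes the given path with a URI scheme (for example "file://" for
     * local paths) if it does not already carry one, so that consumers such
     * as the Presto integration always receive a fully qualified path.
     *
     * @param filePath table or file path, with or without a URI scheme
     * @return the path with a scheme guaranteed to be present
     */
    public static String addSchemeIfNotExists(String filePath) {
      FileType fileType = getFileType(filePath);  // assumed existing FileFactory helper
      if (fileType == FileType.LOCAL && !filePath.startsWith("file://")) {
        return "file://" + filePath;  // illustrative default scheme for local paths
      }
      return filePath;
    }

With behavior like this, a local tablePath such as /tmp/carbon/store/db/t1 would come out as file:///tmp/carbon/store/db/t1, which is what the CarbonCreateTableCommand change below relies on when writing the path property.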
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244269584

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala ---
@@ -157,7 +157,7 @@ case class CarbonCreateTableCommand(
          | tableName "$tableName",
          | dbName "$dbName",
          | tablePath "$tablePath",
-         | path "$tablePath",
+         | path "${FileFactory.addSchemeIfNotExists(tablePath)}",
--- End diff --

Can you update the comment from line 147 to 150? I feel it needs to be rephrased.

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244276046

--- Diff: integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala ---
@@ -80,7 +80,7 @@ object CarbonDataStoreCreator {
         UUID.randomUUID().toString))
     // val factFilePath: String = new File(dataFilePath).getCanonicalPath
     val storeDir: File = new File(absoluteTableIdentifier.getTablePath)
-    CarbonUtil.deleteFoldersAndFiles(storeDir)
+// CarbonUtil.deleteFoldersAndFiles(storeDir)
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244276118

--- Diff: integration/presto/src/test/scala/org/apache/carbondata/presto/server/PrestoServer.scala ---
@@ -129,6 +130,21 @@ class PrestoServer {
     }
   }

+  def execute(query: String) = {
+
+    Try {
+      val conn: Connection = createJdbcConnection(dbName)
+      logger.info(s"***** executing the query ***** \n $query")
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244276202

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java ---
+    else if (colType.getTypeInfo() instanceof DecimalTypeInfo) {
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244276214

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/PrestoFilterUtil.java ---
+    else {
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244278116

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataModule.java ---
+public class CarbondataModule extends HiveClientModule {
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244278266

--- Diff: core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java ---
+  public static String addSchemeIfNotExists(String filePath) {
--- End diff --

ok

---
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244278709

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonCreateTableCommand.scala ---
+         | path "${FileFactory.addSchemeIfNotExists(tablePath)}",
--- End diff --

ok

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/2063/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Failed with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/10316/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/2268/

---
Github user kumarvishal09 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244104691

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java ---
@@ -43,63 +43,78 @@
 import static org.apache.carbondata.presto.Types.checkType;

+import com.facebook.presto.hive.HdfsEnvironment;
+import com.facebook.presto.hive.HiveClientConfig;
+import com.facebook.presto.hive.HiveColumnHandle;
+import com.facebook.presto.hive.HivePageSourceFactory;
+import com.facebook.presto.hive.HivePageSourceProvider;
+import com.facebook.presto.hive.HiveRecordCursorProvider;
+import com.facebook.presto.hive.HiveSplit;
 import com.facebook.presto.spi.ColumnHandle;
 import com.facebook.presto.spi.ConnectorPageSource;
 import com.facebook.presto.spi.ConnectorSession;
 import com.facebook.presto.spi.ConnectorSplit;
-import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
+import com.facebook.presto.spi.SchemaTableName;
 import com.facebook.presto.spi.connector.ConnectorTransactionHandle;
+import com.facebook.presto.spi.type.TypeManager;
 import com.google.common.collect.ImmutableList;
 import com.google.inject.Inject;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TaskType;

-import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
-
 /**
  * Provider Class for Carbondata Page Source class.
  */
-public class CarbondataPageSourceProvider implements ConnectorPageSourceProvider {
+public class CarbondataPageSourceProvider extends HivePageSourceProvider {

-  private String connectorId;
   private CarbonTableReader carbonTableReader;
   private String queryId ;
-
-  @Inject public CarbondataPageSourceProvider(CarbondataConnectorId connectorId,
+  private HdfsEnvironment hdfsEnvironment;
+
+  @Inject public CarbondataPageSourceProvider(
+      HiveClientConfig hiveClientConfig,
+      HdfsEnvironment hdfsEnvironment,
+      Set<HiveRecordCursorProvider> cursorProviders,
+      Set<HivePageSourceFactory> pageSourceFactories,
+      TypeManager typeManager,
       CarbonTableReader carbonTableReader) {
-    this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
+    super(hiveClientConfig, hdfsEnvironment, cursorProviders, pageSourceFactories, typeManager);
     this.carbonTableReader = requireNonNull(carbonTableReader, "carbonTableReader is null");
+    this.hdfsEnvironment = hdfsEnvironment;
   }

   @Override public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle,
       ConnectorSession session, ConnectorSplit split, List<ColumnHandle> columns) {
-    this.queryId = ((CarbondataSplit)split).getQueryId();
+    HiveSplit carbonSplit =
+        checkType(split, HiveSplit.class, "split is not class HiveSplit");
+    if (carbonSplit.getSchema().getProperty("queryId") == null) {
+      return super.createPageSource(transactionHandle, session, split, columns);
+    }
+    this.queryId = carbonSplit.getSchema().getProperty("queryId");
--- End diff --

Move this line above the if condition, and in the if condition check whether queryId is null.

---
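A minimal sketch of the refactor being requested: read the property once, above the branch, and have the branch test the local field. This is illustrative only; the committed fix may differ:

    @Override public ConnectorPageSource createPageSource(ConnectorTransactionHandle transactionHandle,
        ConnectorSession session, ConnectorSplit split, List<ColumnHandle> columns) {
      HiveSplit carbonSplit = checkType(split, HiveSplit.class, "split is not class HiveSplit");
      // Read the queryId once, before branching.
      this.queryId = carbonSplit.getSchema().getProperty("queryId");
      if (this.queryId == null) {
        // Not a CarbonData split: delegate to the inherited Hive page source.
        return super.createPageSource(transactionHandle, session, split, columns);
      }
      // ... CarbonData-specific page source construction continues as in the PR ...
    }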
Github user ravipesala commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/3019#discussion_r244312730

--- Diff: integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataPageSourceProvider.java ---
+    this.queryId = carbonSplit.getSchema().getProperty("queryId");
--- End diff --

ok

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.2.1, Please check CI http://95.216.28.178:8080/job/ApacheCarbonPRBuilder1/2275/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder2.1/2070/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/3019

Build Success with Spark 2.3.2, Please check CI http://136.243.101.176:8080/job/carbondataprbuilder2.3/10324/

---