jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r362792665

File path: hadoop/pom.xml

    @@ -30,6 +30,7 @@
       <name>Apache CarbonData :: Hadoop</name>
       <properties>
    +    <hive.version>1.2.1</hive.version>

Review comment: Add this in parent pom.xml
Indhumathi27 commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r362794647

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (new file added by the PR; excerpt, license header and imports elided)

    /**
     * @description: hive expression to carbon expression
     */
    public class Hive2CarbonExpression {
      public static final int left = 0;
      public static final int right = 1;
      private static final Logger LOG =
          LogServiceFactory.getLogService(CarbonInputFormat.class.getName());

      public static Expression convertExprHive2Carbon(ExprNodeDesc exprNodeDesc) {
        if (exprNodeDesc instanceof ExprNodeGenericFuncDesc) {
          ExprNodeGenericFuncDesc exprNodeGenericFuncDesc = (ExprNodeGenericFuncDesc) exprNodeDesc;
          GenericUDF udf = exprNodeGenericFuncDesc.getGenericUDF();
          List<ExprNodeDesc> ll = exprNodeGenericFuncDesc.getChildren();
          if (udf instanceof GenericUDFIn) {
            ColumnExpression columnExpression = new ColumnExpression(ll.get(left).getCols().get(left),
                getDateType(ll.get(left).getTypeString()));
            List<Expression> listExpr = new ArrayList<>();
            for (int i = right; i < ll.size(); i++) {
              LiteralExpression literalExpression = new LiteralExpression(ll.get(i).getExprString(),
                  getDateType(ll.get(left).getTypeString()));
              listExpr.add(literalExpression);
            }
            ListExpression listExpression = new ListExpression(listExpr);
            return new InExpression(columnExpression, listExpression);
          } else if (udf instanceof GenericUDFOPEqual) {
            ColumnExpression columnExpression = new ColumnExpression(ll.get(left).getCols().get(left),
                getDateType(ll.get(left).getTypeString()));
            LiteralExpression literalExpression =
                new LiteralExpression(ll.get(right).getExprString().replace("'", ""),
                    getDateType(ll.get(right).getTypeString()));
            return new EqualToExpression(columnExpression, literalExpression);
          }
          // GenericUDFOPOr / GenericUDFOPAnd recurse on both children and return
          // OrExpression / AndExpression; GenericUDFOPEqualOrGreaterThan, GenericUDFOPGreaterThan,
          // GenericUDFOPNotEqual, GenericUDFOPEqualOrLessThan, GenericUDFOPLessThan,
          // GenericUDFOPNull and GenericUDFOPNotNull follow the same column/literal pattern
          // as the equality branch above (branches elided here)
          else {
            LOG.error("error:not find type" + udf.toString());
          }
        }
        return null;
      }

      public static DataType getDateType(String type) {
        if (type.toUpperCase().endsWith("INT")) {
          return DataTypes.INT;
        } else if (type.toUpperCase().endsWith("STRING")) {
          return DataTypes.STRING;
        } else if (type.toUpperCase().endsWith("LONG")) {
          return DataTypes.LONG;
        } else if (type.toUpperCase().endsWith("FLOAT")) {
          return DataTypes.FLOAT;
        } else if (type.toUpperCase().endsWith("DATE")) {
          return DataTypes.DATE;
        } else if (type.toUpperCase().endsWith("TIMESTAMP")) {
          return DataTypes.TIMESTAMP;
        } else if (type.toUpperCase().endsWith("DOUBLE")) {
          return DataTypes.DOUBLE;
        } else if (type.toUpperCase().endsWith("NULL")) {
          return DataTypes.NULL;
        } else if (type.toUpperCase().endsWith("BYTE")) {
          return DataTypes.BYTE;
        } else if (type.toUpperCase().endsWith("SHORT_INT")) {
          return DataTypes.SHORT_INT;
        } else if (type.toUpperCase().endsWith("VARCHAR")) {
          return DataTypes.VARCHAR;
        }
        return null;
      }
    }

Review comment: Is hive pushdown unsupported for complex data types?
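To make the conversion concrete, here is a minimal, hypothetical usage sketch: it builds the Hive expression tree for a predicate like name = 'carbon' by hand (the column name, table alias and literal are made up for illustration) and runs it through the new converter. It assumes only the hive-exec ExprNode classes quoted above and the class added by this PR.

    import java.util.Arrays;

    import org.apache.carbondata.core.scan.expression.Expression;
    import org.apache.carbondata.hadoop.util.Hive2CarbonExpression;

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class Hive2CarbonExpressionExample {
      public static void main(String[] args) {
        // Hive expression tree for: name = 'carbon' (column and literal are illustrative)
        ExprNodeDesc column =
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "name", "t", false);
        ExprNodeDesc literal =
            new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "carbon");
        ExprNodeGenericFuncDesc equalsNode = new ExprNodeGenericFuncDesc(
            TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(),
            Arrays.asList(column, literal));

        // The converter walks the GenericUDF tree and returns the matching Carbon
        // expression (here an EqualToExpression); unsupported shapes yield null.
        Expression carbonExpression = Hive2CarbonExpression.convertExprHive2Carbon(equalsNode);
        System.out.println(carbonExpression);
      }
    }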
CarbonDataQA1 commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570565307

Build Failed with Spark 2.2.1, Please check CI http://121.244.95.60:12545/job/ApacheCarbonPRBuilder2.2/1434/
CarbonDataQA1 commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570574899

Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12545/job/ApacheCarbonPRBuilder2.3/1446/
QiangCai commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570737447

retest this please
CarbonDataQA1 commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570746005

Build Failed with Spark 2.3.4, Please check CI http://121.244.95.60:12545/job/ApacheCarbonPRBuilder2.3/1458/
xiaohui0318 commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570791458

retest this please
xiaohui0318 commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363039182

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (quotes the same getDateType excerpt shown above, ending at its final "return null")

Review comment: Not supported.
CarbonDataQA1 commented on issue #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#issuecomment-570797724

Build Success with Spark 2.3.4, Please check CI http://121.244.95.60:12545/job/ApacheCarbonPRBuilder2.3/1460/
jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363070688

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java

    @@ -455,7 +459,26 @@ protected DataMapFilter getFilterPredicates(Configuration configuration) {
         try {
           String filterExprString = configuration.get(FILTER_PREDICATE);
           if (filterExprString == null) {
    -        return null;
    +        String expr = configuration.get(TableScanDesc.FILTER_EXPR_CONF_STR);
    +        if (expr == null) {
    +          return null;
    +        }
    +        ExprNodeGenericFuncDesc exprNodeGenericFuncDesc =
    +            Utilities.deserializeObject(expr, ExprNodeGenericFuncDesc.class);
    +        LOG.debug("hive expression:" + exprNodeGenericFuncDesc.getGenericUDF());
    +        LOG.debug("hive expression string:" + exprNodeGenericFuncDesc.getExprString());
    +        Expression expression =
    +            Hive2CarbonExpression.convertExprHive2Carbon(exprNodeGenericFuncDesc);

Review comment: Can we do this filter expression conversion in MapredCarbonInputFormat.java (in the carbondata-hive module) and set FILTER_PREDICATE in the Hadoop configuration, so that we do not need to add the hive-exec dependency to the carbondata-hadoop module?
jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363070758

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (quotes the updated version of the file, now 183 lines; the comment is on the type-mapping helper)

    public static DataType getDateType(String type) {

Review comment: Can you use `DataTypeUtil.valueOf` instead of creating this function?
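A sketch of the suggested simplification. The reviewer names DataTypeUtil.valueOf but not its package or exact behaviour, so the import, the argument normalization and the exception handling below are assumptions, not the final change:

    import org.apache.carbondata.core.metadata.datatype.DataType;
    import org.apache.carbondata.core.util.DataTypeUtil;  // assumed location of valueOf(String)

    public final class HiveTypeMapping {
      private HiveTypeMapping() { }

      /**
       * Delegates the Hive type-name to Carbon DataType mapping to DataTypeUtil.valueOf
       * instead of a hand-written if/else chain. The null fallback mirrors the current
       * getDateType behaviour for unrecognized (e.g. complex) types.
       */
      public static DataType toCarbonType(String hiveTypeString) {
        try {
          return DataTypeUtil.valueOf(hiveTypeString.toUpperCase());
        } catch (RuntimeException e) {
          return null;  // complex or unrecognized types are not pushed down
        }
      }
    }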
jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363070688

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java (same getFilterPredicates hunk as quoted above)

Review comment: Can we do this filter expression conversion in MapredCarbonInputFormat.java (in the carbondata-hive module) and set FILTER_PREDICATE in the Hadoop configuration, so that we do not need to add the hive-exec dependency to the carbondata-hadoop/flink/presto modules?
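A rough sketch of the shape of the suggested refactor. The hand-over of the converted expression is injected as a callback because the exact setter for FILTER_PREDICATE is not shown in this thread; the class and method names are illustrative only:

    import java.io.IOException;
    import java.util.function.BiConsumer;

    import org.apache.carbondata.core.scan.expression.Expression;
    import org.apache.carbondata.hadoop.util.Hive2CarbonExpression;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.exec.Utilities;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.plan.TableScanDesc;

    public class HiveFilterPushDownSketch {

      /**
       * Would live in the carbondata-hive module (e.g. MapredCarbonInputFormat), so that
       * carbondata-hadoop never depends on hive-exec: read the serialized Hive filter,
       * convert it, and hand the resulting Carbon expression to the generic input format.
       */
      static void pushDownHiveFilter(Configuration configuration,
          BiConsumer<Configuration, Expression> setCarbonFilter) throws IOException {
        String serializedExpr = configuration.get(TableScanDesc.FILTER_EXPR_CONF_STR);
        if (serializedExpr == null) {
          return;  // nothing to push down
        }
        ExprNodeGenericFuncDesc hiveExpr =
            Utilities.deserializeObject(serializedExpr, ExprNodeGenericFuncDesc.class);
        Expression carbonExpr = Hive2CarbonExpression.convertExprHive2Carbon(hiveExpr);
        if (carbonExpr != null) {
          setCarbonFilter.accept(configuration, carbonExpr);
        }
      }
    }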
jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363070899

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (comment on the class declaration)

Review comment: Can we add this in the carbondata-hive module?
jackylk commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363070934

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (comment on the complex-type check added to the equality branch)

    } else if (udf instanceof GenericUDFOPEqual) {
      ColumnExpression columnExpression = null;
      if (ll.get(left) instanceof ExprNodeFieldDesc) {
        LOG.debug("Complex types are not supported");
        return null;
      } else {
        columnExpression = new ColumnExpression(ll.get(left).getCols().get(left),
            getDateType(ll.get(left).getTypeString()));
      }

Review comment: Better to throw an exception to indicate it is not supported.
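One way to act on this, sketched as a small helper; the helper name is made up, and whether an unchecked UnsupportedOperationException (rather than a Carbon-specific exception type) is the right choice is left to the PR author:

    import org.apache.carbondata.core.metadata.datatype.DataType;
    import org.apache.carbondata.core.scan.expression.ColumnExpression;

    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;

    final class ColumnExpressions {
      private ColumnExpressions() { }

      /**
       * Builds the Carbon ColumnExpression for the left-hand side of a comparison,
       * throwing instead of silently returning null when the column is a complex-type field.
       */
      static ColumnExpression leftColumn(ExprNodeDesc leftNode, DataType carbonType) {
        if (leftNode instanceof ExprNodeFieldDesc) {
          throw new UnsupportedOperationException(
              "Hive filter pushdown is not supported for complex (struct/array/map) columns: "
                  + leftNode.getExprString());
        }
        return new ColumnExpression(leftNode.getCols().get(0), carbonType);
      }
    }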
xiaohui0318 commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363280770

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java (same getFilterPredicates hunk as quoted above)

Review comment: ok
dhatchayani commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363644468

File path: hadoop/src/test/java/org/apache/carbondata/hadoop/ft/Hive2CarbonExpressionTest.java (new 354-line test class; the comment is on its imports from the carbondata-hadoop and carbondata-processing modules)

Review comment: Can you please move this test class and the utility Hive2CarbonExpression to the Hive module?
dhatchayani commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363647514

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java (comment on the new LOG.debug calls in getFilterPredicates)

    LOG.debug("hive expression:" + exprNodeGenericFuncDesc.getGenericUDF());

Review comment: please add isDebugEnabled() check
dhatchayani commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363651256

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java

    List<ExprNodeDesc> ll = exprNodeGenericFuncDesc.getChildren();

Review comment: please rename `ll` and add comments for better readability
dhatchayani commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363647514

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java (same LOG.debug lines as in the previous comment on this file)

Review comment: please add isDebugEnabled() check, here and all other places
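A small sketch of the requested guard, applied to the two debug statements from the quoted getFilterPredicates hunk (the logger is log4j's, as in the PR):

    // Guard the string concatenation so its cost is only paid when debug logging is enabled.
    if (LOG.isDebugEnabled()) {
      LOG.debug("hive expression:" + exprNodeGenericFuncDesc.getGenericUDF());
      LOG.debug("hive expression string:" + exprNodeGenericFuncDesc.getExprString());
    }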
dhatchayani commented on a change in pull request #3557: [CARBONDATA-3649] Hive expression is pushed down to carbon
URL: https://github.com/apache/carbondata/pull/3557#discussion_r363651256

File path: hadoop/src/main/java/org/apache/carbondata/hadoop/util/Hive2CarbonExpression.java (same line as in the previous comment on this file)

Review comment: please rename `ll` and add comments for better readability; add comments to the method also
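A brief sketch of the kind of renaming and commenting being asked for, applied to the quoted line of convertExprHive2Carbon; `children` is just one candidate name and the comment wording is illustrative (a short javadoc on the method itself would cover the second part of the request):

    // Children of the Hive generic UDF node: children.get(left) is the column
    // reference, the remaining children (from index right) are the literal operands.
    List<ExprNodeDesc> children = exprNodeGenericFuncDesc.getChildren();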