     [ https://issues.apache.org/jira/browse/CARBONDATA-642?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ravindra Pesala resolved CARBONDATA-642.
----------------------------------------
       Resolution: Fixed
    Fix Version/s: 1.1.0

> Delete Subquery is not working while creating and loading 2 tables
> -------------------------------------------------------------------
>
>                 Key: CARBONDATA-642
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-642
>             Project: CarbonData
>          Issue Type: Bug
>    Affects Versions: 1.0.0-incubating
>            Reporter: Vyom Rastogi
>            Assignee: Vinod KC
>            Priority: Minor
>              Labels: Spark-1.6
>             Fix For: 1.1.0
>
>          Time Spent: 1h 20m
>  Remaining Estimate: 0h
>
> Create table uniqdata:
> CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,INTEGER_COLUMN1,CUST_ID');
>
> Create table uniqdata1:
> CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,INTEGER_COLUMN1,CUST_ID');
>
> Load the data into uniqdata:
> LOAD DATA INPATH 'hdfs://hacluster/vyom/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
>
> Load the data into uniqdata1:
> LOAD DATA INPATH 'hdfs://hacluster/vyom/2000_UniqData.csv' into table uniqdata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1');
>
> The delete subquery then fails with the below error:
> Error: org.apache.spark.sql.AnalysisException:
> Unsupported language features in query: select tupleId from uniqdata1 a where a.CUST_ID in (Select b.CUST_ID from (Select c.CUST_ID from uniqdata c ) b)
> TOK_QUERY 1, 0,45, 20
>   TOK_FROM 1, 4,8, 20
>     TOK_TABREF 1, 6,8, 20
>       TOK_TABNAME 1, 6,6, 20
>         uniqdata1 1, 6,6, 20
>       a 1, 8,8, 30
>   TOK_INSERT 0, -1,45, 0
>     TOK_DESTINATION 0, -1,-1, 0
>       TOK_DIR 0, -1,-1, 0
>         TOK_TMP_FILE 0, -1,-1, 0
>     TOK_SELECT 1, 0,2, 7
>       TOK_SELEXPR 1, 2,2, 7
>         TOK_TABLE_OR_COL 1, 2,2, 7
>           tupleId 1, 2,2, 7
>     TOK_WHERE 1, 10,45, 49
>       TOK_SUBQUERY_EXPR 1, 13,45, 49
>         TOK_SUBQUERY_OP 1, 17,17, 49
>           in 1, 17,17, 49
>         TOK_QUERY 1, 19,45, 99
>           TOK_FROM 1, 26,44, 99
>             TOK_SUBQUERY 1, 28,44, 99
>               TOK_QUERY 1, 29,40, 99
>                 TOK_FROM 1, 35,40, 99
>                   TOK_TABREF 1, 38,40, 99
>                     TOK_TABNAME 1, 38,38, 99
>                       uniqdata 1, 38,38, 99
>                     c 1, 40,40, 108
>                 TOK_INSERT 0, -1,33, 0
>                   TOK_DESTINATION 0, -1,-1, 0
>                     TOK_DIR 0, -1,-1, 0
>                       TOK_TMP_FILE 0, -1,-1, 0
>                   TOK_SELECT 1, 29,33, 84
>                     TOK_SELEXPR 1, 31,33, 84
>                       . 1, 31,33, 84
>                         TOK_TABLE_OR_COL 1, 31,31, 83
>                           c 1, 31,31, 83
>                         CUST_ID 1, 33,33, 85
>               b 1, 44,44, 112
>           TOK_INSERT 0, -1,24, 0
>             TOK_DESTINATION 0, -1,-1, 0
>               TOK_DIR 0, -1,-1, 0
>                 TOK_TMP_FILE 0, -1,-1, 0
>             TOK_SELECT 1, 20,24, 61
>               TOK_SELEXPR 1, 22,24, 61
>
> Csv: 2000_UniqData.csv

--
This message was sent by Atlassian JIRA
(v6.3.15#6346)
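Note that the DELETE statement itself is not quoted in the report; only the query it is internally rewritten to (the "select tupleId from uniqdata1 a ..." form) appears in the AnalysisException. Inferred from that rewritten query, the failing statement was presumably of the form below; this is a reconstruction under that assumption, not the reporter's verbatim SQL.

-- Hypothetical reconstruction of the failing delete-with-subquery statement,
-- inferred from the rewritten query shown in the AnalysisException above.
DELETE FROM uniqdata1 a
WHERE a.CUST_ID IN (SELECT b.CUST_ID FROM (SELECT c.CUST_ID FROM uniqdata c) b);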