[ https://issues.apache.org/jira/browse/CARBONDATA-918?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

SWATI RAO closed CARBONDATA-918.
--------------------------------
    Resolution: Fixed

It's working fine now.
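For reference, a quick way to re-verify is to re-run the two queries from the repro below on the same table (a sketch only; the table and column names are taken from the original report, no additional test run is recorded here):

    select * from array_com;
    select array_int[0], array_int[0] + 10 as a from array_com;

With the fix in place, both should return rows instead of failing with the NullPointerException shown in the stack trace below.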
> Select query is not working for Complex datatype
> ------------------------------------------------
>
>                 Key: CARBONDATA-918
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-918
>             Project: CarbonData
>          Issue Type: Bug
>    Affects Versions: 1.0.0-incubating
>        Environment: Spark 2.1
>           Reporter: SWATI RAO
>           Assignee: Srigopal Mohanty
>        Attachments: Array.csv
>
>
> Select query is not working in CarbonData for complex datatypes:
>
> CARBONDATA:
>
> 0: jdbc:hive2://hadoop-master:10000> create table Array_com (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, ARRAY_INT array<int>, ARRAY_STRING array<string>, ARRAY_DATE array<timestamp>, CARD_COUNT int, DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT double, HQ_DEPOSIT double) STORED BY 'org.apache.carbondata.format';
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (3.567 seconds)
>
> 0: jdbc:hive2://hadoop-master:10000> LOAD DATA INPATH 'hdfs://192.168.2.145:54310/BabuStore/Data/complex/Array.csv' INTO table Array_com options ('DELIMITER'=',', 'QUOTECHAR'='"', 'FILEHEADER'='CUST_ID,YEAR,MONTH,AGE,GENDER,EDUCATED,IS_MARRIED,ARRAY_INT,ARRAY_STRING,ARRAY_DATE,CARD_COUNT,DEBIT_COUNT,CREDIT_COUNT,DEPOSIT,HQ_DEPOSIT', 'COMPLEX_DELIMITER_LEVEL_1'='$');
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (6.541 seconds)
>
> 0: jdbc:hive2://hadoop-master:10000> select array_int[0], array_int[0] + 10 as a from array_com;
> 0: jdbc:hive2://192.168.2.126:10000> select * from array_com;
> Error: org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 254.0 failed 1 times, most recent failure: Lost task 1.0 in stage 254.0 (TID 8631, localhost, executor driver): java.lang.NullPointerException
>     at org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.getLastModifiedTime(AbstractDFSCarbonFile.java:135)
>     at org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.isFileModified(AbstractDFSCarbonFile.java:210)
>     at org.apache.carbondata.core.cache.dictionary.AbstractDictionaryCache.isDictionaryMetaFileModified(AbstractDictionaryCache.java:119)
>     at org.apache.carbondata.core.cache.dictionary.AbstractDictionaryCache.checkAndLoadDictionaryData(AbstractDictionaryCache.java:159)
>     at org.apache.carbondata.core.cache.dictionary.ForwardDictionaryCache.getDictionary(ForwardDictionaryCache.java:195)
>     at org.apache.carbondata.core.cache.dictionary.ForwardDictionaryCache.get(ForwardDictionaryCache.java:71)
>     at org.apache.carbondata.core.cache.dictionary.ForwardDictionaryCache.get(ForwardDictionaryCache.java:40)
>     at org.apache.carbondata.hadoop.readsupport.impl.DictionaryDecodeReadSupport.initialize(DictionaryDecodeReadSupport.java:65)
>     at org.apache.carbondata.spark.readsupport.SparkRowReadSupportImpl.initialize(SparkRowReadSupportImpl.java:33)
>     at org.apache.carbondata.hadoop.CarbonRecordReader.initialize(CarbonRecordReader.java:76)
>     at org.apache.carbondata.spark.rdd.CarbonScanRDD.compute(CarbonScanRDD.scala:204)
>     at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
>     at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
>     at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
>     at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
>     at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
>     at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
>     at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
>     at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
>     at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
>     at org.apache.spark.scheduler.Task.run(Task.scala:99)
>     at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Driver stacktrace: (state=,code=0)
>
> HIVE:
>
> create table Array_com_h (CUST_ID string, YEAR int, MONTH int, AGE int, GENDER string, EDUCATED string, IS_MARRIED string, ARRAY_INT array<int>, ARRAY_STRING array<string>, ARRAY_DATE array<timestamp>, CARD_COUNT int, DEBIT_COUNT int, CREDIT_COUNT int, DEPOSIT double, HQ_DEPOSIT double);
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (6.541 seconds)
>
> load data local inpath '/opt/Carbon/CarbonData/TestData/Data/complex/Array.csv' INTO table Array_com_h;
> +---------+--+
> | Result  |
> +---------+--+
> +---------+--+
> No rows selected (6.541 seconds)
>
> 0: jdbc:hive2://192.168.2.126:10000> select * from Array_com_h;
> | CUST_ID | YEAR | MONTH | AGE | GENDER | EDUCATED | IS_MARRIED | ARRAY_INT | ARRAY_STRING | ARRAY_DATE | CARD_COUNT | DEBIT_COUNT | CREDIT_COUNT | DEPOSIT | HQ_DEPOSIT |
> | Cust00000000000000000000,2015,1,20,M,SSC,Y,1234$5678$9101$11121$12357,United Kingdom$England$Basildon$AAID001001$United Kingdom$England$Basildon$AD003001$AAID001001$United Kingdom$England$Basildon$AD003001$United Kingdom$England$Basildon$COUNTY00045,2015-01-01 00:00:00$2014-01-01 00:00:00$2013-01-01 00:00:00$2012-01-01 00:00:00$2011-01-01 00:00:00,21,55,58,337982404.6,989431364.6 | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL |
> | Cust00000000000000000001,2015,1,30,F,Degree,N,1235$5679$9102$11122$12358,United States$MO$Parkville$AAID001002$United States$MO$Parkville$AD003002$AAID001002$United States$MO$Parkville$AD003002$United States$MO$Parkville$COUNTY00046,2015-01-02 00:00:00$2014-01-02 00:00:00$2013-01-02 00:00:00$2012-01-02 00:00:00$2011-01-02 00:00:00,104,59,50,686815400.5,157442142.4 | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL |
> | Cust00000000000000000002,2015,1,40,M,graduation,D,1236$5680$9103$11123$12359,United States$OR$Astoria$AAID001003$United States$OR$Astoria$AD003003$AAID001003$United States$OR$Astoria$AD003003$United States$OR$Astoria$COUNTY00047,2015-01-03 00:00:00$2014-01-03 00:00:00$2013-01-03 00:00:00$2012-01-03 00:00:00$2011-01-03 00:00:00,141,190,145,106733870.5,182602141 | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL |
> | Cust00000000000000000003,2015,1,50,F,PG,Y,1237$5681$9104$11124$12360,Australia$Victoria$Echuca$AAID001004$Australia$Victoria$Echuca$AD003004$AAID001004$Australia$Victoria$Echuca$AD003004$Australia$Victoria$Echuca$COUNTY00048,2015-01-04 00:00:00$2014-01-04 00:00:00$2013-01-04 00:00:00$2012-01-04 00:00:00$2011-01-04 00:00:00,162,162,129,702614376.9,499071850.4 | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL | NULL |
> | Cust00000000000000000004,2015,1,60,M,MS,N,1238$5682$9105$11125$12361,United States$AL$Cahaba Heights$AAID001005$United States$AL$Cahaba Heights$AD003005$AAID001005$United States$AL$Cahaba Heights$AD003005$United States$AL$Cahaba Heights$COUNTY00049,2015-01-05 00:00:00$2014-01-05 00:00:00$2013-01-05 00:00:00$2012-01-05 00:00:00$2011-01-05 00:00:00,35,139,93,469745206.2,480746358.2

--
This message was sent by Atlassian JIRA
(v6.3.15#6346)