[jira] [Updated] (CARBONDATA-1157) Dependency conflict when executing load query on multinode cluster with Spark 1.6

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

[jira] [Updated] (CARBONDATA-1157) Dependency conflict when executing load query on multinode cluster with Spark 1.6

Akash R Nilugal (Jira)

     [ https://issues.apache.org/jira/browse/CARBONDATA-1157?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

SWATI RAO updated CARBONDATA-1157:
----------------------------------
    Description:
Steps to Reproduce:
Use a multi-node cluster to reproduce this issue, as it works fine on a single node.

*1. Create table :*
create table myvmall (imei String,uuid String,MAC String,device_color String,device_shell_color String,device_name String,product_name String,ram String,rom String,cpu_clock String,series String,check_date String,check_year int,check_month int ,check_day int,check_hour int,bom String,inside_name String,packing_date String,packing_year String,packing_month String,packing_day String,packing_hour String,customer_name String,deliveryAreaId String,deliveryCountry String,deliveryProvince String,deliveryCity String,deliveryDistrict String,packing_list_no String,order_no String,Active_check_time String,Active_check_year int,Active_check_month int,Active_check_day int,Active_check_hour int,ActiveAreaId String,ActiveCountry String,ActiveProvince String,Activecity String,ActiveDistrict String,Active_network String,Active_firmware_version String,Active_emui_version String,Active_os_version String,Latest_check_time String,Latest_check_year int,Latest_check_month int,Latest_check_day int,Latest_check_hour int,Latest_areaId String,Latest_country String,Latest_province String,Latest_city String,Latest_district String,Latest_firmware_version String,Latest_emui_version String,Latest_os_version String,Latest_network String,site String,site_desc String,product String,product_desc String) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES ('DICTIONARY_INCLUDE'='check_year,check_month,check_day,check_hour,Active_check_year,Active_check_month,Active_check_day,Active_check_hour,Latest_check_year,Latest_check_month,Latest_check_day')

*2. Load Data in above table:*
LOAD DATA INPATH 'HDFS_URL/BabuStore/Data/100_VMALL_1_Day_DATA_2015-09-15.csv' INTO table myvmall options('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,uuid,MAC,device_color,device_shell_color,device_name,product_name,ram,rom,cpu_clock,series,check_date,check_year,check_month,check_day,check_hour,bom,inside_name,packing_date,packing_year,packing_month,packing_day,packing_hour,customer_name,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,packing_list_no,order_no,Active_check_time,Active_check_year,Active_check_month,Active_check_day,Active_check_hour,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,Active_network,Active_firmware_version,Active_emui_version,Active_os_version,Latest_check_time,Latest_check_year,Latest_check_month,Latest_check_day,Latest_check_hour,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_firmware_version,Latest_emui_version,Latest_os_version,Latest_network,site,site_desc,product,product_desc')



java.io.InvalidClassException: org.apache.carbondata.spark.DataLoadResultImpl; local class incompatible: stream classdesc serialVersionUID = 459643937457370671, local class serialVersionUID = 870423879879520920
     [exec] at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
     [exec] at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1630)
     [exec] at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
     [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
     [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
     [exec] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
     [exec] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
     [exec] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
     [exec] at org.apache.spark.scheduler.Task.run(Task.scala:89)
     [exec] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
     [exec] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
     [exec] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
     [exec] at java.lang.Thread.run(Thread.java:745)
     [exec]
     [exec] Driver stacktrace:

  was:


java.io.InvalidClassException: org.apache.carbondata.spark.DataLoadResultImpl; local class incompatible: stream classdesc serialVersionUID = 459643937457370671, local class serialVersionUID = 870423879879520920
     [exec] at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
     [exec] at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1630)
     [exec] at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
     [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
     [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
     [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
     [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
     [exec] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
     [exec] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
     [exec] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
     [exec] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
     [exec] at org.apache.spark.scheduler.Task.run(Task.scala:89)
     [exec] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
     [exec] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
     [exec] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
     [exec] at java.lang.Thread.run(Thread.java:745)
     [exec]
     [exec] Driver stacktrace:


> Dependency conflict when executing load query on multinode cluster with Spark 1.6
> ---------------------------------------------------------------------------------
>
>                 Key: CARBONDATA-1157
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-1157
>             Project: CarbonData
>          Issue Type: Bug
>          Components: data-load
>    Affects Versions: 1.1.0
>         Environment: Spark 1.6
>            Reporter: SWATI RAO
>
> Steps to Reproduce:
> Use a multi-node cluster to reproduce this issue, as it works fine on a single node.
> *1. Create table :*
> create table myvmall (imei String,uuid String,MAC String,device_color String,device_shell_color String,device_name String,product_name String,ram String,rom String,cpu_clock String,series String,check_date String,check_year int,check_month int ,check_day int,check_hour int,bom String,inside_name String,packing_date String,packing_year String,packing_month String,packing_day String,packing_hour String,customer_name String,deliveryAreaId String,deliveryCountry String,deliveryProvince String,deliveryCity String,deliveryDistrict String,packing_list_no String,order_no String,Active_check_time String,Active_check_year int,Active_check_month int,Active_check_day int,Active_check_hour int,ActiveAreaId String,ActiveCountry String,ActiveProvince String,Activecity String,ActiveDistrict String,Active_network String,Active_firmware_version String,Active_emui_version String,Active_os_version String,Latest_check_time String,Latest_check_year int,Latest_check_month int,Latest_check_day int,Latest_check_hour int,Latest_areaId String,Latest_country String,Latest_province String,Latest_city String,Latest_district String,Latest_firmware_version String,Latest_emui_version String,Latest_os_version String,Latest_network String,site String,site_desc String,product String,product_desc String) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES ('DICTIONARY_INCLUDE'='check_year,check_month,check_day,check_hour,Active_check_year,Active_check_month,Active_check_day,Active_check_hour,Latest_check_year,Latest_check_month,Latest_check_day')
> *2. Load Data in above table:*
> LOAD DATA INPATH 'HDFS_URL/BabuStore/Data/100_VMALL_1_Day_DATA_2015-09-15.csv' INTO table myvmall options('DELIMITER'=',', 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='imei,uuid,MAC,device_color,device_shell_color,device_name,product_name,ram,rom,cpu_clock,series,check_date,check_year,check_month,check_day,check_hour,bom,inside_name,packing_date,packing_year,packing_month,packing_day,packing_hour,customer_name,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,packing_list_no,order_no,Active_check_time,Active_check_year,Active_check_month,Active_check_day,Active_check_hour,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,Active_network,Active_firmware_version,Active_emui_version,Active_os_version,Latest_check_time,Latest_check_year,Latest_check_month,Latest_check_day,Latest_check_hour,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_firmware_version,Latest_emui_version,Latest_os_version,Latest_network,site,site_desc,product,product_desc')
> java.io.InvalidClassException: org.apache.carbondata.spark.DataLoadResultImpl; local class incompatible: stream classdesc serialVersionUID = 459643937457370671, local class serialVersionUID = 870423879879520920
>      [exec] at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
>      [exec] at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1630)
>      [exec] at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
>      [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
>      [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>      [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>      [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>      [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>      [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>      [exec] at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>      [exec] at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>      [exec] at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>      [exec] at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>      [exec] at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>      [exec] at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:76)
>      [exec] at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:115)
>      [exec] at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)
>      [exec] at org.apache.spark.scheduler.Task.run(Task.scala:89)
>      [exec] at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
>      [exec] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>      [exec] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>      [exec] at java.lang.Thread.run(Thread.java:745)
>      [exec]
>      [exec] Driver stacktrace:



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)