Github user ajithme commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699308

--- Diff: hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonInputSplit.java ---
@@ -444,4 +444,16 @@ public void setFormat(FileFormat fileFormat) {
   public Blocklet makeBlocklet() {
     return new Blocklet(getPath().getName(), blockletId);
   }
+
+  public String[] preferredLocations() {
--- End diff --

The super class field FileSplit.file is not serializable (refer HADOOP-13519), so after Java serialization this may return an empty result.

---
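A minimal sketch of the concern: if preferredLocations() simply delegates to the inherited FileSplit state, it must tolerate the fields lost to Java serialization. The fallback below is illustrative only, not the actual patch in this PR.

  public String[] preferredLocations() {
    try {
      // FileSplit's path/hosts fields are not Java-serializable (HADOOP-13519),
      // so after plain Java serialization there may be nothing to return here
      String[] locations = super.getLocations();
      return locations == null ? new String[0] : locations;
    } catch (IOException e) {
      // degrade to "no locality preference" instead of failing the scan
      return new String[0];
    }
  }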
Github user ajithme commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699345

--- Diff: store/sdk/src/main/java/org/apache/carbondata/sdk/store/BlockScanUnit.java ---
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.store;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.hadoop.CarbonInputSplit;
+
+/**
+ * It contains a block to scan, and a destination worker who should scan it
+ */
+@InterfaceAudience.Internal
+public class BlockScanUnit implements ScanUnit {
+
+  // the data block to scan
+  private CarbonInputSplit inputSplit;
+
+  // the worker who should scan this unit
+  private Schedulable schedulable;
--- End diff --

Add this field to the Writable write/readFields methods, else it will be null after deserialization.

---
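A sketch of what serializing the field could look like, assuming Schedulable is itself Writable with a no-arg constructor and that Carbon's Writable interface mirrors Hadoop's write/readFields pair (all assumptions; the merged code may differ):

  @Override
  public void write(DataOutput out) throws IOException {
    inputSplit.write(out);
    // also serialize the worker assignment, otherwise it is lost on the wire
    schedulable.write(out);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    inputSplit = new CarbonInputSplit();
    inputSplit.readFields(in);
    // restore the worker; without this the field stays null after deserialization
    schedulable = new Schedulable();
    schedulable.readFields(in);
  }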
Github user ajithme commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699358

--- Diff: store/sdk/src/main/java/org/apache/carbondata/sdk/store/ScanUnit.java ---
@@ -15,26 +15,27 @@
  * limitations under the License.
  */
-package org.apache.carbondata.store.impl.rpc;
+package org.apache.carbondata.sdk.store;
-import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.store.impl.rpc.model.BaseResponse;
-import org.apache.carbondata.store.impl.rpc.model.LoadDataRequest;
-import org.apache.carbondata.store.impl.rpc.model.QueryResponse;
-import org.apache.carbondata.store.impl.rpc.model.Scan;
-import org.apache.carbondata.store.impl.rpc.model.ShutdownRequest;
-import org.apache.carbondata.store.impl.rpc.model.ShutdownResponse;
-
-import org.apache.hadoop.ipc.VersionedProtocol;
-
-@InterfaceAudience.Internal
-public interface StoreService extends VersionedProtocol {
-
-  long versionID = 1L;
+import java.io.Serializable;
-  BaseResponse loadData(LoadDataRequest request);
-
-  QueryResponse query(Scan scan);
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.metadata.schema.table.Writable;
-  ShutdownResponse shutdown(ShutdownRequest request);
+/**
+ * An unit for the scanner in Carbon Store
+ */
+@InterfaceAudience.User
+@InterfaceStability.Unstable
+public interface ScanUnit<T> extends Serializable, Writable {
--- End diff --

The generic type parameter can be removed.

---
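In other words (a sketch, assuming no member of the interface actually refers to T):

  // Before: the type parameter is never referenced by any member of the interface
  public interface ScanUnit<T> extends Serializable, Writable {
    // ... members unchanged ...
  }

  // After: the unused parameter is dropped, so call sites need no raw types
  public interface ScanUnit extends Serializable, Writable {
    // ... members unchanged ...
  }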
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699719

--- Diff: hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonInputSplit.java ---
@@ -444,4 +444,16 @@ public void setFormat(FileFormat fileFormat) {
   public Blocklet makeBlocklet() {
     return new Blocklet(getPath().getName(), blockletId);
   }
+
+  public String[] preferredLocations() {
--- End diff --

fixed

---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699726

--- Diff: store/sdk/src/main/java/org/apache/carbondata/sdk/store/BlockScanUnit.java ---
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.store;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.hadoop.CarbonInputSplit;
+
+/**
+ * It contains a block to scan, and a destination worker who should scan it
+ */
+@InterfaceAudience.Internal
+public class BlockScanUnit implements ScanUnit {
+
+  // the data block to scan
+  private CarbonInputSplit inputSplit;
+
+  // the worker who should scan this unit
+  private Schedulable schedulable;
--- End diff --

fixed

---
Github user jackylk commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207699730

--- Diff: store/sdk/src/main/java/org/apache/carbondata/sdk/store/ScanUnit.java ---
@@ -15,26 +15,27 @@
  * limitations under the License.
  */
-package org.apache.carbondata.store.impl.rpc;
+package org.apache.carbondata.sdk.store;
-import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.store.impl.rpc.model.BaseResponse;
-import org.apache.carbondata.store.impl.rpc.model.LoadDataRequest;
-import org.apache.carbondata.store.impl.rpc.model.QueryResponse;
-import org.apache.carbondata.store.impl.rpc.model.Scan;
-import org.apache.carbondata.store.impl.rpc.model.ShutdownRequest;
-import org.apache.carbondata.store.impl.rpc.model.ShutdownResponse;
-
-import org.apache.hadoop.ipc.VersionedProtocol;
-
-@InterfaceAudience.Internal
-public interface StoreService extends VersionedProtocol {
-
-  long versionID = 1L;
+import java.io.Serializable;
-  BaseResponse loadData(LoadDataRequest request);
-
-  QueryResponse query(Scan scan);
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.metadata.schema.table.Writable;
-  ShutdownResponse shutdown(ShutdownRequest request);
+/**
+ * An unit for the scanner in Carbon Store
+ */
+@InterfaceAudience.User
+@InterfaceStability.Unstable
+public interface ScanUnit<T> extends Serializable, Writable {
--- End diff --

fixed

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/7777/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/6501/

---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2589

SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/6159/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/6506/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/7781/

---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2589

SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/6164/

---
Github user ajithme commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2589#discussion_r207704316

--- Diff: hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonInputSplit.java ---
@@ -252,9 +262,24 @@ public Segment getSegment() {
     if (dataMapWriterPathExists) {
       dataMapWritePath = in.readUTF();
     }
+    boolean filePathExists = in.readBoolean();
+    if (filePathExists) {
+      filePath = in.readUTF();
+    } else {
+      filePath = super.getPath().toString();
+    }
   }

   @Override public void write(DataOutput out) throws IOException {
+    if (super.getPath() != null) {
+      super.write(out);
+    } else {
+      // see HADOOP-13519, after Java deserialization, super.filePath is
+      // null, so write our filePath instead
+      Text.writeString(out, filePath);
+      out.writeLong(getStart());
+      out.writeLong(getLength());
+    }
     super.write(out);
--- End diff --

This super.write(out) line can be deleted; otherwise the split header is written twice and the subsequent read will fail.

---
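Roughly how write() reads with that suggestion applied, a sketch derived from the diff above with the remaining Carbon-specific fields elided (not necessarily the exact code that was merged):

  @Override public void write(DataOutput out) throws IOException {
    if (super.getPath() != null) {
      super.write(out);
    } else {
      // HADOOP-13519: after Java deserialization super's path is null, so emit
      // the same header (path, start, length) from our own filePath field
      Text.writeString(out, filePath);
      out.writeLong(getStart());
      out.writeLong(getLength());
    }
    // no unconditional super.write(out) here: writing the header a second time
    // would desynchronize the stream and make the matching readFields() fail
    // ... remaining Carbon-specific fields are written below (elided) ...
  }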
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/7782/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/6507/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/7783/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/6508/

---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2589

SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/6165/

---
Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2589

SDV Build Fail, Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/6166/

---
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2589

Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/7784/

---