[GitHub] [carbondata] xubo245 commented on a change in pull request #3819: [CARBONDATA-3855]support carbon SDK to load data from different files

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

[GitHub] [carbondata] xubo245 commented on a change in pull request #3819: [CARBONDATA-3855]support carbon SDK to load data from different files

GitBox

xubo245 commented on a change in pull request #3819:
URL: https://github.com/apache/carbondata/pull/3819#discussion_r464179885



##########
File path: sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
##########
@@ -660,13 +1102,41 @@ public CarbonWriter build() throws IOException, InvalidLoadOptionException {
       // removed from the load. LoadWithoutConverter flag is going to point to the Loader Builder
       // which will skip Conversion Step.
       loadModel.setLoadWithoutConverterStep(true);
-      return new AvroCarbonWriter(loadModel, hadoopConf, this.avroSchema);
+      AvroCarbonWriter avroCarbonWriter = new AvroCarbonWriter(loadModel,
+          hadoopConf, this.avroSchema);
+      if (this.filePath != null && this.filePath.length() != 0) {
+        avroCarbonWriter.setDataFiles(this.dataFiles);
+      }
+      return avroCarbonWriter;
     } else if (this.writerType == WRITER_TYPE.JSON) {
       loadModel.setJsonFileLoad(true);
-      return new JsonCarbonWriter(loadModel, hadoopConf);
+      JsonCarbonWriter jsonCarbonWriter = new JsonCarbonWriter(loadModel, hadoopConf);
+      if (this.filePath != null && this.filePath.length() != 0) {
+        jsonCarbonWriter.setDataFiles(this.dataFiles);
+      }
+      return jsonCarbonWriter;
+    } else if (this.writerType == WRITER_TYPE.PARQUET) {
+      loadModel.setLoadWithoutConverterStep(true);
+      AvroCarbonWriter avroCarbonWriter = new AvroCarbonWriter(loadModel,
+          hadoopConf, this.avroSchema);
+      ParquetCarbonWriter parquetCarbonWriter = new ParquetCarbonWriter(avroCarbonWriter);
+      parquetCarbonWriter.setDataFiles(this.dataFiles);
+      return parquetCarbonWriter;
+    } else if (this.writerType == WRITER_TYPE.ORC) {
+      CSVCarbonWriter csvCarbonWriter = new CSVCarbonWriter(loadModel, hadoopConf);
+      ORCCarbonWriter orcCarbonWriter = new ORCCarbonWriter(csvCarbonWriter);
+      orcCarbonWriter.setDataFiles(this.dataFiles);
+      return orcCarbonWriter;
     } else {
       // CSV
-      return new CSVCarbonWriter(loadModel, hadoopConf);
+      CSVCarbonWriter csvCarbonWriter = new CSVCarbonWriter(loadModel, hadoopConf);
+      if (this.filePath != null && this.filePath.length() != 0) {

Review comment:
       Please use `!StringUtils.isEmpty(this.filePath)` (or `StringUtils.isNotEmpty(this.filePath)`) instead of the manual `this.filePath != null && this.filePath.length() != 0` check.

##########
File path: sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
##########
@@ -660,13 +1102,41 @@ public CarbonWriter build() throws IOException, InvalidLoadOptionException {
       // removed from the load. LoadWithoutConverter flag is going to point to the Loader Builder
       // which will skip Conversion Step.
       loadModel.setLoadWithoutConverterStep(true);
-      return new AvroCarbonWriter(loadModel, hadoopConf, this.avroSchema);
+      AvroCarbonWriter avroCarbonWriter = new AvroCarbonWriter(loadModel,
+          hadoopConf, this.avroSchema);
+      if (this.filePath != null && this.filePath.length() != 0) {
+        avroCarbonWriter.setDataFiles(this.dataFiles);
+      }
+      return avroCarbonWriter;
     } else if (this.writerType == WRITER_TYPE.JSON) {
       loadModel.setJsonFileLoad(true);
-      return new JsonCarbonWriter(loadModel, hadoopConf);
+      JsonCarbonWriter jsonCarbonWriter = new JsonCarbonWriter(loadModel, hadoopConf);
+      if (this.filePath != null && this.filePath.length() != 0) {
+        jsonCarbonWriter.setDataFiles(this.dataFiles);
+      }
+      return jsonCarbonWriter;
+    } else if (this.writerType == WRITER_TYPE.PARQUET) {
+      loadModel.setLoadWithoutConverterStep(true);
+      AvroCarbonWriter avroCarbonWriter = new AvroCarbonWriter(loadModel,
+          hadoopConf, this.avroSchema);
+      ParquetCarbonWriter parquetCarbonWriter = new ParquetCarbonWriter(avroCarbonWriter);
+      parquetCarbonWriter.setDataFiles(this.dataFiles);
+      return parquetCarbonWriter;
+    } else if (this.writerType == WRITER_TYPE.ORC) {
+      CSVCarbonWriter csvCarbonWriter = new CSVCarbonWriter(loadModel, hadoopConf);
+      ORCCarbonWriter orcCarbonWriter = new ORCCarbonWriter(csvCarbonWriter);
+      orcCarbonWriter.setDataFiles(this.dataFiles);
+      return orcCarbonWriter;
     } else {
       // CSV
-      return new CSVCarbonWriter(loadModel, hadoopConf);
+      CSVCarbonWriter csvCarbonWriter = new CSVCarbonWriter(loadModel, hadoopConf);
+      if (this.filePath != null && this.filePath.length() != 0) {
+        csvCarbonWriter.setDataFiles(this.dataFiles);
+        if (!this.options.containsKey("fileHeader")) {

Review comment:
       Please use a named constant for the `"fileHeader"` option key instead of a string literal.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[hidden email]