nihal0107 commented on a change in pull request #3819:
URL: https://github.com/apache/carbondata/pull/3819#discussion_r467937372


##########
File path: sdk/sdk/src/main/java/org/apache/carbondata/sdk/file/ORCCarbonWriter.java
##########
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.file;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.carbondata.sdk.file.utils.SDKUtil;
+
+import org.apache.hadoop.hive.ql.io.orc.OrcStruct;
+import org.apache.hadoop.hive.ql.io.orc.Reader;
+import org.apache.hadoop.hive.ql.io.orc.RecordReader;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Implementation that converts ORC rows to CSV format and writes them to a carbondata file.
+ */
+public class ORCCarbonWriter extends CSVCarbonWriter {
+  private CSVCarbonWriter csvCarbonWriter = null;
+  private Reader orcReader = null;
+  private File[] dataFiles;
+
+  ORCCarbonWriter(CSVCarbonWriter csvCarbonWriter) {
+    this.csvCarbonWriter = csvCarbonWriter;
+  }
+
+  @Override
+  public void setDataFiles(File[] dataFiles) {
+    this.dataFiles = dataFiles;
+  }
+
+  /**
+   * Loads the ORC files one by one, in sorted order.
+   */
+  @Override
+  public void write() throws IOException {
+    if (this.dataFiles == null || this.dataFiles.length == 0) {
+      throw new RuntimeException("'withOrcPath()' must be called to support loading ORC files");
+    }
+    if (this.csvCarbonWriter == null) {
+      throw new RuntimeException("CSV carbon writer cannot be null");
+    }
+    Arrays.sort(this.dataFiles);
+    for (File dataFile : this.dataFiles) {
+      this.loadSingleFile(dataFile);
+    }
+  }
+
+  private void loadSingleFile(File file) throws IOException {
+    orcReader = SDKUtil.buildOrcReader(file.getPath());
+    ObjectInspector objectInspector = orcReader.getObjectInspector();
+    RecordReader recordReader = orcReader.rows();
+    if (objectInspector instanceof StructObjectInspector) {
+      StructObjectInspector structObjectInspector =
+          (StructObjectInspector) objectInspector;
+      while (recordReader.hasNext()) {
+        // pass null so a fresh row object is created instead of reusing the previous one
+        Object record = recordReader.next(null);
+        List valueList = structObjectInspector.getStructFieldsDataAsList(record);
+        for (int i = 0; i < valueList.size(); i++) {
+          valueList.set(i, parseOrcObject(valueList.get(i), 0));
+        }
+        this.csvCarbonWriter.write(valueList.toArray());
+      }
+    } else {
+      while (recordReader.hasNext()) {
+        // pass null so a fresh row object is created instead of reusing the previous one
+        Object record = recordReader.next(null);
+        this.csvCarbonWriter.write(new Object[]{parseOrcObject(record, 0)});
+      }
+    }
+  }
+
+  private String parseOrcObject(Object recordObject, int level) {
+    if (recordObject instanceof OrcStruct) {
+      Objects.requireNonNull(orcReader);
+      StructObjectInspector structObjectInspector = (StructObjectInspector) orcReader
+          .getObjectInspector();
+      List value = structObjectInspector.getStructFieldsDataAsList(recordObject);
+      for (int i = 0; i < value.size(); i++) {
+        value.set(i, parseOrcObject(value.get(i), level + 1));
+      }
+      String str = listToString(value, level);
+      if (str.length() > 0) {
+        return str.substring(0, str.length() - 1);
+      }
+      return null;
+    } else if (recordObject instanceof ArrayList) {
+      ArrayList listValue = (ArrayList) recordObject;
+      for (int i = 0; i < listValue.size(); i++) {
+        listValue.set(i, parseOrcObject(listValue.get(i), level + 1));
+      }
+      String str = listToString(listValue, level);
+      if (str.length() > 0) {
+        return str.substring(0, str.length() - 1);
+      }
+      return null;
+    } else if (recordObject instanceof LinkedHashMap) {
+      LinkedHashMap<Text, Object> keyValueRow = (LinkedHashMap<Text, Object>) recordObject;
+      for (Map.Entry<Text, Object> entry : keyValueRow.entrySet()) {
+        Object val = parseOrcObject(keyValueRow.get(entry.getKey()), level + 2);
+        keyValueRow.put(entry.getKey(), val);
+      }
+      StringBuilder str = new StringBuilder();
+      for (Map.Entry<Text, Object> entry : keyValueRow.entrySet()) {
+        Text key = entry.getKey();
+        str.append(key.toString()).append("$").append(keyValueRow.get(key)).append("#");
+      }
+      if (str.length() > 0) {
+        return str.substring(0, str.length() - 1);
+      }
+      return null;
+    }
+    if (recordObject == null) {
+      return null;
+    }
+    return recordObject.toString();
+  }
+
+  private String listToString(List value, int level) {
+    String delimiter = "";
+    if (level == 0) {
+      delimiter = "#";
+    } else if (level == 1) {
+      delimiter = "$";

Review comment:
   This is just for the internal implementation; it is not required to be configurable by the user.

##########
File path: sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
##########
@@ -846,4 +849,158 @@ public void testWritingAndReadingArrayStruct() throws IOException {
     }
   }
 
+  @Test
+  public void testCsvLoadAndCarbonReadWithPrimitiveType() throws IOException,
+      InvalidLoadOptionException, InterruptedException {
+    String path = "./testCsvFileLoad";
+    String filePath = "./src/test/resources/file/csv_files/primitive_data.csv";
+    FileUtils.deleteDirectory(new File(path));
+    CarbonWriterBuilder carbonWriterBuilder = new CarbonWriterBuilder();
+    Field fields[] = new Field[4];
+    fields[0] = new Field("id", "INT");
+    fields[1] = new Field("country", "STRING");
+    fields[2] = new Field("name", "STRING");
+    fields[3] = new Field("salary", "INT");
+    CarbonWriter carbonWriter = carbonWriterBuilder.withCsvInput(new Schema(fields)).
+ withCsvPath(filePath).outputPath(path).writtenBy("CSVCarbonWriter").build(); + carbonWriter.write(); + carbonWriter.close(); + File[] dataFiles = new File(path).listFiles(); + assert (Objects.requireNonNull(dataFiles).length == 2); + + CarbonReader reader = CarbonReader.builder("./testCsvFileLoad", "_temp") + .projection(new String[]{"id", "country", "name", "salary"}).build(); + int rowCount = 0; + while (reader.hasNext()) { + Object[] row = (Object[]) reader.readNextRow(); + rowCount++; + Assert.assertEquals(row[0], rowCount); + Assert.assertEquals(row[1], "china"); + Assert.assertEquals(row[2], "aaa" + rowCount); + Assert.assertEquals(row[3], 14999 + rowCount); + } + assert (rowCount == 10); + FileUtils.deleteDirectory(new File(path)); + } + + @Test + public void testCsvLoadAndCarbonReadWithComplexType() throws IOException, InterruptedException, InvalidLoadOptionException { + String path = "./testCsvFileLoad"; + String filePath = "../../examples/spark/src/main/resources/data.csv"; + FileUtils.deleteDirectory(new File(path)); + CarbonWriterBuilder carbonWriterBuilder = new CarbonWriterBuilder(); + Field fields[] = new Field[11]; + fields[0] = new Field("shortField", "SHORT"); + fields[1] = new Field("intField", "INT"); + fields[2] = new Field("bigintField", "LONG"); + fields[3] = new Field("doubleField", "DOUBLE"); + fields[4] = new Field("stringField", "STRING"); + fields[5] = new Field("timestampfield", "TIMESTAMP"); + fields[6] = new Field("decimalField", "DECIMAL"); + fields[7] = new Field("datefield", "DATE"); + fields[8] = new Field("charField", "VARCHAR"); + fields[9] = new Field("floatField", "FLOAT"); + + StructField[] structFields = new StructField[3]; + structFields[0] = new StructField("fooField", DataTypes.STRING); + structFields[1] = new StructField("barField", DataTypes.STRING); + structFields[2] = new StructField("worldField", DataTypes.STRING); + Field structType = new Field("structField", "struct", Arrays.asList(structFields)); + + fields[10] = structType; + Map<String, String> options = new HashMap<>(); + options.put("timestampformat", "yyyy/MM/dd HH:mm:ss"); + options.put("dateformat", "yyyy/MM/dd"); + options.put("complex_delimiter_level_1", "#"); + CarbonWriter carbonWriter = carbonWriterBuilder.withCsvInput(new Schema(fields)). 
+ withCsvPath(filePath).outputPath(path).writtenBy("CSVCarbonWriter").withLoadOptions(options).build(); + carbonWriter.write(); + carbonWriter.close(); + File[] dataFiles = new File(path).listFiles(); + assert (Objects.requireNonNull(dataFiles).length == 2); + + CarbonReader reader = CarbonReader.builder("./testCsvFileLoad", "_temp") + .projection(new String[]{"structfield"}).build(); + int rowCount = 0; + while (reader.hasNext()) { + Object[] row = (Object[]) reader.readNextRow(); + Object[] structCol = (Object[]) row[0]; + assert (structCol.length == 3); + Assert.assertEquals(structCol[0], "'foo'"); + Assert.assertEquals(structCol[1], "'bar'"); + Assert.assertEquals(structCol[2], "'world'"); + rowCount++; + } + assert (rowCount == 10); + FileUtils.deleteDirectory(new File(path)); + } + + @Test + public void testMultipleCsvFileLoad() throws IOException, InvalidLoadOptionException, InterruptedException { + String path = "./testCsvFileLoad"; + String filePath = "./src/test/resources/file/csv_files"; + FileUtils.deleteDirectory(new File(path)); + CarbonWriterBuilder carbonWriterBuilder = new CarbonWriterBuilder(); + Field fields[] = new Field[4]; + fields[0] = new Field("id", "INT"); + fields[1] = new Field("country", "STRING"); + fields[2] = new Field("name", "STRING"); + fields[3] = new Field("salary", "INT"); + CarbonWriter carbonWriter = carbonWriterBuilder.withCsvInput(new Schema(fields)). + withCsvPath(filePath).outputPath(path).writtenBy("CSVCarbonWriter").build(); + carbonWriter.write(); + carbonWriter.close(); + File[] dataFiles = new File(path).listFiles(); + assert (Objects.requireNonNull(dataFiles).length == 2); + + CarbonReader reader = CarbonReader.builder("./testCsvFileLoad", "_temp") + .projection(new String[]{"id", "country", "name", "salary"}).build(); + int rowCount = 0; + while (reader.hasNext()) { + Object[] row = (Object[]) reader.readNextRow(); + rowCount++; + Assert.assertEquals(row[0], rowCount); + Assert.assertEquals(row[1], "china"); + Assert.assertEquals(row[2], "aaa" + rowCount); + Assert.assertEquals(row[3], 14999 + rowCount); + } + assert (rowCount == 30); + FileUtils.deleteDirectory(new File(path)); + } + + @Test + public void testSelectedCsvFileLoadInDirectory() throws IOException, + InvalidLoadOptionException, InterruptedException { + String path = "./testCsvFileLoad"; + String filePath = "./src/test/resources/file/csv_files"; + FileUtils.deleteDirectory(new File(path)); + CarbonWriterBuilder carbonWriterBuilder = new CarbonWriterBuilder(); + Field fields[] = new Field[4]; + fields[0] = new Field("id", "INT"); + fields[1] = new Field("country", "STRING"); + fields[2] = new Field("name", "STRING"); + fields[3] = new Field("salary", "INT"); + List<String> fileList = new ArrayList<>(); + fileList.add("primitive_data_2.csv"); + fileList.add("primitive_data_3.csv"); + CarbonWriter carbonWriter = carbonWriterBuilder.withCsvInput(new Schema(fields)). + withCsvPath(filePath, fileList).outputPath(path).writtenBy("CSVCarbonWriter").build(); + carbonWriter.write(); + carbonWriter.close(); + File[] dataFiles = new File(path).listFiles(); + assert (Objects.requireNonNull(dataFiles).length == 2); + + CarbonReader reader = CarbonReader.builder("./testCsvFileLoad", "_temp") Review comment: Done. ########## File path: sdk/sdk/src/test/java/org/apache/carbondata/sdk/file/JSONCarbonWriterTest.java ########## @@ -0,0 +1,249 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.file;
+
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.Field;
+import org.apache.carbondata.core.metadata.datatype.StructField;
+import org.apache.commons.io.FileUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Test suite for {@link JsonCarbonWriter}
+ */
+public class JSONCarbonWriterTest {
+
+  private Schema buildSchema() {
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", "STRING");
+    fields[1] = new Field("intField", "INT");
+    fields[2] = new Field("shortField", "SHORT");
+    fields[3] = new Field("longField", "LONG");
+    fields[4] = new Field("doubleField", "DOUBLE");
+    fields[5] = new Field("boolField", "BOOLEAN");
+    fields[6] = new Field("dateField", "DATE");
+    fields[7] = new Field("timeField", "TIMESTAMP");
+    fields[8] = new Field("decimalField", "DECIMAL");
+    return new Schema(fields);
+  }
+
+  @Test
+  public void testJsonFileLoadSingleRow() throws IOException,
+      InvalidLoadOptionException, InterruptedException {
+    String filePath = "./src/test/resources/file/json_files/allPrimitiveType.json";
+    String path = "./testLoadJsonFile";
+    FileUtils.deleteDirectory(new File(path));
+    Schema schema = buildSchema();
+    CarbonWriterBuilder carbonWriterBuilder = new CarbonWriterBuilder();
+    CarbonWriter carbonWriter = carbonWriterBuilder.withJsonPath(filePath).outputPath(path)
+        .withJsonInput(schema).writtenBy("JSONCarbonWriterTest").build();
+    carbonWriter.write();
+    carbonWriter.close();
+    File[] dataFiles = new File(path).listFiles();
+    assert (Objects.requireNonNull(dataFiles).length == 2);
+
+    CarbonReader reader = CarbonReader.builder("./testLoadJsonFile", "_temp")
+        .projection(new String[]{"stringField", "boolField", "decimalField", "longField"}).build();
+    int id = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      Assert.assertEquals(row[0], "nihal\"ojha\"");
+      Assert.assertEquals(row[1], false);
+      Assert.assertEquals(row[2], (new BigDecimal("55.35")).setScale(2, BigDecimal.ROUND_FLOOR));
+      Assert.assertEquals(row[3], (long) 1234567);
+      id++;
+    }
+    assert (id == 1);
+    FileUtils.deleteDirectory(new File(path));

Review comment:
   Done.
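
To illustrate the delimiter discussion above: parseOrcObject() flattens nested ORC values into the delimited string form that CSVCarbonWriter expects, joining level-0 struct fields with '#' and level-1 nested values with '$' (map entries are emitted as key$value pairs separated by '#'). Below is a minimal, self-contained sketch of that scheme; the class and method names are hypothetical, and only the delimiter logic is taken from the diff above.

import java.util.Arrays;
import java.util.List;

// Illustrative sketch only: mirrors the delimiter scheme of the PR's listToString().
public class OrcFlattenSketch {

  // Pick the delimiter for the nesting level and append it after every
  // element; the caller strips the trailing delimiter, as in the PR.
  static String listToString(List<String> values, int level) {
    String delimiter = level == 0 ? "#" : (level == 1 ? "$" : "");
    StringBuilder str = new StringBuilder();
    for (String value : values) {
      str.append(value).append(delimiter);
    }
    return str.toString();
  }

  public static void main(String[] args) {
    // A row of struct<a:int, b:array<string>> such as (1, ["x", "y"]):
    String array = listToString(Arrays.asList("x", "y"), 1);  // "x$y$"
    array = array.substring(0, array.length() - 1);           // "x$y"
    String row = listToString(Arrays.asList("1", array), 0);  // "1#x$y#"
    System.out.println(row.substring(0, row.length() - 1));   // prints "1#x$y"
  }
}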
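
For end-to-end context: ORCCarbonWriter.write() refuses to run unless withOrcPath() was called, so usage presumably mirrors the withCsvPath()/withJsonPath() tests above. The sketch below is an assumption rather than code from the PR; in particular, the exact withOrcPath() signature and the input directory are hypothetical.

import java.io.IOException;
import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;

// Hypothetical driver; the builder chain is assumed to follow the
// CSV/JSON pattern shown in the tests above.
public class OrcLoadSketch {
  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
    CarbonWriter writer = new CarbonWriterBuilder()
        .withOrcPath("./src/test/resources/file/orc_files")  // assumed ORC input directory
        .outputPath("./testOrcFileLoad")
        .writtenBy("ORCCarbonWriter")
        .build();
    writer.write();  // per the diff: sorts the ORC files and loads them one by one
    writer.close();
  }
}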