Github user ajantha-bhat commented on the issue:
https://github.com/apache/carbondata/pull/2131 retest this please

Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2131 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4279/

Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/4779/

Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/3553/

Github user sounakr commented on the issue:
https://github.com/apache/carbondata/pull/2131 Retest this please

Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2131 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4281/

Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/3557/

Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/3559/

Github user ajantha-bhat commented on the issue:
https://github.com/apache/carbondata/pull/2131 retest this please

Github user ajantha-bhat commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178787139

--- Diff: core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java ---
@@ -143,6 +143,16 @@
   private boolean hasDataMapSchema;
+  /**
+   * The boolean field which points if the data written for UnManaged Table
+   * or Managed Table. The difference between managed and unManaged table is
+   * unManaged Table will not contain any Metadata folder and subsequently
+   * no TableStatus or Schema files.
+   */
+  private boolean isUnManagedTable;
+
+  private long UUID;
--- End diff --

removed

Github user ajantha-bhat commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178788948

--- Diff: core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java ---
@@ -77,6 +77,19 @@
   private String tablePath;
+  /**
+   * The boolean field which points if the data written for UnManaged Table
+   * or Managed Table. The difference between managed and unManaged table is
+   * unManaged Table will not contain any Metadata folder and subsequently
+   * no TableStatus or Schema files.
+   */
+  private boolean isUnManagedTable;
+
+  /**
+   * Unique ID
+   */
+  private long UUID;
--- End diff --

removed

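Editor's note: the javadoc removed in the two diffs above states the key distinction in this PR, an unmanaged table has no Metadata folder and therefore no TableStatus or Schema files. The sketch below illustrates how such a check could look; the "Metadata" folder name and the use of plain java.io.File are assumptions for illustration only, since the PR routes this through CarbonUtil.isUnmanagedCarbonTable (seen in later diffs), whose implementation is not shown in this thread.

import java.io.File;

// Illustrative sketch only (not the PR's CarbonUtil.isUnmanagedCarbonTable):
// an unmanaged table is recognised purely by the absence of the Metadata
// folder, since it never writes TableStatus or Schema files.
public final class UnmanagedTableCheck {

  private UnmanagedTableCheck() { }

  public static boolean isUnmanagedTable(String tablePath) {
    // Assumed layout: <tablePath>/Metadata holds the Schema and TableStatus
    // files of a managed table; unmanaged tables never create this folder.
    File metadataDir = new File(tablePath, "Metadata");
    return !metadataDir.exists();
  }

  public static void main(String[] args) {
    System.out.println(isUnmanagedTable("/tmp/some_flat_carbon_folder"));
  }
}
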
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/3562/

Github user gvramana commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178795222

--- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java ---
@@ -83,28 +85,44 @@ public DataMapWriter createWriter(Segment segment, String writeDirectoryPath) {
   }
   @Override
-  public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
+  public List<CoarseGrainDataMap> getDataMaps(Segment segment, ReadCommitted readCommitted)
+      throws IOException {
     List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
-        getTableBlockIndexUniqueIdentifiers(segment);
+        getTableBlockIndexUniqueIdentifiers(segment, readCommitted);
     return cache.getAll(tableBlockIndexUniqueIdentifiers);
   }
-  private List<TableBlockIndexUniqueIdentifier> getTableBlockIndexUniqueIdentifiers(
-      Segment segment) throws IOException {
+  private List<TableBlockIndexUniqueIdentifier> getTableBlockIndexUniqueIdentifiers(Segment segment,
+      ReadCommitted readCommitted) throws IOException {
     List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
         segmentMap.get(segment.getSegmentNo());
     if (tableBlockIndexUniqueIdentifiers == null) {
       tableBlockIndexUniqueIdentifiers = new ArrayList<>();
+      Map<String, String> indexFiles;
-      if (segment.getSegmentFileName() == null) {
-        String path =
-            CarbonTablePath.getSegmentPath(identifier.getTablePath(), segment.getSegmentNo());
-        indexFiles = new SegmentIndexFileStore().getIndexFilesFromSegment(path);
+      if (CarbonUtil.isUnmanagedCarbonTable(identifier.getTablePath(), true)) {
--- End diff --

This logic is no longer required, as readCommitted.getCommittedIndexFile(segment) should handle both cases.

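Editor's note: a self-contained sketch of the simplification gvramana suggests, both table kinds sit behind one ReadCommitted abstraction, so the caller no longer branches on CarbonUtil.isUnmanagedCarbonTable. The names below are simplified stand-ins (only TableStatusReadCommitted and getCommittedIndexFile appear in this thread; the unmanaged counterpart's name and the String segment-number parameter are assumptions made to keep the example runnable).

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

// Simplified stand-in for the PR's ReadCommitted abstraction: the scope decides
// where committed index files come from, so BlockletDataMapFactory-style callers
// need no managed/unmanaged branching.
interface ReadCommitted {
  // index file path -> merge file name (null when the file is not merged)
  Map<String, String> getCommittedIndexFile(String segmentNo) throws IOException;
}

// Managed table: index files are resolved through the table status / segment file.
class TableStatusReadCommitted implements ReadCommitted {
  public Map<String, String> getCommittedIndexFile(String segmentNo) {
    Map<String, String> indexFiles = new HashMap<>();
    indexFiles.put("Fact/Part0/Segment_" + segmentNo + "/0_batchno0.carbonindex", null);
    return indexFiles;
  }
}

// Unmanaged table (hypothetical class name): index files listed from the flat folder.
class UnmanagedReadCommitted implements ReadCommitted {
  public Map<String, String> getCommittedIndexFile(String segmentNo) {
    Map<String, String> indexFiles = new HashMap<>();
    indexFiles.put("/data/flat_folder/0_batchno0.carbonindex", null);
    return indexFiles;
  }
}

public class ReadCommittedSketch {
  public static void main(String[] args) throws IOException {
    ReadCommitted[] scopes = { new TableStatusReadCommitted(), new UnmanagedReadCommitted() };
    for (ReadCommitted scope : scopes) {
      // Same call for both table kinds; no isUnmanagedCarbonTable() check is needed here.
      System.out.println(scope.getCommittedIndexFile("0").keySet());
    }
  }
}

The design point is that the segment-file-name check shown in the diff moves behind the interface, so each implementation knows how to list its own committed index files.
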
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/2131 Build Failed with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/4786/

Github user ajantha-bhat commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178811570

--- Diff: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java ---
@@ -224,22 +198,28 @@ protected CarbonTable getOrCreateCarbonTable(Configuration configuration) throws
     // do block filtering and get split
     List<InputSplit> splits =
-        getSplits(job, filterInterface, filteredSegmentToAccess, matchedPartitions, partitionInfo,
-            null, updateStatusManager);
+        getSplits(job, filterInterface, filteredSegmentToAccess, matchedPartitions, partitionInfo,
+            null, updateStatusManager, readCommitted);
+
     // pass the invalid segment to task side in order to remove index entry in task side
-    if (invalidSegments.size() > 0) {
-      for (InputSplit split : splits) {
-        ((org.apache.carbondata.hadoop.CarbonInputSplit) split).setInvalidSegments(invalidSegments);
-        ((org.apache.carbondata.hadoop.CarbonInputSplit) split)
-            .setInvalidTimestampRange(invalidTimestampsList);
+    if (readCommitted instanceof TableStatusReadCommitted) {
--- End diff --

done.

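Editor's note: a compact, self-contained sketch of the guard visible at the end of the diff above. Invalid segments are only known to a table-status based read, so they are attached to the splits only in that case; all types below are hypothetical minimal stand-ins, not the real Carbon classes.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical stand-ins, kept minimal so the sketch compiles on its own.
interface ReadScope { }
class TableStatusScope implements ReadScope { }
class FlatFolderScope implements ReadScope { }

class Split {
  final List<String> invalidSegments = new ArrayList<>();
}

public class InvalidSegmentGuard {

  // Mirrors the shape of the diff: only a table-status based read tracks
  // invalidated segments, so only then are they pushed to the task side.
  static void attachInvalidSegments(ReadScope scope, List<Split> splits,
      List<String> invalidSegments) {
    if (scope instanceof TableStatusScope && !invalidSegments.isEmpty()) {
      for (Split split : splits) {
        split.invalidSegments.addAll(invalidSegments);
      }
    }
  }

  public static void main(String[] args) {
    List<Split> splits = new ArrayList<>();
    splits.add(new Split());
    attachInvalidSegments(new FlatFolderScope(), splits, Arrays.asList("2"));
    System.out.println(splits.get(0).invalidSegments); // [] : skipped for unmanaged reads
    attachInvalidSegments(new TableStatusScope(), splits, Arrays.asList("2"));
    System.out.println(splits.get(0).invalidSegments); // [2]
  }
}
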
Github user ajantha-bhat commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178812558

--- Diff: hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java ---
@@ -159,6 +162,11 @@ public static void setTablePath(Configuration configuration, String tablePath) {
     configuration.set(FileInputFormat.INPUT_DIR, tablePath);
   }
+  public static void setCarbonUnmanagedTable(Configuration configuration,
--- End diff --

configuration.set supports String values only for now; this follows the pattern of the other configs set in CarbonTableInputFormat.

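Editor's note: a sketch of the String-based pattern ajantha-bhat describes (requires hadoop-common on the classpath). The diff is cut off before the second parameter, so the parameter type and the property key below are assumptions made for illustration, not the PR's actual names.

import org.apache.hadoop.conf.Configuration;

// Sketch of storing the unmanaged-table flag the same way the other
// CarbonTableInputFormat settings are stored: as a String in the Hadoop
// Configuration. Key name and parameter type are assumed, not the PR's.
public final class UnmanagedTableConfig {

  private static final String UNMANAGED_TABLE_KEY = "carbon.input.unmanaged.table"; // hypothetical key

  private UnmanagedTableConfig() { }

  public static void setCarbonUnmanagedTable(Configuration configuration, boolean isUnmanaged) {
    // Configuration.set(...) takes String values, so the flag is serialized here.
    configuration.set(UNMANAGED_TABLE_KEY, String.valueOf(isUnmanaged));
  }

  public static boolean isCarbonUnmanagedTable(Configuration configuration) {
    return Boolean.parseBoolean(configuration.get(UNMANAGED_TABLE_KEY, "false"));
  }
}
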
Github user ajantha-bhat commented on the issue:
https://github.com/apache/carbondata/pull/2131 retest this please

Github user ajantha-bhat commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/2131#discussion_r178813379

--- Diff: core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java ---
@@ -83,28 +85,44 @@ public DataMapWriter createWriter(Segment segment, String writeDirectoryPath) {
   }
   @Override
-  public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
+  public List<CoarseGrainDataMap> getDataMaps(Segment segment, ReadCommitted readCommitted)
+      throws IOException {
     List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
-        getTableBlockIndexUniqueIdentifiers(segment);
+        getTableBlockIndexUniqueIdentifiers(segment, readCommitted);
     return cache.getAll(tableBlockIndexUniqueIdentifiers);
  }
-  private List<TableBlockIndexUniqueIdentifier> getTableBlockIndexUniqueIdentifiers(
-      Segment segment) throws IOException {
+  private List<TableBlockIndexUniqueIdentifier> getTableBlockIndexUniqueIdentifiers(Segment segment,
+      ReadCommitted readCommitted) throws IOException {
     List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
         segmentMap.get(segment.getSegmentNo());
     if (tableBlockIndexUniqueIdentifiers == null) {
       tableBlockIndexUniqueIdentifiers = new ArrayList<>();
+      Map<String, String> indexFiles;
-      if (segment.getSegmentFileName() == null) {
-        String path =
-            CarbonTablePath.getSegmentPath(identifier.getTablePath(), segment.getSegmentNo());
-        indexFiles = new SegmentIndexFileStore().getIndexFilesFromSegment(path);
+      if (CarbonUtil.isUnmanagedCarbonTable(identifier.getTablePath(), true)) {
--- End diff --

done

Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2131 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4285/

Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/2131 SDV Build Fail , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/4287/