Github user ravipesala commented on the issue:
https://github.com/apache/carbondata/pull/1865 SDV Build Success , Please check CI http://144.76.159.231:8080/job/ApacheSDVTests/3163/ --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on the issue:
https://github.com/apache/carbondata/pull/1865 @jackylk @kumarvishal09 @sraghunandan Please review it. --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164322700 --- Diff: integration/spark-common/src/main/java/org/apache/carbondata/spark/exception/UnsupportedDataMapException.java --- @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.spark.exception; + +import static org.apache.carbondata.core.metadata.schema.table.DataMapClassName.PREAGGREGATE; +import static org.apache.carbondata.core.metadata.schema.table.DataMapClassName.TIMESERIES; + +/** + * Throw exception when using unsupported datamap type + */ +public class UnsupportedDataMapException extends MalformedCarbonCommandException { + /** + * default serial version ID. + */ + private static final long serialVersionUID = 1L; + + public UnsupportedDataMapException(String dataMapType) { + super("Don't support using " + dataMapType + --- End diff -- change the sentence as "Unknown data map type " + dataMaType.Please use one of --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164323056 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { --- End diff -- why to return the same value? if required only for validation, can change method name and return true/false --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164323109 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 --- End diff -- change to count --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164323826 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") + } else if (sum == 1) { + true + } else { + false + } + + if (granularity && !dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"It should using ${TIMESERIES.getName}") + } else if (!granularity && dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } else if (granularity) { + true + } 
else { + false + } + } + + def getTimeSeriesGranularityDetails( --- End diff -- Better to iterate over enum values and check the default values. --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164323979 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") + } else if (sum == 1) { + true + } else { + false + } + + if (granularity && !dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"It should using ${TIMESERIES.getName}") --- End diff -- ${TIMESERIES.getName} keyword missing --- |
In reply to this post by qiuchenjian-2
Github user sraghunandan commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164323882 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") --- End diff -- Only one granularity level can be defined --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164326309 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { --- End diff -- I need know what is the key, and then remove it for updatedDmProperties val updatedDmProperties = dmproperties - TimeSeriesUtil.getGranularityKey(dmproperties) --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164326318 --- Diff: integration/spark-common/src/main/java/org/apache/carbondata/spark/exception/UnsupportedDataMapException.java --- @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.carbondata.spark.exception; + +import static org.apache.carbondata.core.metadata.schema.table.DataMapClassName.PREAGGREGATE; +import static org.apache.carbondata.core.metadata.schema.table.DataMapClassName.TIMESERIES; + +/** + * Throw exception when using unsupported datamap type + */ +public class UnsupportedDataMapException extends MalformedCarbonCommandException { + /** + * default serial version ID. + */ + private static final long serialVersionUID = 1L; + + public UnsupportedDataMapException(String dataMapType) { + super("Don't support using " + dataMapType + --- End diff -- ok, done --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164326326 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 --- End diff -- ok, done --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164326333 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") --- End diff -- ok, done --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164326340 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") + } else if (sum == 1) { + true + } else { + false + } + + if (granularity && !dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"It should using ${TIMESERIES.getName}") --- End diff -- ok, done --- |
In reply to this post by qiuchenjian-2
Github user xubo245 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164328911 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +56,107 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + if (dmProperties.get(YEAR.getName).isDefined) { + return YEAR.getName + } + if (dmProperties.get(MONTH.getName).isDefined) { + return MONTH.getName + } + if (dmProperties.get(DAY.getName).isDefined) { + return DAY.getName + } + if (dmProperties.get(HOUR.getName).isDefined) { + return HOUR.getName + } + if (dmProperties.get(MINUTE.getName).isDefined) { + return MINUTE.getName + } + if (dmProperties.get(SECOND.getName).isDefined) { + return SECOND.getName + } + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var sum = 0 + + if (dmProperties.get(YEAR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MONTH.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(DAY.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(HOUR.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(MINUTE.getName).isDefined) { + sum = sum + 1 + } + if (dmProperties.get(SECOND.getName).isDefined) { + sum = sum + 1 + } + + val granularity = if (sum > 1 || sum < 0) { + throw new UnsupportedDataMapException( + s"Granularity only support one") + } else if (sum == 1) { + true + } else { + false + } + + if (granularity && !dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"It should using ${TIMESERIES.getName}") + } else if (!granularity && dmClassName.equalsIgnoreCase(TIMESERIES.getName)) { + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } else if (granularity) { + true + } 
else { + false + } + } + + def getTimeSeriesGranularityDetails( --- End diff -- ok, done --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1865 Build Success with Spark 2.1.0, Please check CI http://136.243.101.176:8080/job/ApacheCarbonPRBuilder1/3202/ --- |
In reply to this post by qiuchenjian-2
Github user CarbonDataQA commented on the issue:
https://github.com/apache/carbondata/pull/1865 Build Success with Spark 2.2.1, Please check CI http://88.99.58.216:8080/job/ApacheCarbonPRBuilder/1968/ --- |
In reply to this post by qiuchenjian-2
Github user kumarvishal09 commented on the issue:
https://github.com/apache/carbondata/pull/1865 @xubo245 Please update the design document for this PR, as you are changing the DDL for timeseries, and add the new DDL in the PR description --- |
In reply to this post by qiuchenjian-2
Github user kumarvishal09 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164374509 --- Diff: integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala --- @@ -212,6 +215,60 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll { sql("drop datamap agg0 on table maintable") } + val timeSeries = TIMESERIES.getName + + test("test PreAggregate table selection: create with preaggregate and hierarchy") { + sql("drop table if exists maintabletime") + sql( + """ + | create table maintabletime(year int,month int,name string,salary int,dob string) + | stored by 'carbondata' + | tblproperties( + | 'sort_scope'='Global_sort', + | 'table_blocksize'='23', + | 'sort_columns'='month,year,name') + """.stripMargin) + sql("insert into maintabletime select 10,11,'x',12,'2014-01-01 00:00:00'") + sql( + s""" + | create datamap agg0 on table maintabletime + | using 'preaggregate' + | as select dob,name from maintabletime + | group by dob,name + """.stripMargin) + val e = intercept[MalformedCarbonCommandException] { + sql( + s""" + | create datamap agg1 on table maintabletime + | using 'preaggregate' + | DMPROPERTIES ( + | 'event_time'='dob', + | 'second_granularity'='1') + | as select dob,name from maintabletime + | group by dob,name + """.stripMargin) + } + assert(e.getMessage.contains(s"$timeSeries keyword missing")) + sql("drop table if exists maintabletime") + } + + test("test pre agg create table 21: using") { + sql("drop datamap agg0 on table maintable") + + val e: Exception = intercept[Exception] { + sql( + """ + | create datamap agg0 on table mainTable + | using 'abc' + | as select column3, sum(column3),column5, sum(column5) + | from maintable + | group by column3,column5,column2 + """.stripMargin) + } + assert(e.getMessage.contains( + s"Unknown data map type abc, Please use one of $PREAGGREGATE or $TIMESERIES")) --- End diff -- Please update the message to Unknown Data map type , Please use 
PreAggregate or timeseries is not required, as there can be any number of data map types --- |
In reply to this post by qiuchenjian-2
Github user kumarvishal09 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164374779 --- Diff: integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggregateTableSelection.scala --- @@ -277,12 +281,21 @@ test("test PreAggregate table selection with timeseries and normal together") { sql( "create datamap agg0 on table maintabletime using 'preaggregate' as select dob,name from " + "maintabletime group by dob,name") - sql( - "create datamap agg1 on table maintabletime using 'preaggregate' DMPROPERTIES ('timeseries" + - ".eventTime'='dob', 'timeseries.hierarchy'='hour=1,day=1,month=1,year=1') as select dob," + - "name from maintabletime group by dob,name") + + sql( + s""" + | create datamap agg1_year on table maintabletime + | using '$timeSeries' + | DMPROPERTIES ( + | 'event_time'='dob', + | 'year_granularity'='1') --- End diff -- Can you please add some test cases for granularity value validation, for example when the user has given a value of 1.5 or 2 --- |
In reply to this post by qiuchenjian-2
Github user kumarvishal09 commented on a diff in the pull request:
https://github.com/apache/carbondata/pull/1865#discussion_r164379203 --- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/timeseries/TimeSeriesUtil.scala --- @@ -54,6 +55,67 @@ object TimeSeriesUtil { } } + def getGranularityKey(dmProperties: Map[String, String]): String = { + + for (granularity <- Granularity.values()) { + if (dmProperties.get(granularity.getName).isDefined) { + return granularity.getName + } + } + + throw new CarbonIllegalArgumentException( + s"${TIMESERIES.getName} should define time granularity") + } + + def validateTimeSeriesGranularity( + dmProperties: Map[String, String], + dmClassName: String): Boolean = { + var count = 0 --- End diff -- Please use a boolean isFound instead of an int --- |
Free forum by Nabble | Edit this page |