You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/08/01 10:05:44 UTC
[46/47] incubator-carbondata git commit: Fixed issues after merge
Fixed issues after merge
Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/21d8c7ea
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/21d8c7ea
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/21d8c7ea
Branch: refs/heads/master
Commit: 21d8c7ea6884d820605a187dcf2bcb6787aa09a6
Parents: 50dfdf6
Author: ravipesala <ra...@gmail.com>
Authored: Mon Aug 1 15:12:44 2016 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Mon Aug 1 15:12:44 2016 +0530
----------------------------------------------------------------------
.../org/carbondata/core/util/DataTypeUtil.java | 44 ++++++++---------
.../aggregator/impl/CountStarAggregator.java | 51 --------------------
.../scan/executor/util/QueryUtil.java | 1 -
.../scan/filter/FilterExpressionProcessor.java | 1 -
.../filter/resolver/AndFilterResolverImpl.java | 1 +
.../resolver/LogicalFilterResolverImpl.java | 1 +
.../spark/sql/CarbonDictionaryDecoder.scala | 6 ++-
.../execution/command/carbonTableSchema.scala | 6 ++-
.../spark/rdd/CarbonDataRDDFactory.scala | 4 +-
.../spark/rdd/CarbonGlobalDictionaryRDD.scala | 2 -
...estampDataTypeDirectDictionaryTestCase.scala | 20 +++++---
.../filterexpr/AllDataTypesTestCaseFilter.scala | 10 ++--
12 files changed, 53 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
index 995bf17..92316d5 100644
--- a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
@@ -29,6 +29,7 @@ import java.util.Date;
import org.carbondata.common.logging.LogService;
import org.carbondata.common.logging.LogServiceFactory;
import org.carbondata.core.carbon.metadata.datatype.DataType;
+import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
import org.carbondata.core.constants.CarbonCommonConstants;
@@ -268,7 +269,7 @@ public final class DataTypeUtil {
scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal);
org.apache.spark.sql.types.Decimal decConverter =
new org.apache.spark.sql.types.Decimal();
- return decConverter.set(scalaDecVal, 19, 2);
+ return decConverter.set(scalaDecVal);
default:
return UTF8String.fromString(data);
}
@@ -307,39 +308,36 @@ public final class DataTypeUtil {
}
/**
- * This method will parse a given string value corresponding to its datatype
+ * Below method is used to check whether any non-parseable
+ * data is present or not. If present, it returns null so that the system can
+ * proceed with the default null member value.
*
- * @param value value to parse
- * @param dataType datatype for that value
- * @return
+ * @param data data
+ * @param actualDataType actual data type
+ * @return actual data after conversion
*/
- public static boolean validateColumnValueForItsDataType(String value, DataType dataType) {
+ public static Object normalizeIntAndLongValues(String data, DataType actualDataType) {
+ if (null == data) {
+ return null;
+ }
try {
Object parsedValue = null;
- // validation will not be done for timestamp datatype as for timestamp direct dictionary
- // is generated. No dictionary file is created for timestamp datatype column
- switch (dataType) {
- case DECIMAL:
- parsedValue = new BigDecimal(value);
- break;
+ switch (actualDataType) {
case INT:
- parsedValue = Integer.parseInt(value);
+ parsedValue = Integer.parseInt(data);
break;
case LONG:
- parsedValue = Long.valueOf(value);
- break;
- case DOUBLE:
- parsedValue = Double.valueOf(value);
+ parsedValue = Long.parseLong(data);
break;
default:
- return true;
+ return data;
}
- if (null != parsedValue) {
- return true;
+ if(null != parsedValue) {
+ return data;
}
- return false;
- } catch (Exception e) {
- return false;
+ return null;
+ } catch (NumberFormatException ex) {
+ return null;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/query/aggregator/impl/CountStarAggregator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/aggregator/impl/CountStarAggregator.java b/core/src/main/java/org/carbondata/query/aggregator/impl/CountStarAggregator.java
deleted file mode 100644
index 3b7e78c..0000000
--- a/core/src/main/java/org/carbondata/query/aggregator/impl/CountStarAggregator.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.carbondata.query.aggregator.impl;
-
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.query.aggregator.MeasureAggregator;
-
-/**
- * Class Description : It will return total count of values
- */
-public class CountStarAggregator extends CountAggregator {
-
- @Override public void agg(double newVal) {
- super.agg(newVal);
- }
-
- @Override public void agg(Object newVal) {
- aggVal++;
- }
-
- @Override public void agg(MeasureColumnDataChunk dataChunk, int index) {
- aggVal++;
- }
-
- @Override public MeasureAggregator getCopy() {
- CountStarAggregator aggregator = new CountStarAggregator();
- aggregator.aggVal = aggVal;
- return aggregator;
- }
-
- @Override public MeasureAggregator getNew() {
- return new CountStarAggregator();
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java b/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
index 8fee45a..00e0f48 100644
--- a/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
+++ b/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
@@ -57,7 +57,6 @@ import org.carbondata.scan.executor.exception.QueryExecutionException;
import org.carbondata.scan.executor.infos.KeyStructureInfo;
import org.carbondata.scan.expression.ColumnExpression;
import org.carbondata.scan.expression.Expression;
-import org.carbondata.scan.expression.logical.BinaryLogicalExpression;
import org.carbondata.scan.filter.GenericQueryType;
import org.carbondata.scan.filter.resolver.FilterResolverIntf;
import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/carbondata/scan/filter/FilterExpressionProcessor.java
index a2c6f28..6543af6 100644
--- a/core/src/main/java/org/carbondata/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/carbondata/scan/filter/FilterExpressionProcessor.java
@@ -40,7 +40,6 @@ import org.carbondata.scan.expression.Expression;
import org.carbondata.scan.expression.conditional.BinaryConditionalExpression;
import org.carbondata.scan.expression.conditional.ConditionalExpression;
import org.carbondata.scan.expression.exception.FilterUnsupportedException;
-import org.carbondata.scan.expression.logical.BinaryLogicalExpression;
import org.carbondata.scan.filter.executer.FilterExecuter;
import org.carbondata.scan.filter.intf.ExpressionType;
import org.carbondata.scan.filter.resolver.ConditionalFilterResolverImpl;
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
index 4f7d814..1be7595 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/AndFilterResolverImpl.java
@@ -23,6 +23,7 @@ import java.util.SortedMap;
import org.carbondata.core.carbon.AbsoluteTableIdentifier;
import org.carbondata.core.carbon.datastore.block.SegmentProperties;
import org.carbondata.scan.executor.exception.QueryExecutionException;
+import org.carbondata.scan.expression.BinaryExpression;
import org.carbondata.scan.filter.intf.ExpressionType;
public class AndFilterResolverImpl extends LogicalFilterResolverImpl {
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java b/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
index 4aa448f..8b18a21 100644
--- a/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
+++ b/core/src/main/java/org/carbondata/scan/filter/resolver/LogicalFilterResolverImpl.java
@@ -24,6 +24,7 @@ import java.util.SortedMap;
import org.carbondata.core.carbon.AbsoluteTableIdentifier;
import org.carbondata.core.carbon.datastore.block.SegmentProperties;
import org.carbondata.scan.executor.exception.QueryExecutionException;
+import org.carbondata.scan.expression.BinaryExpression;
import org.carbondata.scan.expression.Expression;
import org.carbondata.scan.filter.intf.ExpressionType;
import org.carbondata.scan.filter.intf.FilterExecuterType;
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 9e66d99..d0d4f16 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -101,7 +101,11 @@ case class CarbonDictionaryDecoder(
case DataType.DECIMAL =>
val scale: Int = carbonDimension.getColumnSchema.getScale
val precision: Int = carbonDimension.getColumnSchema.getPrecision
- DecimalType(18, 2)
+ if (scale == 0 && precision == 0) {
+ DecimalType(18, 2)
+ } else {
+ DecimalType(precision, scale)
+ }
case DataType.TIMESTAMP => TimestampType
case DataType.STRUCT =>
CarbonMetastoreTypes
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 01bb218..d19d1c5 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -49,10 +49,12 @@ import org.carbondata.core.carbon.metadata.schema.table.{CarbonTable, TableInfo,
import org.carbondata.core.carbon.metadata.schema.table.column.{CarbonDimension, ColumnSchema}
import org.carbondata.core.constants.CarbonCommonConstants
import org.carbondata.core.datastorage.store.impl.FileFactory
+import org.carbondata.core.load.LoadMetadataDetails
import org.carbondata.core.util.{CarbonProperties, CarbonUtil}
import org.carbondata.integration.spark.merger.CompactionType
import org.carbondata.lcm.locks.{CarbonLockFactory, LockUsage}
import org.carbondata.lcm.status.SegmentStatusManager
+import org.carbondata.processing.etl.DataLoadingException
import org.carbondata.spark.CarbonSparkFactory
import org.carbondata.spark.exception.MalformedCarbonCommandException
import org.carbondata.spark.load._
@@ -1193,10 +1195,10 @@ private[sql] case class LoadTable(
}
} catch {
case dle: DataLoadingException =>
- LOGGER.audit(s"Dataload failed for $schemaName.$tableName. " + dle.getMessage)
+ LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + dle.getMessage)
throw dle
case mce: MalformedCarbonCommandException =>
- LOGGER.audit(s"Dataload failed for $schemaName.$tableName. " + mce.getMessage)
+ LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + mce.getMessage)
throw mce
} finally {
if (carbonLock != null) {
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 748a408..70e8257 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -19,7 +19,7 @@
package org.carbondata.spark.rdd
import java.util
-import java.util.concurrent.{Executors, ExecutorService, Future}
+import java.util.concurrent._
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
@@ -31,7 +31,7 @@ import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.spark.{Logging, Partition, SparkContext, SparkEnv}
import org.apache.spark.sql.{CarbonEnv, SQLContext}
-import org.apache.spark.sql.execution.command.{AlterTableModel, CompactionModel, Partitioner}
+import org.apache.spark.sql.execution.command.{AlterTableModel, CompactionCallableModel, CompactionModel, Partitioner}
import org.apache.spark.sql.hive.DistributionUtil
import org.apache.spark.util.{FileUtils, SplitUtils}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
index e79937f..9e0de52 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
@@ -36,8 +36,6 @@ import org.carbondata.core.carbon.{CarbonTableIdentifier, ColumnIdentifier}
import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
import org.carbondata.core.constants.CarbonCommonConstants
import org.carbondata.core.datastorage.store.impl.FileFactory
-import org.carbondata.core.locks.CarbonLockFactory
-import org.carbondata.core.locks.LockUsage
import org.carbondata.core.util.CarbonProperties
import org.carbondata.core.util.CarbonTimeStatisticsFactory
import org.carbondata.lcm.locks.{CarbonLockFactory, LockUsage}
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
index a29cefb..dc7d6a7 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
@@ -51,11 +51,17 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
)
CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "true")
sql(
- "CREATE TABLE directDictionaryTable (empno int,doj Timestamp, " +
+ "CREATE TABLE if not exists directDictionaryTable (empno int,doj Timestamp, " +
"salary int) " +
"STORED BY 'org.apache.carbondata.format'"
)
+ sql(
+ "CREATE TABLE if not exists directDictionaryTable_hive (empno int,doj Timestamp, " +
+ "salary int) " +
+ "row format delimited fields terminated by ','"
+ )
+
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
@@ -63,6 +69,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
val csvFilePath = currentDirectory + "/src/test/resources/datasample.csv"
sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable OPTIONS" +
"('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+ sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable_hive");
} catch {
case x: Throwable => CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
@@ -80,7 +87,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
test("test direct dictionary for getting all the values") {
checkAnswer(
- sql("select doj from directDictionaryCube"),
+ sql("select doj from directDictionaryTable"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0")),
Row(Timestamp.valueOf("2016-04-14 15:00:09.0")),
Row(null)
@@ -90,7 +97,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
test("test direct dictionary for not equals condition") {
checkAnswer(
- sql("select doj from directDictionaryCube where doj != '2016-04-14 15:00:09.0'"),
+ sql("select doj from directDictionaryTable where doj != '2016-04-14 15:00:09.0'"),
Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0"))
)
)
@@ -98,7 +105,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
test("test direct dictionary for null condition") {
checkAnswer(
- sql("select doj from directDictionaryCube where doj is null"),
+ sql("select doj from directDictionaryTable where doj is null"),
Seq(Row(null)
)
)
@@ -111,7 +118,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
)
}
-
+
test("select doj from directDictionaryTable with regexp_replace equals filter") {
checkAnswer(
sql("select doj from directDictionaryTable where regexp_replace(doj, '-', '/') = '2016/03/14 15:00:09'"),
@@ -122,7 +129,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
test("select doj from directDictionaryTable with regexp_replace NOT IN filter") {
checkAnswer(
sql("select doj from directDictionaryTable where regexp_replace(doj, '-', '/') NOT IN ('2016/03/14 15:00:09')"),
- Seq(Row(Timestamp.valueOf("2016-04-14 15:00:09")), Row(null))
+ sql("select doj from directDictionaryTable_hive where regexp_replace(doj, '-', '/') NOT IN ('2016/03/14 15:00:09')")
)
}
@@ -142,6 +149,7 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
override def afterAll {
sql("drop table directDictionaryTable")
+ sql("drop table directDictionaryTable_hive")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", "false")
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/21d8c7ea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
index 4075e60..3e86e16 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
@@ -47,16 +47,16 @@ class AllDataTypesTestCaseFilter extends QueryTest with BeforeAndAfterAll {
sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypestableFilter_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
}
- test("select empno,empname from alldatatypescubeFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')") {
+ test("select empno,empname from alldatatypestableFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')") {
checkAnswer(
- sql("select empno,empname from alldatatypescubeFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"),
- sql("select empno,empname from alldatatypescubeFilter_hive where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"))
+ sql("select empno,empname from alldatatypestableFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"),
+ sql("select empno,empname from alldatatypestableFilter_hive where regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"))
}
test("select empno,empname from alldatatypescubeFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'") {
checkAnswer(
- sql("select empno,empname from alldatatypescubeFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"),
- sql("select empno,empname from alldatatypescubeFilter_hive where regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"))
+ sql("select empno,empname from alldatatypestableFilter where regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"),
+ sql("select empno,empname from alldatatypestableFilter_hive where regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"))
}
override def afterAll {