Posted to commits@carbondata.apache.org by ja...@apache.org on 2018/08/07 13:10:14 UTC

[50/50] [abbrv] carbondata git commit: [CARBONDATA-2768][CarbonStore] Fix error in tests for external csv format

[CARBONDATA-2768][CarbonStore] Fix error in tests for external csv format

In the implementation prior to PR2495, we only supported csv as the
external format for carbondata, and we validated this restriction while
creating the table. PR2495 added kafka support and removed the
validation, but it did not update the related test case, which causes a failure in the current version.
This PR fixes the failing test case.

This closes #2537
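
For reference, a minimal sketch (not part of this commit) of how the new helper added below in FileFormatProperties behaves; the object name FormatCheckDemo is made up purely for illustration:

  import org.apache.carbondata.core.statusmanager.FileFormatProperties

  // Illustration only: exercises the case-insensitive check introduced in this commit.
  // SUPPORTED_EXTERNAL_FORMAT currently contains "csv" and "kafka".
  object FormatCheckDemo {
    def main(args: Array[String]): Unit = {
      println(FileFormatProperties.isExternalFormatSupported("CSV"))     // true
      println(FileFormatProperties.isExternalFormatSupported("kafka"))   // true
      println(FileFormatProperties.isExternalFormatSupported("parquet")) // false
    }
  }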


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/12ab5799
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/12ab5799
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/12ab5799

Branch: refs/heads/external-format
Commit: 12ab5799271a4e37d31dfd583f6ff83f71064ee6
Parents: 1a26ac1
Author: xuchuanyin <xu...@hust.edu.cn>
Authored: Fri Aug 3 14:34:32 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Tue Aug 7 21:08:19 2018 +0800

----------------------------------------------------------------------
 .../core/statusmanager/FileFormatProperties.java   | 17 +++++++++++++++++
 .../carbondata/hadoop/api/CarbonInputFormat.java   |  2 +-
 .../externalformat/CsvBasedCarbonTableSuite.scala  |  2 +-
 .../command/carbonTableSchemaCommon.scala          | 10 +++++++---
 4 files changed, 26 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/12ab5799/core/src/main/java/org/apache/carbondata/core/statusmanager/FileFormatProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/FileFormatProperties.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/FileFormatProperties.java
index 862c36c..4372b44 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/FileFormatProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/FileFormatProperties.java
@@ -17,10 +17,27 @@
 
 package org.apache.carbondata.core.statusmanager;
 
+import java.util.HashSet;
+import java.util.Set;
+
 /**
  * Provides the constant name for the file format properties
  */
 public class FileFormatProperties {
+  private static final Set<String> SUPPORTED_EXTERNAL_FORMAT = new HashSet<String>() {
+    {
+      add("csv");
+      add("kafka");
+    }
+  };
+
+  public static boolean isExternalFormatSupported(String format) {
+    return SUPPORTED_EXTERNAL_FORMAT.contains(format.toLowerCase());
+  }
+  public static Set<String> getSupportedExternalFormat() {
+    return SUPPORTED_EXTERNAL_FORMAT;
+  }
+
   public static class CSV {
     public static final String HEADER = "csv.header";
     public static final String DELIMITER = "csv.delimiter";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/12ab5799/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index 5fdc522..b6fc4b3 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -55,10 +55,10 @@ import org.apache.carbondata.core.scan.model.QueryModelBuilder;
 import org.apache.carbondata.core.stats.QueryStatistic;
 import org.apache.carbondata.core.stats.QueryStatisticsConstants;
 import org.apache.carbondata.core.stats.QueryStatisticsRecorder;
-import org.apache.carbondata.core.util.BlockletDataMapUtil;
 import org.apache.carbondata.core.statusmanager.FileFormat;
 import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
 import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
+import org.apache.carbondata.core.util.BlockletDataMapUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.CarbonUtil;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/12ab5799/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/externalformat/CsvBasedCarbonTableSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/externalformat/CsvBasedCarbonTableSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/externalformat/CsvBasedCarbonTableSuite.scala
index 7f07878..85ccc10 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/externalformat/CsvBasedCarbonTableSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/externalformat/CsvBasedCarbonTableSuite.scala
@@ -151,7 +151,7 @@ class CsvBasedCarbonTableSuite extends QueryTest
       )
     }
 
-    assert(expectedException.getMessage.contains("Currently we only support csv as external file format"))
+    assert(expectedException.getMessage.contains("Unsupported external format parquet"))
   }
 
   test("test csv based carbon table: the sequence of header does not match schema") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/12ab5799/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 56e91f9..2fdbba7 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.mutable.ListBuffer
 
+import org.apache.commons.lang3.StringUtils
 import org.apache.spark.SparkContext
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -42,7 +43,7 @@ import org.apache.carbondata.core.metadata.schema._
 import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationIdentifier, TableInfo, TableSchema}
 import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema, ParentColumnTableRelation}
 import org.apache.carbondata.core.service.impl.ColumnUniqueIdGenerator
-import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentUpdateStatusManager}
+import org.apache.carbondata.core.statusmanager.{FileFormatProperties, LoadMetadataDetails, SegmentUpdateStatusManager}
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil, DataTypeUtil}
 import org.apache.carbondata.processing.loading.FailureCauses
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel
@@ -892,8 +893,11 @@ class TableNewProcessor(cm: TableModel) {
     tableInfo.setFactTable(tableSchema)
     val format = cm.tableProperties.get(CarbonCommonConstants.FORMAT)
     if (format.isDefined) {
-      if (!format.get.equalsIgnoreCase("csv")) {
-        CarbonException.analysisException(s"Currently we only support csv as external file format")
+      if (!FileFormatProperties.isExternalFormatSupported(format.get)) {
+        CarbonException.analysisException(
+          s"Unsupported external format ${format.get}, currently carbondata only support" +
+          s" ${FileFormatProperties.getSupportedExternalFormat.asScala.mkString(", ")}" +
+          s" as external file format")
       }
       tableInfo.setFormat(format.get)
       val formatProperties = cm.tableProperties.filter(pair =>
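
As a usage note, here is a rough sketch (an assumed scenario for illustration, not taken from the commit) of the message the new branch above would produce when FORMAT is set to parquet; the order of the supported formats follows the HashSet iteration order:

  import scala.collection.JavaConverters._
  import org.apache.carbondata.core.statusmanager.FileFormatProperties

  // Illustration only: rebuilds the error message composed in TableNewProcessor above.
  object UnsupportedFormatMessageDemo {
    def main(args: Array[String]): Unit = {
      val format = "parquet"
      val msg = s"Unsupported external format $format, currently carbondata only support" +
        s" ${FileFormatProperties.getSupportedExternalFormat.asScala.mkString(", ")}" +
        s" as external file format"
      // e.g. Unsupported external format parquet, currently carbondata only support csv, kafka as external file format
      println(msg)
    }
  }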