Posted to commits@carbondata.apache.org by aj...@apache.org on 2020/07/13 12:24:21 UTC

[carbondata] branch master updated: [CARBONDATA-3845] Fix Bucket table creation fails with an exception for empty BUCKET_NUMBER and BUCKET_COLUMNS

This is an automated email from the ASF dual-hosted git repository.

ajantha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new dde5003  [CARBONDATA-3845] Fix Bucket table creation fails with an exception for empty BUCKET_NUMBER and BUCKET_COLUMNS
dde5003 is described below

commit dde5003e1a6ceb189b9daeea2c4502a1d394fe95
Author: Shreelekhya <sh...@yahoo.com>
AuthorDate: Mon Jun 29 22:25:30 2020 +0530

    [CARBONDATA-3845] Fix Bucket table creation fails with an exception for empty BUCKET_NUMBER and BUCKET_COLUMNS
    
    Why is this PR needed?
    Creating a bucket table fails with an unhandled exception (a NumberFormatException for an empty BUCKET_NUMBER) instead of a clear error message when BUCKET_NUMBER or BUCKET_COLUMNS is empty.
    
    What changes were proposed in this PR?
    Wrapped the BUCKET_NUMBER-to-Int conversion in a Try so that empty or otherwise non-numeric string values no longer throw a NumberFormatException and are instead rejected as an invalid number of buckets (see the sketch after this message).
    
    Does this PR introduce any user interface change?
    No
    
    Is any new test case added?
    Yes
    
    This closes #3817
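
    A minimal Scala sketch of the conversion this change relies on. The
    parseBucketNumber and validate helpers below are illustrative only and do
    not exist in the codebase; the option key and error message mirror
    CarbonOption and CarbonSpark2SqlParser, but the patched parser throws a
    MalformedCarbonCommandException, while this sketch returns an Either to
    stay self-contained.

        import scala.util.Try

        // A non-numeric or empty "bucket_number" value yields None instead of
        // throwing: Try(...).toOption turns the NumberFormatException into None.
        def parseBucketNumber(options: Map[String, String]): Option[Int] =
          Try(options.getOrElse("bucket_number", "0").toInt).toOption

        // Simplified validation: None and non-positive values are rejected with
        // the same message the new tests assert on.
        def validate(options: Map[String, String]): Either[String, Int] =
          parseBucketNumber(options) match {
            case Some(n) if n > 0 => Right(n)
            case _ => Left("INVALID NUMBER OF BUCKETS SPECIFIED")
          }

        validate(Map("bucket_number" -> ""))    // Left(INVALID NUMBER OF BUCKETS SPECIFIED)
        validate(Map("bucket_number" -> "one")) // Left(INVALID NUMBER OF BUCKETS SPECIFIED)
        validate(Map("bucket_number" -> "4"))   // Right(4)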
---
 .../org/apache/carbondata/spark/CarbonOption.scala |  3 ++-
 .../spark/sql/parser/CarbonSpark2SqlParser.scala   |  5 ++---
 .../bucketing/TableBucketingTestCase.scala         | 22 ++++++++++++++++++++++
 3 files changed, 26 insertions(+), 4 deletions(-)

diff --git a/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
index 06b2130..107cbe1 100644
--- a/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
+++ b/integration/spark/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.spark
 
+import scala.util.Try
 
 /**
  * Contains all options for Spark data source
@@ -58,7 +59,7 @@ class CarbonOption(options: Map[String, String]) {
 
   lazy val tablePageSizeInMb: Option[String] = options.get("table_page_size_inmb")
 
-  lazy val bucketNumber: Int = options.getOrElse("bucket_number", "0").toInt
+  lazy val bucketNumber: Option[Int] = Try(options.getOrElse("bucket_number", "0").toInt).toOption
 
   lazy val bucketColumns: String = options.getOrElse("bucket_columns", "")
 
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
index b101257..97e1c31 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
@@ -766,13 +766,12 @@ class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
       throw new MalformedCarbonCommandException("Invalid table properties")
     }
     if (options.isBucketingEnabled) {
-      if (options.bucketNumber.toString.contains("-") ||
-          options.bucketNumber.toString.contains("+") ||  options.bucketNumber == 0) {
+      if (options.bucketNumber.isEmpty || options.bucketNumber.get <= 0) {
         throw new MalformedCarbonCommandException("INVALID NUMBER OF BUCKETS SPECIFIED")
       }
       else {
         Some(BucketFields(options.bucketColumns.toLowerCase.split(",").map(_.trim),
-          options.bucketNumber))
+          options.bucketNumber.get))
       }
     } else {
       None
diff --git a/integration/spark/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala b/integration/spark/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
index 1ab3b38..0bfa93e 100644
--- a/integration/spark/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
@@ -232,6 +232,28 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
     }
   }
 
+  test("test create table with empty bucket number must fail") {
+    val ex = intercept[MalformedCarbonCommandException] {
+      sql(
+        "CREATE TABLE t11 (ID Int, date Timestamp, country String, name String, phonetype String," +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
+        "('BUCKET_NUMBER'='', 'BUCKET_COLUMNS'='name')"
+      )
+    }
+    assert(ex.getMessage.contains("INVALID NUMBER OF BUCKETS SPECIFIED"))
+  }
+
+  test("test create table with bucket number having non numeric value must fail") {
+    val ex = intercept[MalformedCarbonCommandException] {
+      sql(
+        "CREATE TABLE t11 (ID Int, date Timestamp, country String, name String, phonetype String," +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
+        "('BUCKET_NUMBER'='one', 'BUCKET_COLUMNS'='name')"
+      )
+    }
+    assert(ex.getMessage.contains("INVALID NUMBER OF BUCKETS SPECIFIED"))
+  }
+
   test("must be unable to create if number of buckets is in negative number") {
     try {
       sql(