You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ma...@apache.org on 2018/05/17 14:30:28 UTC

[15/50] [abbrv] carbondata git commit: [CARBONDATA-2401] Date and Timestamp options are not working in SDK

[CARBONDATA-2401] Date and Timestamp options are not working in SDK

Issue: The date and timestamp formats are passed in the SDK options, but the data
load fails even when the data is correct as per the specified format.
Cause: The load model's date and timestamp formats are being overwritten with the defaults.
Fix: If the user has passed these options, the load model should use the values from
the options; otherwise it should fall back to the defaults.

This closes #2227


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/b2060c61
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/b2060c61
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/b2060c61

Branch: refs/heads/spark-2.3
Commit: b2060c61104895d5599b1c044b725d56a39748f8
Parents: ceb7c8d
Author: BJangir <ba...@gmail.com>
Authored: Wed Apr 25 18:27:58 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Thu May 10 14:00:59 2018 +0530

----------------------------------------------------------------------
 .../TestNonTransactionalCarbonTable.scala       | 39 ++++++++++++++++++++
 .../loading/model/CarbonLoadModelBuilder.java   |  4 --
 2 files changed, 39 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/b2060c61/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 2f88c40..ca6ac3c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -17,6 +17,8 @@
 
 package org.apache.carbondata.spark.testsuite.createTable
 
+import java.sql.Timestamp
+import java.io.{File, FileFilter, IOException}
 import java.io.{File, FileFilter}
 import java.util
 
@@ -31,6 +33,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonUtil
+import org.apache.carbondata.sdk.file.{CarbonWriter, CarbonWriterBuilder, Field, Schema}
 import org.apache.carbondata.sdk.file.{AvroCarbonWriter, CarbonWriter, Field, Schema}
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -39,6 +42,10 @@ import org.apache.avro
 import org.apache.commons.lang.CharEncoding
 import tech.allegro.schema.json2avro.converter.JsonAvroConverter
 
+import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
+import org.apache.carbondata.sdk.file.{CarbonWriter, CarbonWriterBuilder, Field, Schema}
+
+
 class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
   var writerPath = new File(this.getClass.getResource("/").getPath
@@ -669,6 +676,38 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     cleanTestData()
   }
 
+  test("test custom  format for date and timestamp in sdk") {
+
+    cleanTestData()
+    var options = Map("dateformat" -> "dd-MM-yyyy" ,"timestampformat" -> "dd-MM-yyyy HH:mm:ss").asJava
+
+    val fields: Array[Field] = new Array[Field](4)
+    fields(0) = new Field("stringField", DataTypes.STRING)
+    fields(1) = new Field("intField", DataTypes.INT)
+    fields(2) = new Field("mydate", DataTypes.DATE)
+    fields(3) = new Field("mytime", DataTypes.TIMESTAMP)
+
+    val builder: CarbonWriterBuilder = CarbonWriter.builder.withSchema(new Schema(fields))
+      .outputPath(writerPath).isTransactionalTable(false).withLoadOptions(options)
+
+    val writer: CarbonWriter = builder.buildWriterForCSVInput
+    writer.write(Array("babu","1","02-01-2002","02-01-2002 01:01:00"));
+    writer.close()
+
+    assert(new File(writerPath).exists())
+
+    sql("DROP TABLE IF EXISTS sdkOutputTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(
+      Row("babu", 1, java.sql.Date.valueOf("2002-01-02"),Timestamp.valueOf("2002-01-02 01:01:00.0"))))
+    sql("DROP TABLE sdkOutputTable")
+    cleanTestData()
+
+  }
+
   test("test huge data write with one batch having bad record") {
 
     val exception =

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b2060c61/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
index 3385479..13dd75c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
@@ -80,10 +80,6 @@ public class CarbonLoadModelBuilder {
     // we have provided 'fileheader', so it hadoopConf can be null
     build(options, optionsFinal, model, null);
 
-
-    // set default values
-    model.setTimestampformat(CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
-    model.setDateFormat(CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT);
     model.setUseOnePass(Boolean.parseBoolean(Maps.getOrDefault(options, "onepass", "false")));
     model.setDictionaryServerHost(Maps.getOrDefault(options, "dicthost", null));
     try {