Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/12/20 10:01:28 UTC

carbondata git commit: [CARBONDATA-1906] Update registerTempTable method because it was marked deprecated

Repository: carbondata
Updated Branches:
  refs/heads/master 15f04c35c -> 410df072d


[CARBONDATA-1906] Update registerTempTable method because it was marked deprecated

Replace the registerTempTable method with createOrReplaceTempView, because registerTempTable was marked deprecated

This closes #1676
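
For context, the patch swaps Spark's deprecated DataFrame.registerTempTable for createOrReplaceTempView, which registers the same kind of session-scoped temporary view under a given name. A minimal, self-contained sketch of the replacement (the object name, local[*] master, and sample data below are illustrative and not part of the patch; the (c1, c2) DataFrame mirrors the test suites touched by this commit):

  import org.apache.spark.sql.SparkSession

  object TempViewExample {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder()
        .appName("TempViewExample")
        .master("local[*]")
        .getOrCreate()
      import spark.implicits._

      // Sample DataFrame, analogous to the (c1, c2) data used in the tests
      val df = spark.sparkContext.parallelize(1 to 1000)
        .map(x => (x.toString, (x + 100).toString))
        .toDF("c1", "c2")

      // Deprecated since Spark 2.0:
      // df.registerTempTable("sparkunion")

      // Replacement used in this commit: creates (or replaces) a
      // session-scoped temporary view resolvable by name in SQL
      df.createOrReplaceTempView("sparkunion")

      spark.sql("SELECT count(*) FROM sparkunion").show()

      spark.stop()
    }
  }

The view name resolves in spark.sql exactly as it did with registerTempTable, so the change in the diff below is a one-line substitution at each call site.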


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/410df072
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/410df072
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/410df072

Branch: refs/heads/master
Commit: 410df072d842f38bab7316511b0abb9752bd3461
Parents: 15f04c3
Author: xubo245 <60...@qq.com>
Authored: Tue Dec 19 15:20:51 2017 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Wed Dec 20 18:01:19 2017 +0800

----------------------------------------------------------------------
 .../org/apache/carbondata/examples/CompareTest.scala      | 10 +++-------
 .../spark/testsuite/allqueries/AllDataTypesTestCase.scala |  2 +-
 .../testsuite/allqueries/MeasureOnlyTableTestCases.scala  |  2 +-
 .../apache/spark/carbondata/CarbonDataSourceSuite.scala   |  2 +-
 4 files changed, 6 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/410df072/examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala
index 52ccd5f..414f8e1 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CompareTest.scala
@@ -21,8 +21,6 @@ import java.io.File
 import java.text.SimpleDateFormat
 import java.util.Date
 
-import scala.util.Random
-
 import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}
 import org.apache.spark.sql.types._
 
@@ -52,7 +50,7 @@ object CompareTest {
   // +-------------+-----------+-------------+-------------+------------+
   // | country     | string    | 1103        | dimension   | yes        |
   // +-------------+-----------+-------------+-------------+------------+
-  // | planet      | string    | 100,007     | dimension   | yes        |
+  // | planet      | string    | 10,007      | dimension   | yes        |
   // +-------------+-----------+-------------+-------------+------------+
   // | id          | string    | 10,000,000  | dimension   | no         |
   // +-------------+-----------+-------------+-------------+------------+
@@ -67,7 +65,6 @@ object CompareTest {
   // | m5          | decimal   | NA          | measure     | no         |
   // +-------------+-----------+-------------+-------------+------------+
   private def generateDataFrame(spark: SparkSession): DataFrame = {
-    val r = new Random()
     val rdd = spark.sparkContext
         .parallelize(1 to 10 * 1000 * 1000, 4)
         .map { x =>
@@ -257,7 +254,7 @@ object CompareTest {
         .partitionBy("partitionCol")
         .mode(SaveMode.Overwrite)
         .parquet(table)
-    spark.read.parquet(table).registerTempTable(table)
+    spark.read.parquet(table).createOrReplaceTempView(table)
   }
 
   private def loadOrcTable(spark: SparkSession, input: DataFrame, table: String): Double = time {
@@ -265,7 +262,7 @@ object CompareTest {
     input.write
         .mode(SaveMode.Overwrite)
         .orc(table)
-    spark.read.orc(table).registerTempTable(table)
+    spark.read.orc(table).createOrReplaceTempView(table)
   }
 
   private def loadCarbonTable(spark: SparkSession, input: DataFrame, tableName: String): Double = {
@@ -337,7 +334,6 @@ object CompareTest {
   private def runTest(spark: SparkSession, table1: String, table2: String): Unit = {
     val formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
     val date = new Date
-    val timestamp = date.getTime
     // run queries on parquet and carbon
     val table1Result: Array[(Double, Array[Row])] = runQueries(spark, table1)
     // do GC and sleep for some time before running next table

http://git-wip-us.apache.org/repos/asf/carbondata/blob/410df072/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
index 699cf07..e739091 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCase.scala
@@ -1085,7 +1085,7 @@ class AllDataTypesTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists carbonunion")
     import sqlContext.implicits._
     val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x+"", (x+100)+"")).toDF("c1", "c2")
-    df.registerTempTable("sparkunion")
+    df.createOrReplaceTempView("sparkunion")
     df.write
       .format("carbondata")
       .mode(SaveMode.Overwrite)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/410df072/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
index 0a3869c..ab75c0a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/MeasureOnlyTableTestCases.scala
@@ -376,7 +376,7 @@ class MeasureOnlyTableTestCases extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists carbonunion")
     import sqlContext.implicits._
     val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x, (x+100))).toDF("c1", "c2")
-    df.registerTempTable("sparkunion")
+    df.createOrReplaceTempView("sparkunion")
     df.write
       .format("carbondata")
       .mode(SaveMode.Overwrite)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/410df072/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
index cf465fb..0f934cb 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
@@ -147,7 +147,7 @@ class CarbonDataSourceSuite extends Spark2QueryTest with BeforeAndAfterAll {
     sql("drop table if exists sparkunion")
     import sqlContext.implicits._
     val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x+"", (x+100)+"")).toDF("c1", "c2")
-    df.registerTempTable("sparkunion")
+    df.createOrReplaceTempView("sparkunion")
     df.write
       .format("carbondata")
       .mode(SaveMode.Overwrite)