Posted to commits@carbondata.apache.org by ja...@apache.org on 2018/12/21 08:01:59 UTC

carbondata git commit: [CARBONDATA-3127] Fix the HiveExample & TestCarbonSerde exception

Repository: carbondata
Updated Branches:
  refs/heads/master 96b2ea364 -> bd752e9d5


[CARBONDATA-3127] Fix the HiveExample & TestCarbonSerde exception

This pull request fixes HiveExample and moves it to the examples module

This closes #3012
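
With the example now living under examples/spark2, it is exercised from the
examples CI instead of a separate hiveexample test. A condensed sketch of the
hook this commit adds to RunExamples (QueryTest supplies the sqlContext, and
TestQueryExecutor.warehouse points at the CI warehouse used as the table
location; the class name here is illustrative):

    import org.apache.spark.sql.test.TestQueryExecutor
    import org.apache.spark.sql.test.util.QueryTest

    import org.scalatest.BeforeAndAfterAll

    import org.apache.carbondata.examples.HiveExample

    // Sketch only: mirrors the test added to RunExamples in this commit.
    class RunHiveExampleSketch extends QueryTest with BeforeAndAfterAll {

      private val spark = sqlContext.sparkSession

      test("HiveExample") {
        // exampleBody creates the table, loads sample.csv, then re-reads it
        // through HiveEmbeddedServer2 over JDBC.
        HiveExample.exampleBody(spark, TestQueryExecutor.warehouse)
      }
    }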


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/bd752e9d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/bd752e9d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/bd752e9d

Branch: refs/heads/master
Commit: bd752e9d5aa17a8b047dc15f04eea6ecee9dac98
Parents: 96b2ea3
Author: Nicholas Jiang <pr...@163.com>
Authored: Fri Dec 21 04:14:09 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Fri Dec 21 16:01:42 2018 +0800

----------------------------------------------------------------------
 examples/flink/pom.xml                          |   2 +-
 examples/spark2/pom.xml                         |  14 +-
 examples/spark2/src/main/resources/sample.csv   |   3 +
 .../carbondata/examples/HiveExample.scala       | 209 +++++++++++++++++++
 .../carbondata/examples/util/ExampleUtils.scala |   5 -
 .../carbondata/examplesCI/RunExamples.scala     |   8 +-
 .../carbondata/hive/CarbonHiveInputSplit.java   |  35 +++-
 .../hive/MapredCarbonInputFormat.java           |  15 +-
 integration/hive/src/main/resources/data.csv    |   3 -
 .../carbondata/hiveexample/HiveExample.scala    | 197 -----------------
 .../apache/carbondata/hive/TestCarbonSerDe.java | 137 ++++++++++++
 .../apache/carbondata/hive/TestCarbonSerde.java | 133 ------------
 .../hiveexampleCI/RunHiveExampleTest.scala      |  43 ----
 pom.xml                                         |   4 +-
 14 files changed, 414 insertions(+), 394 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/flink/pom.xml
----------------------------------------------------------------------
diff --git a/examples/flink/pom.xml b/examples/flink/pom.xml
index 817007b..127b8fb 100644
--- a/examples/flink/pom.xml
+++ b/examples/flink/pom.xml
@@ -57,7 +57,7 @@
     </dependency>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-examples-spark2</artifactId>
+      <artifactId>carbondata-examples</artifactId>
       <version>${project.version}</version>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/spark2/pom.xml
----------------------------------------------------------------------
diff --git a/examples/spark2/pom.xml b/examples/spark2/pom.xml
index 48728ac..24ca4b1 100644
--- a/examples/spark2/pom.xml
+++ b/examples/spark2/pom.xml
@@ -26,8 +26,8 @@
     <relativePath>../../pom.xml</relativePath>
   </parent>
 
-  <artifactId>carbondata-examples-spark2</artifactId>
-  <name>Apache CarbonData :: Spark2 Examples</name>
+  <artifactId>carbondata-examples</artifactId>
+  <name>Apache CarbonData :: Examples</name>
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
@@ -36,6 +36,11 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-hive</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
       <artifactId>carbondata-spark2</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -81,6 +86,11 @@
       <version>${httpclient.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>${httpcore.version}</version>
+    </dependency>
+    <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/spark2/src/main/resources/sample.csv
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/resources/sample.csv b/examples/spark2/src/main/resources/sample.csv
new file mode 100644
index 0000000..34f96b6
--- /dev/null
+++ b/examples/spark2/src/main/resources/sample.csv
@@ -0,0 +1,3 @@
+ID,NAME,SALARY
+1,'liang',200000
+2,'anubhav',20000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
new file mode 100644
index 0000000..69678a8
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.examples
+
+import java.io.File
+import java.sql.{DriverManager, ResultSet, Statement}
+
+import org.apache.spark.sql.SparkSession
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples.util.ExampleUtils
+import org.apache.carbondata.hive.server.HiveEmbeddedServer2
+
+// scalastyle:off println
+object HiveExample {
+
+  private val driverName: String = "org.apache.hive.jdbc.HiveDriver"
+
+  def main(args: Array[String]) {
+    val carbonSession = ExampleUtils.createCarbonSession("HiveExample")
+    exampleBody(carbonSession, CarbonProperties.getStorePath
+      + CarbonCommonConstants.FILE_SEPARATOR
+      + CarbonCommonConstants.DATABASE_DEFAULT_NAME)
+    carbonSession.close()
+
+    System.exit(0)
+  }
+
+  def exampleBody(carbonSession: SparkSession, store: String): Unit = {
+    val logger = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+    val rootPath = new File(this.getClass.getResource("/").getPath
+      + "../../../..").getCanonicalPath
+
+    carbonSession.sql("""DROP TABLE IF EXISTS HIVE_CARBON_EXAMPLE""".stripMargin)
+
+    carbonSession.sql(
+      s"""
+         | CREATE TABLE HIVE_CARBON_EXAMPLE
+         | (ID int,NAME string,SALARY double)
+         | STORED BY 'carbondata'
+       """.stripMargin)
+
+    carbonSession.sql(
+      s"""
+         | LOAD DATA LOCAL INPATH '$rootPath/examples/spark2/src/main/resources/sample.csv'
+         | INTO TABLE HIVE_CARBON_EXAMPLE
+       """.stripMargin)
+
+    carbonSession.sql("SELECT * FROM HIVE_CARBON_EXAMPLE").show()
+
+    carbonSession.stop()
+
+    try {
+      Class.forName(driverName)
+    }
+    catch {
+      case classNotFoundException: ClassNotFoundException =>
+        classNotFoundException.printStackTrace()
+    }
+
+    val hiveEmbeddedServer2 = new HiveEmbeddedServer2()
+    hiveEmbeddedServer2.start()
+    val port = hiveEmbeddedServer2.getFreePort
+    val connection = DriverManager.getConnection(s"jdbc:hive2://localhost:$port/default", "", "")
+    val statement: Statement = connection.createStatement
+
+    logger.info(s"============HIVE CLI IS STARTED ON PORT $port ==============")
+
+    statement.execute(
+      s"""
+         | CREATE TABLE IF NOT EXISTS HIVE_CARBON_EXAMPLE
+         | (ID int, NAME string,SALARY double)
+         | ROW FORMAT SERDE 'org.apache.carbondata.hive.CarbonHiveSerDe'
+         | WITH SERDEPROPERTIES ('mapreduce.input.carboninputformat.databaseName'='default',
+         | 'mapreduce.input.carboninputformat.tableName'='HIVE_CARBON_EXAMPLE')
+       """.stripMargin)
+
+    statement.execute(
+      s"""
+         | ALTER TABLE HIVE_CARBON_EXAMPLE
+         | SET FILEFORMAT
+         | INPUTFORMAT \"org.apache.carbondata.hive.MapredCarbonInputFormat\"
+         | OUTPUTFORMAT \"org.apache.carbondata.hive.MapredCarbonOutputFormat\"
+         | SERDE \"org.apache.carbondata.hive.CarbonHiveSerDe\"
+       """.stripMargin)
+
+    statement
+      .execute(
+        "ALTER TABLE HIVE_CARBON_EXAMPLE SET LOCATION " +
+          s"'file:///$store/hive_carbon_example' ")
+
+    val resultSet: ResultSet = statement.executeQuery("SELECT * FROM HIVE_CARBON_EXAMPLE")
+
+    var rowsFetched = 0
+    var resultId = ""
+    var resultName = ""
+    var resultSalary = ""
+
+    while (resultSet.next) {
+      if (rowsFetched == 0) {
+        println("+---+" + "+-------+" + "+--------------+")
+        println("| ID|" + "| NAME |" + "| SALARY        |")
+
+        println("+---+" + "+-------+" + "+--------------+")
+
+        resultId = resultSet.getString("id")
+        resultName = resultSet.getString("name")
+        resultSalary = resultSet.getString("salary")
+
+        println(s"| $resultId |" + s"| $resultName |" + s"| $resultSalary  |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      else {
+        resultId = resultSet.getString("ID")
+        resultName = resultSet.getString("NAME")
+        resultSalary = resultSet.getString("SALARY")
+
+        println(s"| $resultId |" + s"| $resultName |" + s"| $resultSalary   |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      rowsFetched = rowsFetched + 1
+    }
+    println(s"******Total Number Of Rows Fetched ****** $rowsFetched")
+    assert(rowsFetched == 2)
+
+    logger.info("Fetching the Individual Columns ")
+
+    // fetching the separate columns
+    var individualColRowsFetched = 0
+
+    val resultIndividualCol = statement.executeQuery("SELECT NAME FROM HIVE_CARBON_EXAMPLE")
+
+    while (resultIndividualCol.next) {
+      if (individualColRowsFetched == 0) {
+        println("+--------------+")
+        println("| NAME         |")
+
+        println("+---++---------+")
+
+        resultName = resultIndividualCol.getString("name")
+
+        println(s"| $resultName    |")
+        println("+---+" + "+---------+")
+      }
+      else {
+        resultName = resultIndividualCol.getString("NAME")
+
+        println(s"| $resultName      |")
+        println("+---+" + "+---------+")
+      }
+      individualColRowsFetched = individualColRowsFetched + 1
+    }
+    println(" ********** Total Rows Fetched When Quering The Individual Columns **********" +
+      s"$individualColRowsFetched")
+    assert(individualColRowsFetched == 2)
+
+    logger.info("Fetching the Out Of Order Columns ")
+
+    val resultOutOfOrderCol = statement
+      .executeQuery("SELECT SALARY,ID,NAME FROM HIVE_CARBON_EXAMPLE")
+    var outOfOrderColFetched = 0
+
+    while (resultOutOfOrderCol.next()) {
+      if (outOfOrderColFetched == 0) {
+        println("+---+" + "+-------+" + "+--------------+")
+        println("| Salary|" + "| ID |" + "| NAME        |")
+
+        println("+---+" + "+-------+" + "+--------------+")
+
+        resultId = resultOutOfOrderCol.getString("id")
+        resultName = resultOutOfOrderCol.getString("name")
+        resultSalary = resultOutOfOrderCol.getString("salary")
+
+        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName  |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      else {
+        resultId = resultOutOfOrderCol.getString("ID")
+        resultName = resultOutOfOrderCol.getString("NAME")
+        resultSalary = resultOutOfOrderCol.getString("SALARY")
+
+        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName   |")
+        println("+---+" + "+-------+" + "+--------------+")
+      }
+      outOfOrderColFetched = outOfOrderColFetched + 1
+    }
+    println(" ********** Total Rows Fetched When Quering The Out Of Order Columns **********" +
+      s"$outOfOrderColFetched")
+    assert(outOfOrderColFetched == 2)
+
+    hiveEmbeddedServer2.stop()
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
index e12c2f9..3064d69 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -24,9 +24,6 @@ import org.apache.spark.sql.{SaveMode, SparkSession}
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
-
-// scalastyle:off println
-
 object ExampleUtils {
 
   def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
@@ -113,5 +110,3 @@ object ExampleUtils {
     spark.sql(s"DROP TABLE IF EXISTS $tableName")
   }
 }
-// scalastyle:on println
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala b/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
index 25ef9af..c5db40b 100644
--- a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
+++ b/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
@@ -17,7 +17,9 @@
 
 package org.apache.carbondata.examplesCI
 
+import org.apache.spark.sql.test.TestQueryExecutor
 import org.apache.spark.sql.test.util.QueryTest
+
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.examples._
@@ -123,4 +125,8 @@ class RunExamples extends QueryTest with BeforeAndAfterAll {
   test("CarbonReaderExample") {
     CarbonReaderExample.main(null)
   }
-}
\ No newline at end of file
+
+  test("HiveExample") {
+    HiveExample.exampleBody(spark, TestQueryExecutor.warehouse)
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
index 9171470..a473303 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
@@ -29,6 +29,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.block.BlockletInfos;
 import org.apache.carbondata.core.datastore.block.Distributable;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
+import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.mutate.UpdateVO;
 import org.apache.carbondata.core.util.CarbonProperties;
@@ -68,6 +69,8 @@ public class CarbonHiveInputSplit extends FileSplit
 
   private List<UpdateVO> invalidTimestampsList;
 
+  private BlockletDetailInfo detailInfo;
+
   public CarbonHiveInputSplit() {
     segmentId = null;
     taskId = "0";
@@ -124,9 +127,12 @@ public class CarbonHiveInputSplit extends FileSplit
       BlockletInfos blockletInfos =
           new BlockletInfos(split.getNumberOfBlocklets(), 0, split.getNumberOfBlocklets());
       try {
-        tableBlockInfoList.add(
-            new TableBlockInfo(split.getPath().toString(), split.getStart(), split.getSegmentId(),
-                split.getLocations(), split.getLength(), blockletInfos, split.getVersion(), null));
+        TableBlockInfo blockInfo = new TableBlockInfo(split.getPath().toString(), split.getStart(),
+                split.getSegmentId(), split.getLocations(), split.getLength(), blockletInfos,
+                split.getVersion(), null);
+        blockInfo.setDetailInfo(split.getDetailInfo());
+        blockInfo.setBlockOffset(split.getDetailInfo().getBlockFooterOffset());
+        tableBlockInfoList.add(blockInfo);
       } catch (IOException e) {
         throw new RuntimeException("fail to get location of split: " + split, e);
       }
@@ -138,9 +144,13 @@ public class CarbonHiveInputSplit extends FileSplit
     BlockletInfos blockletInfos =
         new BlockletInfos(inputSplit.getNumberOfBlocklets(), 0, inputSplit.getNumberOfBlocklets());
     try {
-      return new TableBlockInfo(inputSplit.getPath().toString(), inputSplit.getStart(),
+      TableBlockInfo blockInfo =
+              new TableBlockInfo(inputSplit.getPath().toString(), inputSplit.getStart(),
           inputSplit.getSegmentId(), inputSplit.getLocations(), inputSplit.getLength(),
           blockletInfos, inputSplit.getVersion(), null);
+      blockInfo.setDetailInfo(inputSplit.getDetailInfo());
+      blockInfo.setBlockOffset(inputSplit.getDetailInfo().getBlockFooterOffset());
+      return blockInfo;
     } catch (IOException e) {
       throw new RuntimeException("fail to get location of split: " + inputSplit, e);
     }
@@ -160,6 +170,11 @@ public class CarbonHiveInputSplit extends FileSplit
     for (int i = 0; i < numInvalidSegment; i++) {
       invalidSegments.add(in.readUTF());
     }
+    boolean detailInfoExists = in.readBoolean();
+    if (detailInfoExists) {
+      detailInfo = new BlockletDetailInfo();
+      detailInfo.readFields(in);
+    }
     this.numberOfBlocklets = in.readInt();
   }
 
@@ -172,6 +187,10 @@ public class CarbonHiveInputSplit extends FileSplit
     for (String invalidSegment : invalidSegments) {
       out.writeUTF(invalidSegment);
     }
+    out.writeBoolean(detailInfo != null);
+    if (detailInfo != null) {
+      detailInfo.write(out);
+    }
     out.writeInt(numberOfBlocklets);
   }
 
@@ -304,4 +323,12 @@ public class CarbonHiveInputSplit extends FileSplit
   public Map<String, String> getBlockStorageIdMap() {
     return blockStorageIdMap;
   }
+
+  public BlockletDetailInfo getDetailInfo() {
+    return detailInfo;
+  }
+
+  public void setDetailInfo(BlockletDetailInfo detailInfo) {
+    this.detailInfo = detailInfo;
+  }
 }
\ No newline at end of file
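
The readFields/write change above uses the standard Hadoop Writable
presence-flag idiom: a boolean is written ahead of the optional
BlockletDetailInfo so the reader knows whether to materialize it. A minimal
standalone sketch of the idiom (class and field names are illustrative, not
CarbonData API):

    import java.io.{DataInput, DataOutput}

    import org.apache.hadoop.io.Writable

    // Stand-in for the optional nested object (e.g. BlockletDetailInfo).
    class DetailSketch extends Writable {
      var footerOffset: Long = 0L
      override def write(out: DataOutput): Unit = out.writeLong(footerOffset)
      override def readFields(in: DataInput): Unit = {
        footerOffset = in.readLong()
      }
    }

    // Carrier whose optional field is guarded by a presence flag.
    class SplitSketch extends Writable {
      var detail: DetailSketch = _ // null when no detail info is attached

      override def write(out: DataOutput): Unit = {
        out.writeBoolean(detail != null) // flag precedes the payload
        if (detail != null) detail.write(out)
      }

      override def readFields(in: DataInput): Unit = {
        detail = if (in.readBoolean()) {
          val d = new DetailSketch()
          d.readFields(in)
          d
        } else null
      }
    }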

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index 9382922..634c116 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -22,6 +22,7 @@ import java.util.List;
 
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.exception.InvalidConfigurationException;
+import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.schema.SchemaReader;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
@@ -106,9 +107,17 @@ public class MapredCarbonInputFormat extends CarbonTableInputFormat<ArrayWritabl
     CarbonInputSplit split;
     for (int i = 0; i < splitList.size(); i++) {
       split = (CarbonInputSplit) splitList.get(i);
-      splits[i] = new CarbonHiveInputSplit(split.getSegmentId(), split.getPath(), split.getStart(),
-          split.getLength(), split.getLocations(), split.getNumberOfBlocklets(), split.getVersion(),
-          split.getBlockStorageIdMap());
+      CarbonHiveInputSplit inputSplit = new CarbonHiveInputSplit(split.getSegmentId(),
+              split.getPath(), split.getStart(), split.getLength(),
+              split.getLocations(), split.getNumberOfBlocklets(),
+              split.getVersion(), split.getBlockStorageIdMap());
+      BlockletDetailInfo info = new BlockletDetailInfo();
+      info.setBlockSize(split.getLength());
+      info.setBlockFooterOffset(split.getDetailInfo().getBlockFooterOffset());
+      info.setVersionNumber(split.getVersion().number());
+      info.setUseMinMaxForPruning(false);
+      inputSplit.setDetailInfo(info);
+      splits[i] = inputSplit;
     }
     return splits;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/main/resources/data.csv
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/resources/data.csv b/integration/hive/src/main/resources/data.csv
deleted file mode 100644
index 34f96b6..0000000
--- a/integration/hive/src/main/resources/data.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-ID,NAME,SALARY
-1,'liang',200000
-2,'anubhav',20000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala b/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
deleted file mode 100644
index 3638321..0000000
--- a/integration/hive/src/main/scala/org/apache/carbondata/hiveexample/HiveExample.scala
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.hiveexample
-
-import java.io.File
-import java.sql.{DriverManager, ResultSet, Statement}
-
-import org.apache.spark.sql.SparkSession
-
-import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.hive.server.HiveEmbeddedServer2
-
-// scalastyle:off println
-object HiveExample {
-
-  private val driverName: String = "org.apache.hive.jdbc.HiveDriver"
-
-  def main(args: Array[String]) {
-    val rootPath = new File(this.getClass.getResource("/").getPath
-                            + "../../../..").getCanonicalPath
-    val store = s"$rootPath/integration/hive/target/store"
-    val warehouse = s"$rootPath/integration/hive/target/warehouse"
-    val metaStore_Db = s"$rootPath/integration/hive/target/carbon_metaStore_db"
-    val logger = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
-    var resultId = ""
-    var resultName = ""
-    var resultSalary = ""
-
-
-    import org.apache.spark.sql.CarbonSession._
-
-    val carbonSession = SparkSession
-      .builder()
-      .master("local")
-      .appName("HiveExample")
-      .config("carbonSession.sql.warehouse.dir", warehouse).enableHiveSupport()
-      .getOrCreateCarbonSession(
-        store, metaStore_Db)
-
-    carbonSession.sql("""DROP TABLE IF EXISTS HIVE_CARBON_EXAMPLE""".stripMargin)
-
-    carbonSession
-      .sql(
-        """CREATE TABLE HIVE_CARBON_EXAMPLE (ID int,NAME string,SALARY double) STORED BY
-          |'CARBONDATA' """
-          .stripMargin)
-
-    carbonSession.sql(
-      s"""
-           LOAD DATA LOCAL INPATH '$rootPath/integration/hive/src/main/resources/data.csv' INTO
-           TABLE
-         HIVE_CARBON_EXAMPLE
-           """)
-    carbonSession.sql("SELECT * FROM HIVE_CARBON_EXAMPLE").show()
-
-    carbonSession.stop()
-
-    try {
-      Class.forName(driverName)
-    }
-    catch {
-      case classNotFoundException: ClassNotFoundException =>
-        classNotFoundException.printStackTrace()
-    }
-
-    val hiveEmbeddedServer2 = new HiveEmbeddedServer2()
-    hiveEmbeddedServer2.start()
-    val port = hiveEmbeddedServer2.getFreePort
-    val connection = DriverManager.getConnection(s"jdbc:hive2://localhost:$port/default", "", "")
-    val statement: Statement = connection.createStatement
-
-    logger.info(s"============HIVE CLI IS STARTED ON PORT $port ==============")
-
-    statement.execute("CREATE TABLE IF NOT EXISTS " + "HIVE_CARBON_EXAMPLE " +
-                      " (ID int, NAME string,SALARY double)")
-    statement
-      .execute(
-        "ALTER TABLE HIVE_CARBON_EXAMPLE SET FILEFORMAT INPUTFORMAT \"org.apache.carbondata." +
-        "hive.MapredCarbonInputFormat\"OUTPUTFORMAT \"org.apache.carbondata.hive." +
-        "MapredCarbonOutputFormat\"SERDE \"org.apache.carbondata.hive." +
-        "CarbonHiveSerDe\" ")
-
-    statement
-      .execute(
-        "ALTER TABLE HIVE_CARBON_EXAMPLE SET LOCATION " +
-        s"'file:///$store/hive_carbon_example' ")
-
-    val sql = "SELECT * FROM HIVE_CARBON_EXAMPLE"
-
-    val resultSet: ResultSet = statement.executeQuery(sql)
-
-    var rowsFetched = 0
-
-    while (resultSet.next) {
-      if (rowsFetched == 0) {
-        println("+---+" + "+-------+" + "+--------------+")
-        println("| ID|" + "| NAME |" + "| SALARY        |")
-
-        println("+---+" + "+-------+" + "+--------------+")
-
-        resultId = resultSet.getString("id")
-        resultName = resultSet.getString("name")
-        resultSalary = resultSet.getString("salary")
-
-        println(s"| $resultId |" + s"| $resultName |" + s"| $resultSalary  |")
-        println("+---+" + "+-------+" + "+--------------+")
-      }
-      else {
-        resultId = resultSet.getString("ID")
-        resultName = resultSet.getString("NAME")
-        resultSalary = resultSet.getString("SALARY")
-
-        println(s"| $resultId |" + s"| $resultName |" + s"| $resultSalary   |")
-        println("+---+" + "+-------+" + "+--------------+")
-      }
-      rowsFetched = rowsFetched + 1
-    }
-    println(s"******Total Number Of Rows Fetched ****** $rowsFetched")
-
-    logger.info("Fetching the Individual Columns ")
-
-    // fetching the separate columns
-    var individualColRowsFetched = 0
-
-    val resultIndividualCol = statement.executeQuery("SELECT NAME FROM HIVE_CARBON_EXAMPLE")
-
-    while (resultIndividualCol.next) {
-      if (individualColRowsFetched == 0) {
-        println("+--------------+")
-        println("| NAME         |")
-
-        println("+---++---------+")
-
-        resultName = resultIndividualCol.getString("name")
-
-        println(s"| $resultName    |")
-        println("+---+" + "+---------+")
-      }
-      else {
-        resultName = resultIndividualCol.getString("NAME")
-
-        println(s"| $resultName      |")
-        println("+---+" + "+---------+")
-      }
-      individualColRowsFetched = individualColRowsFetched + 1
-    }
-    println(" ********** Total Rows Fetched When Quering The Individual Column **********" +
-            s"$individualColRowsFetched")
-
-    logger.info("Fetching the Out Of Order Columns ")
-
-    val resultOutOfOrderCol = statement
-      .executeQuery("SELECT SALARY,ID,NAME FROM HIVE_CARBON_EXAMPLE")
-    var outOfOrderColFetched = 0
-
-    while (resultOutOfOrderCol.next()) {
-      if (outOfOrderColFetched == 0) {
-        println("+---+" + "+-------+" + "+--------------+")
-        println("| Salary|" + "| ID |" + "| NAME        |")
-
-        println("+---+" + "+-------+" + "+--------------+")
-
-        resultId = resultOutOfOrderCol.getString("id")
-        resultName = resultOutOfOrderCol.getString("name")
-        resultSalary = resultOutOfOrderCol.getString("salary")
-
-        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName  |")
-        println("+---+" + "+-------+" + "+--------------+")
-      }
-      else {
-        resultId = resultOutOfOrderCol.getString("ID")
-        resultName = resultOutOfOrderCol.getString("NAME")
-        resultSalary = resultOutOfOrderCol.getString("SALARY")
-
-        println(s"| $resultSalary |" + s"| $resultId |" + s"| $resultName   |")
-        println("+---+" + "+-------+" + "+--------------+")
-      }
-      outOfOrderColFetched = outOfOrderColFetched + 1
-    }
-    hiveEmbeddedServer2.stop()
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerDe.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerDe.java b/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerDe.java
new file mode 100644
index 0000000..33337d3
--- /dev/null
+++ b/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerDe.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.hive;
+
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.*;
+
+import org.apache.log4j.Logger;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.carbondata.common.logging.LogServiceFactory;
+
+public class TestCarbonSerDe {
+
+    private static final Logger LOGGER =
+            LogServiceFactory.getLogService(TestCarbonSerDe.class.getCanonicalName());
+
+    @Test
+    public void testCarbonHiveSerDe() throws Throwable {
+        // Create the SerDe
+        LOGGER.info("test: testCarbonHiveSerDe");
+
+        final CarbonHiveSerDe carbonHiveSerDe = new CarbonHiveSerDe();
+        final Configuration configuration = new Configuration();
+        final Properties tblProperties = createProperties();
+        SerDeUtils.initializeSerDe(carbonHiveSerDe, configuration, tblProperties, null);
+
+        // Data
+        final Writable[] arr = new Writable[7];
+
+        // Primitive types
+        arr[0] = new ShortWritable((short) 456);
+        arr[1] = new IntWritable(789);
+        arr[2] = new LongWritable(1000L);
+        arr[3] = new DoubleWritable(5.3);
+        arr[4] = new HiveDecimalWritable(HiveDecimal.create(1));
+        arr[5] = new Text("CarbonSerDe Binary".getBytes("UTF-8"));
+
+        final Writable[] arrayContainer = new Writable[1];
+        final Writable[] array = new Writable[5];
+        for (int i = 0; i < 5; ++i) {
+            array[i] = new IntWritable(i);
+        }
+        arrayContainer[0] = new ArrayWritable(Writable.class, array);
+        arr[6] = new ArrayWritable(Writable.class, arrayContainer);
+
+        final ArrayWritable arrayWritable = new ArrayWritable(Writable.class, arr);
+        // Test
+        deserializeAndSerializeLazySimple(carbonHiveSerDe, arrayWritable);
+
+        LOGGER.info("test: testCarbonHiveSerDe - OK");
+    }
+
+    private void deserializeAndSerializeLazySimple(final CarbonHiveSerDe serDe,
+                                                   final ArrayWritable t) throws SerDeException {
+
+        // Get the row structure
+        final StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
+
+        // Deserialize
+        final Object row = serDe.deserialize(t);
+        Assert.assertEquals("deserialization gives the wrong object class", row.getClass(),
+                ArrayWritable.class);
+        Assert.assertEquals("size correct after deserialization",
+                serDe.getSerDeStats().getRawDataSize(), t.get().length);
+        Assert.assertEquals("deserialization gives the wrong object", t, row);
+
+        // Serialize
+        final ArrayWritable serializedArr = (ArrayWritable) serDe.serialize(row, oi);
+        Assert.assertEquals("size correct after serialization", serDe.getSerDeStats().getRawDataSize(),
+                serializedArr.get().length);
+        Assert.assertTrue("serialized object should be equal to starting object",
+                arrayWritableEquals(t, serializedArr));
+    }
+
+    private Properties createProperties() {
+        final Properties tbl = new Properties();
+
+        // Set the configuration parameters
+        tbl.setProperty("columns", "aShort,aInt,aLong,aDouble,aDecimal,aString,aList");
+        tbl.setProperty("columns.types",
+                "smallint:int:bigint:double:decimal:string:array<int>");
+        tbl.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
+        return tbl;
+    }
+
+    private static boolean arrayWritableEquals(final ArrayWritable a1, final ArrayWritable a2) {
+        final Writable[] a1Arr = a1.get();
+        final Writable[] a2Arr = a2.get();
+
+        if (a1Arr.length != a2Arr.length) {
+            return false;
+        }
+
+        for (int i = 0; i < a1Arr.length; ++i) {
+            if (a1Arr[i] instanceof ArrayWritable) {
+                if (!(a2Arr[i] instanceof ArrayWritable)) {
+                    return false;
+                }
+                if (!arrayWritableEquals((ArrayWritable) a1Arr[i], (ArrayWritable) a2Arr[i])) {
+                    return false;
+                }
+            } else {
+                if (!a1Arr[i].equals(a2Arr[i])) {
+                    return false;
+                }
+            }
+
+        }
+        return true;
+    }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java b/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
deleted file mode 100644
index 757a342..0000000
--- a/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
+++ /dev/null
@@ -1,133 +0,0 @@
-///*
-// * Licensed to the Apache Software Foundation (ASF) under one or more
-// * contributor license agreements.  See the NOTICE file distributed with
-// * this work for additional information regarding copyright ownership.
-// * The ASF licenses this file to You under the Apache License, Version 2.0
-// * (the "License"); you may not use this file except in compliance with
-// * the License.  You may obtain a copy of the License at
-// *
-// *    http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// */
-//package org.apache.carbondata.hive;
-//
-//import junit.framework.TestCase;
-//import org.apache.hadoop.conf.Configuration;
-//import org.apache.hadoop.hive.common.type.HiveDecimal;
-//import org.apache.hadoop.hive.serde2.SerDeException;
-//import org.apache.hadoop.hive.serde2.SerDeUtils;
-//import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-//import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-//import org.apache.hadoop.hive.serde2.io.ShortWritable;
-//import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-//import org.apache.hadoop.io.*;
-//import org.junit.Test;
-//
-//import java.util.Properties;
-//
-//public class TestCarbonSerde extends TestCase {
-//  @Test
-//  public void testCarbonHiveSerDe() throws Throwable {
-//    try {
-//      // Create the SerDe
-//      System.out.println("test: testCarbonHiveSerDe");
-//
-//      final CarbonHiveSerDe serDe = new CarbonHiveSerDe();
-//      final Configuration conf = new Configuration();
-//      final Properties tbl = createProperties();
-//      SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
-//
-//      // Data
-//      final Writable[] arr = new Writable[7];
-//
-//      //primitive types
-//      arr[0] = new ShortWritable((short) 456);
-//      arr[1] = new IntWritable(789);
-//      arr[2] = new LongWritable(1000l);
-//      arr[3] = new DoubleWritable((double) 5.3);
-//      arr[4] = new HiveDecimalWritable(HiveDecimal.create(1));
-//      arr[5] = new Text("carbonSerde binary".getBytes("UTF-8"));
-//
-//      final Writable[] arrayContainer = new Writable[1];
-//      final Writable[] array = new Writable[5];
-//      for (int i = 0; i < 5; ++i) {
-//        array[i] = new IntWritable(i);
-//      }
-//      arrayContainer[0] = new ArrayWritable(Writable.class, array);
-//      arr[6] = new ArrayWritable(Writable.class, arrayContainer);
-//
-//      final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
-//      // Test
-//      deserializeAndSerializeLazySimple(serDe, arrWritable);
-//      System.out.println("test: testCarbonHiveSerDe - OK");
-//
-//    } catch (final Throwable e) {
-//      e.printStackTrace();
-//      throw e;
-//    }
-//  }
-//
-//  private void deserializeAndSerializeLazySimple(final CarbonHiveSerDe serDe,
-//      final ArrayWritable t) throws SerDeException {
-//
-//    // Get the row structure
-//    final StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
-//
-//    // Deserialize
-//    final Object row = serDe.deserialize(t);
-//    assertEquals("deserialization gives the wrong object class", row.getClass(),
-//        ArrayWritable.class);
-//    assertEquals("size correct after deserialization",
-//        serDe.getSerDeStats().getRawDataSize(), t.get().length);
-//    assertEquals("deserialization gives the wrong object", t, row);
-//
-//    // Serialize
-//    final ArrayWritable serializedArr = (ArrayWritable) serDe.serializeStartKey(row, oi);
-//    assertEquals("size correct after serialization", serDe.getSerDeStats().getRawDataSize(),
-//        serializedArr.get().length);
-//    assertTrue("serialized object should be equal to starting object",
-//        arrayWritableEquals(t, serializedArr));
-//  }
-//
-//  private Properties createProperties() {
-//    final Properties tbl = new Properties();
-//
-//    // Set the configuration parameters
-//    tbl.setProperty("columns", "ashort,aint,along,adouble,adecimal,astring,alist");
-//    tbl.setProperty("columns.types",
-//        "smallint:int:bigint:double:decimal:string:array<int>");
-//    tbl.setProperty(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_NULL_FORMAT, "NULL");
-//    return tbl;
-//  }
-//
-//  public static boolean arrayWritableEquals(final ArrayWritable a1, final ArrayWritable a2) {
-//    final Writable[] a1Arr = a1.get();
-//    final Writable[] a2Arr = a2.get();
-//
-//    if (a1Arr.length != a2Arr.length) {
-//      return false;
-//    }
-//
-//    for (int i = 0; i < a1Arr.length; ++i) {
-//      if (a1Arr[i] instanceof ArrayWritable) {
-//        if (!(a2Arr[i] instanceof ArrayWritable)) {
-//          return false;
-//        }
-//        if (!arrayWritableEquals((ArrayWritable) a1Arr[i], (ArrayWritable) a2Arr[i])) {
-//          return false;
-//        }
-//      } else {
-//        if (!a1Arr[i].equals(a2Arr[i])) {
-//          return false;
-//        }
-//      }
-//
-//    }
-//    return true;
-//  }
-//}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/integration/hive/src/test/scala/org/apache/carbondata/hiveexampleCI/RunHiveExampleTest.scala
----------------------------------------------------------------------
diff --git a/integration/hive/src/test/scala/org/apache/carbondata/hiveexampleCI/RunHiveExampleTest.scala b/integration/hive/src/test/scala/org/apache/carbondata/hiveexampleCI/RunHiveExampleTest.scala
deleted file mode 100644
index 92f7374..0000000
--- a/integration/hive/src/test/scala/org/apache/carbondata/hiveexampleCI/RunHiveExampleTest.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hiveexampleCI
-
-import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.hiveexample.HiveExample
-
-class RunHiveExampleTest extends QueryTest with BeforeAndAfterAll {
-
-  private val spark = sqlContext.sparkSession
-
-  override def beforeAll: Unit = {
-    CarbonProperties.getInstance().addProperty(
-      CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-      CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-    CarbonProperties.getInstance().addProperty(
-      CarbonCommonConstants.CARBON_DATE_FORMAT,
-      CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
-  }
-
-  test("HiveExample") {
-    HiveExample.main(null)
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bd752e9d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c8c0fa4..1d69df3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -111,8 +111,8 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <snappy.version>1.1.2.6</snappy.version>
     <hadoop.version>2.7.2</hadoop.version>
-    <httpclient.version>4.2.5</httpclient.version>
-    <httpcore.version>${httpclient.version}</httpcore.version>
+    <httpclient.version>4.3.4</httpclient.version>
+    <httpcore.version>4.3-alpha1</httpcore.version>
     <scala.binary.version>2.11</scala.binary.version>
     <scala.version>2.11.8</scala.version>
     <hadoop.deps.scope>compile</hadoop.deps.scope>