Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/05/02 14:13:02 UTC

[38/50] [abbrv] incubator-carbondata git commit: fix metaStorePath with hive-site.xml

fix metaStorePath with hive-site.xml
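
For context, the effect of this patch is to resolve the Hive metastore connection URL inside the builder, before the SparkContext and SparkSession are created, and to also consult a hive-site.xml found on the classpath. The resulting lookup order is: an explicitly supplied option, then a value from a classpath hive-site.xml, and only then a local Derby database derived from the user-supplied metaStorePath. Below is a minimal, standalone sketch of that fallback; the helper name resolveMetastoreUrl is illustrative only, and the patch itself uses Spark's internal Utils.getContextOrSparkClassLoader together with CarbonCommonConstants.HIVE_CONNECTION_URL.

  import java.io.File
  import org.apache.hadoop.conf.Configuration

  // Illustrative helper, not part of the patch: resolve the Hive metastore
  // connection URL in the same order the builder now does.
  def resolveMetastoreUrl(options: Map[String, String], metaStorePath: String): String = {
    // Stands in for CarbonCommonConstants.HIVE_CONNECTION_URL (the Hive JDO connection URL key).
    val key = "javax.jdo.option.ConnectionURL"
    val hadoopConf = new Configuration()
    // The patch loads hive-site.xml via Spark's context/class loader; a plain
    // context class loader lookup is the equivalent for this sketch.
    Option(Thread.currentThread().getContextClassLoader.getResource("hive-site.xml"))
      .foreach(hadoopConf.addResource)
    options.get(key)
      .orElse(Option(hadoopConf.get(key)))
      .getOrElse {
        // Fall back to an embedded Derby metastore under metaStorePath.
        val hiveMetaStoreDB = new File(metaStorePath).getCanonicalPath + "/metastore_db"
        s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"
      }
  }

This logic runs when a session is requested through the builder extension, e.g. SparkSession.builder().getOrCreateCarbonSession(storePath, metaStorePath), so a hive-site.xml on the driver classpath now takes precedence over the derived metastore_db path.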


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/0f3b6e45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/0f3b6e45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/0f3b6e45

Branch: refs/heads/12-dev
Commit: 0f3b6e45829d5f093091363203e78512b405bdcb
Parents: 33bfb49
Author: QiangCai <qi...@qq.com>
Authored: Fri Apr 28 20:10:22 2017 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Mon May 1 15:54:12 2017 +0530

----------------------------------------------------------------------
 .../org/apache/spark/sql/CarbonSession.scala    | 22 +++++++++++++-------
 1 file changed, 14 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/0f3b6e45/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
index 844b3b9..b436891 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
@@ -24,6 +24,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.SparkSession.Builder
 import org.apache.spark.sql.hive.CarbonSessionState
 import org.apache.spark.sql.internal.{SessionState, SharedState}
+import org.apache.spark.util.Utils
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
@@ -81,6 +82,19 @@ object CarbonSession {
         getValue("options", builder).asInstanceOf[scala.collection.mutable.HashMap[String, String]]
       val userSuppliedContext: Option[SparkContext] =
         getValue("userSuppliedContext", builder).asInstanceOf[Option[SparkContext]]
+      val hadoopConf = new Configuration()
+      val configFile = Utils.getContextOrSparkClassLoader.getResource("hive-site.xml")
+      if (configFile != null) {
+        hadoopConf.addResource(configFile)
+      }
+      if (options.get(CarbonCommonConstants.HIVE_CONNECTION_URL).isEmpty &&
+          hadoopConf.get(CarbonCommonConstants.HIVE_CONNECTION_URL) == null) {
+        val metaStorePathAbsolute = new File(metaStorePath).getCanonicalPath
+        val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
+        options ++= Map[String, String]((CarbonCommonConstants.HIVE_CONNECTION_URL,
+          s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"))
+      }
+
       // Get the session from current thread's active session.
       var session: SparkSession = SparkSession.getActiveSession match {
         case Some(sparkSession: CarbonSession) =>
@@ -135,14 +149,6 @@ object CarbonSession {
         CarbonProperties.getInstance()
           .addProperty(CarbonCommonConstants.STORE_LOCATION, storePath)
         session = new CarbonSession(sparkContext)
-        val hadoopConf = session.sharedState.sparkContext.hadoopConfiguration
-        if (options.get(CarbonCommonConstants.HIVE_CONNECTION_URL).isEmpty &&
-            hadoopConf.get(CarbonCommonConstants.HIVE_CONNECTION_URL) == null) {
-          val metaStorePathAbsolute = new File(metaStorePath).getCanonicalPath
-          val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
-          options ++= Map[String, String]((CarbonCommonConstants.HIVE_CONNECTION_URL,
-            s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"))
-        }
         options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
         SparkSession.setDefaultSession(session)