Posted to commits@carbondata.apache.org by ku...@apache.org on 2019/08/21 09:10:33 UTC

[carbondata] branch master updated: [CARBONDATA-3493] Initialize Profiler in CarbonEnv

This is an automated email from the ASF dual-hosted git repository.

kunalkapoor pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new f02f327  [CARBONDATA-3493] Initialize Profiler in CarbonEnv
f02f327 is described below

commit f02f327f117788c1bdc67e6e524d38bb91329abf
Author: akashrn5 <ak...@gmail.com>
AuthorDate: Wed Jul 31 18:54:41 2019 +0530

    [CARBONDATA-3493] Initialize Profiler in CarbonEnv
    
    Problem: After enabling "enable.query.statistics", an exception is
    thrown while querying because the profiler is not initialized before
    the RPC endpoint connection is set up.
    
    Solution: Initialize the Profiler in CarbonEnv before setting up the
    RPC endpoint connection.
    
    This closes #3342
    
    Co-authored-by: shivamasn <sh...@gmail.com>
---
 integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala  | 2 ++
 .../spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala      | 2 --
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index c13e7b9..1cbd156 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -28,6 +28,7 @@ import org.apache.spark.sql.execution.command.mv._
 import org.apache.spark.sql.execution.command.preaaggregate._
 import org.apache.spark.sql.execution.command.timeseries.TimeSeriesFunction
 import org.apache.spark.sql.hive._
+import org.apache.spark.sql.profiler.Profiler
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
@@ -121,6 +122,7 @@ class CarbonEnv {
         initialized = true
       }
     }
+    Profiler.initialize(sparkSession.sparkContext)
     LOGGER.info("Initialize CarbonEnv completed...")
   }
 }
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
index 7b1bf4c..deefcd1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSession.scala
@@ -259,8 +259,6 @@ object CarbonSession {
         }
         options.foreach { case (k, v) => session.sessionState.conf.setConfString(k, v) }
         SparkSession.setDefaultSession(session)
-        // Setup monitor end point and register CarbonMonitorListener
-        Profiler.initialize(sparkContext)
         // Register a successfully instantiated context to the singleton. This should be at the
         // end of the class definition so that the singleton is updated only if there is no
         // exception in the construction of the instance.
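For readers skimming the patch: the net effect is that Profiler initialization moves out of CarbonSession and into CarbonEnv.init, so the monitor/RPC endpoint exists before any query runs with "enable.query.statistics" enabled. Below is a minimal sketch of what the resulting init method looks like after this commit; the class name and the elided body of the synchronized block are abridged placeholders for illustration, not the full CarbonEnv implementation, and only Profiler.initialize, the SparkSession parameter, and the logging call are taken from the diff above.

    // Simplified sketch (not the actual class) showing where
    // Profiler.initialize now sits relative to the synchronized
    // initialization block in CarbonEnv.init.
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.profiler.Profiler

    import org.apache.carbondata.common.logging.LogServiceFactory

    class CarbonEnvSketch {
      private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
      private var initialized = false

      def init(sparkSession: SparkSession): Unit = {
        if (!initialized) {
          this.synchronized {
            if (!initialized) {
              // ... listener registration, metastore setup, etc. (elided) ...
              initialized = true
            }
          }
        }
        // New in this commit: set up the profiler (query-statistics RPC
        // endpoint) as part of CarbonEnv initialization, so it is ready
        // before the first query is profiled.
        Profiler.initialize(sparkSession.sparkContext)
        LOGGER.info("Initialize CarbonEnv completed...")
      }
    }

Because CarbonEnv.init runs during session setup, moving the call here keeps the previous behavior for CarbonSession users while also covering code paths that build a CarbonEnv without going through CarbonSession's builder, which is where the uninitialized-profiler exception was observed.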