You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ra...@apache.org on 2019/03/25 12:24:56 UTC

[carbondata] branch master updated: [CARBONDATA-3329] Fixed deadlock issue during failed query

This is an automated email from the ASF dual-hosted git repository.

ravipesala pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new dc3d5ab  [CARBONDATA-3329] Fixed deadlock issue during failed query
dc3d5ab is described below

commit dc3d5abb0e10488dab7bc4b74c7aaf1e38e6b555
Author: kunal642 <ku...@gmail.com>
AuthorDate: Fri Mar 22 14:02:07 2019 +0530

    [CARBONDATA-3329] Fixed deadlock issue during failed query
    
    Problem: When a query fails, SparkExecuteStatementOperation.logError triggers a call to CarbonDatasourceHadoopRelation.toString, which tries to extract the CarbonTable from the relation. For this, a lock is acquired on HiveExternalCatalog and then on the logger. But SparkExecuteStatementOperation.logError has already acquired the lock on the logger and internally expects a lock on HiveExternalCatalog, which leads to a deadlock.
    
    This closes #3158
---
 .../scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala | 6 ++----
 .../org/apache/spark/sql/execution/command/cache/CacheUtil.scala    | 6 ++++++
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
index 672508f..57dd356 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDatasourceHadoopRelation.scala
@@ -56,8 +56,6 @@ case class CarbonDatasourceHadoopRelation(
     paths.head,
     CarbonEnv.getDatabaseName(caseInsensitiveMap.get("dbname"))(sparkSession),
     caseInsensitiveMap("tablename"))
-  lazy val databaseName: String = carbonTable.getDatabaseName
-  lazy val tableName: String = carbonTable.getTableName
   CarbonSession.updateSessionInfoToCurrentThread(sparkSession)
 
   @transient lazy val carbonRelation: CarbonRelation =
@@ -198,8 +196,8 @@ case class CarbonDatasourceHadoopRelation(
   override def unhandledFilters(filters: Array[Filter]): Array[Filter] = new Array[Filter](0)
 
   override def toString: String = {
-    "CarbonDatasourceHadoopRelation [ " + "Database name :" + databaseName +
-    ", " + "Table name :" + tableName + ", Schema :" + tableSchema + " ]"
+    "CarbonDatasourceHadoopRelation [ " + "Database name :" + identifier.getDatabaseName +
+    ", " + "Table name :" + identifier.getTableName + ", Schema :" + tableSchema + " ]"
   }
 
   override def sizeInBytes: Long = carbonRelation.sizeInBytes
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
index 615d8e0..18402e9 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/cache/CacheUtil.scala
@@ -44,6 +44,9 @@ object CacheUtil {
       CarbonDataMergerUtil.getValidSegmentList(absoluteTableIdentifier).asScala.flatMap {
         segment =>
           segment.getCommittedIndexFile.keySet().asScala
+      }.map { indexFile =>
+        indexFile.replace(CarbonCommonConstants.WINDOWS_FILE_SEPARATOR,
+          CarbonCommonConstants.FILE_SEPARATOR)
       }.toList
     } else {
       val tablePath = carbonTable.getTablePath
@@ -53,6 +56,9 @@ object CacheUtil {
         load =>
           val seg = new Segment(load.getLoadName, null, readCommittedScope)
           seg.getCommittedIndexFile.keySet().asScala
+      }.map { indexFile =>
+        indexFile.replace(CarbonCommonConstants.WINDOWS_FILE_SEPARATOR,
+          CarbonCommonConstants.FILE_SEPARATOR)
       }.toList
     }
   }