Posted to issues@carbondata.apache.org by GitBox <gi...@apache.org> on 2020/10/12 04:22:19 UTC

[GitHub] [carbondata] marchpure commented on a change in pull request #3976: [CARBONDATA-4026] Fix Thread leakage while Loading

marchpure commented on a change in pull request #3976:
URL: https://github.com/apache/carbondata/pull/3976#discussion_r503031223



##########
File path: core/src/main/java/org/apache/carbondata/core/statusmanager/StageInputCollector.java
##########
@@ -65,8 +65,15 @@
     collectStageFiles(table, hadoopConf, stageInputFiles, successFiles);
     if (stageInputFiles.size() > 0) {
       int numThreads = Math.min(Math.max(stageInputFiles.size(), 1), 10);
-      ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
-      return createInputSplits(executorService, stageInputFiles);
+      ExecutorService executorService = null;
+      try {
+        executorService = Executors.newFixedThreadPool(numThreads);

Review comment:
       I have modified the code according to your suggestion.
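       The shape of the fix is to create the pool inside a try block and release it once the splits have been created (or creation has failed), so the fixed thread pool can no longer leak. A rough Scala sketch of that pattern against the same java.util.concurrent API (the helper name and the choice of shutdownNow are illustrative, not the exact committed code):

           import java.util.concurrent.{ExecutorService, Executors}

           // Illustrative helper: the pool is created inside try and shut down in
           // finally, so an exception thrown while the work runs can no longer
           // leave worker threads alive.
           def withFixedPool[T](numThreads: Int)(work: ExecutorService => T): T = {
             var executorService: ExecutorService = null
             try {
               executorService = Executors.newFixedThreadPool(numThreads)
               work(executorService)
             } finally {
               if (executorService != null) {
                 executorService.shutdownNow()
               }
             }
           }

       With a helper like that, a call site such as the one above would read withFixedPool(numThreads) { pool => createInputSplits(pool, stageInputFiles) }.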

##########
File path: integration/spark/src/main/scala/org/apache/spark/rdd/CarbonMergeFilesRDD.scala
##########
@@ -158,20 +158,27 @@ object CarbonMergeFilesRDD {
       // remove all tmp folder of index files
       val startDelete = System.currentTimeMillis()
       val numThreads = Math.min(Math.max(partitionInfo.size(), 1), 10)
-      val executorService = Executors.newFixedThreadPool(numThreads)
-      val carbonSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
-      partitionInfo
-        .asScala
-        .map { partitionPath =>
-          executorService.submit(new Runnable {
-            override def run(): Unit = {
-              ThreadLocalSessionInfo.setCarbonSessionInfo(carbonSessionInfo)
-              FileFactory.deleteAllCarbonFilesOfDir(
-                FileFactory.getCarbonFile(partitionPath + "/" + tempFolderPath))
-            }
-          })
+      var executorService: ExecutorService = null
+      try {
+        executorService = Executors.newFixedThreadPool(numThreads)

Review comment:
       I have modified the code according to your suggestion.
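       Spelled out, the same pattern around the tmp-folder cleanup looks roughly like the following Scala sketch (it reuses the names from the diff above; waiting on the futures with get() and the null check in finally are assumptions of the sketch, not necessarily the exact committed code):

           import java.util.concurrent.{ExecutorService, Executors}
           import scala.collection.JavaConverters._

           var executorService: ExecutorService = null
           try {
             executorService = Executors.newFixedThreadPool(numThreads)
             val carbonSessionInfo = ThreadLocalSessionInfo.getCarbonSessionInfo
             val futures = partitionInfo.asScala.map { partitionPath =>
               executorService.submit(new Runnable {
                 override def run(): Unit = {
                   // propagate the session info into the worker thread before deleting
                   ThreadLocalSessionInfo.setCarbonSessionInfo(carbonSessionInfo)
                   FileFactory.deleteAllCarbonFilesOfDir(
                     FileFactory.getCarbonFile(partitionPath + "/" + tempFolderPath))
                 }
               })
             }
             futures.foreach(_.get()) // surface any deletion failure before moving on
           } finally {
             if (executorService != null) {
               executorService.shutdown() // threads are released whether or not deletion fails
             }
           }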

##########
File path: integration/spark/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala
##########
@@ -123,6 +123,9 @@ object IndexServer extends ServerInterface {
         t
       }
     })
+    indexServerExecutorService.get.shutdown()

Review comment:
       I have modified the code according to your suggestion.
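       Calling shutdown() right after submit is safe because ExecutorService.shutdown() only stops the pool from accepting new tasks; the task that was just submitted still runs to completion, and the worker thread can exit once it finishes instead of lingering. A minimal standalone Scala illustration (plain single-thread pool here rather than IndexServer's custom thread factory):

           import java.util.concurrent.Executors

           val pool = Executors.newSingleThreadExecutor()
           pool.submit(new Runnable {
             override def run(): Unit = {
               // long-running work (for IndexServer, keeping the server alive) goes here
             }
           })
           // No new tasks will be accepted, and the pool's thread terminates once
           // the submitted task finishes, so nothing is left leaking.
           pool.shutdown()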
