Posted to commits@pinot.apache.org by xi...@apache.org on 2019/11/07 04:26:27 UTC

[incubator-pinot] 01/01: Use SparkContext.getOrCreate() to use shared SparkContext if possible.

This is an automated email from the ASF dual-hosted git repository.

xiangfu pushed a commit to branch pinot-spark-new
in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git

commit 34885ecf152f171ac087d8fc0cf4397602c755a0
Author: Xiang Fu <fx...@gmail.com>
AuthorDate: Wed Nov 6 20:24:50 2019 -0800

    Use SparkContext.getOrCreate() to use shared SparkContext if possible.
---
 .../main/java/org/apache/pinot/spark/jobs/SegmentCreationJob.java    | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/pinot-spark/src/main/java/org/apache/pinot/spark/jobs/SegmentCreationJob.java b/pinot-spark/src/main/java/org/apache/pinot/spark/jobs/SegmentCreationJob.java
index 74f09ee..fdd9c10 100644
--- a/pinot-spark/src/main/java/org/apache/pinot/spark/jobs/SegmentCreationJob.java
+++ b/pinot-spark/src/main/java/org/apache/pinot/spark/jobs/SegmentCreationJob.java
@@ -23,11 +23,9 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.Serializable;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
 import javax.annotation.Nullable;
@@ -57,6 +55,7 @@ import org.apache.pinot.core.segment.name.SegmentNameGenerator;
 import org.apache.pinot.core.segment.name.SimpleSegmentNameGenerator;
 import org.apache.pinot.spark.utils.JobPreparationHelper;
 import org.apache.pinot.spark.utils.PushLocation;
+import org.apache.spark.SparkContext;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.slf4j.Logger;
@@ -360,7 +359,7 @@ public class SegmentCreationJob extends BaseSegmentJob {
     }
     _properties.put(JobConfigConstants.SCHEMA, getSchema().toSingleLineJsonString());
 
-    JavaSparkContext sparkContext = new JavaSparkContext();
+    JavaSparkContext sparkContext = JavaSparkContext.fromSparkContext(SparkContext.getOrCreate());
     addDepsJarToDistributedCache(sparkContext);
     JavaRDD<String> pathRDD = sparkContext.parallelize(dataFilePathStrs, numDataFiles);
     pathRDD.zipWithIndex().foreach(tuple2 -> {
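
For context, a minimal, self-contained sketch of the pattern this commit
adopts. The class name and app/master settings below are illustrative,
not part of the Pinot code base. SparkContext.getOrCreate() returns the
JVM's already-active SparkContext when one exists (Spark allows at most
one live SparkContext per JVM) and only constructs a new one otherwise,
whereas the previous "new JavaSparkContext()" always tried to build a
fresh context and fails if one is already running:

    import org.apache.spark.SparkConf;
    import org.apache.spark.SparkContext;
    import org.apache.spark.api.java.JavaSparkContext;

    public class SharedContextSketch {
      public static void main(String[] args) {
        // This config only takes effect if no SparkContext is active
        // yet; otherwise getOrCreate() ignores it and returns the
        // already-running context. The patch itself calls the
        // no-argument SparkContext.getOrCreate(), relying on the
        // configuration supplied by the launcher (e.g. spark-submit);
        // master/appName are set here only so the sketch runs standalone.
        SparkConf conf = new SparkConf()
            .setMaster("local[*]")
            .setAppName("shared-context-sketch");

        // Reuse the active SparkContext if one exists, else create one.
        SparkContext sc = SparkContext.getOrCreate(conf);

        // Wrap the Scala SparkContext in the Java-friendly API, as the
        // patched SegmentCreationJob does via fromSparkContext().
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(sc);

        System.out.println("Running in Spark application: " + jsc.appName());
      }
    }

The apparent intent, per the commit subject, is that when the hosting
application has already created a SparkContext, SegmentCreationJob now
attaches to that shared context instead of failing while trying to
construct a second one.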


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@pinot.apache.org
For additional commands, e-mail: commits-help@pinot.apache.org