Posted to commits@spark.apache.org by rx...@apache.org on 2016/11/06 07:19:50 UTC

[21/51] [partial] spark-website git commit: Add 1.6.3 doc.

http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/org/apache/spark/SparkContext.html
----------------------------------------------------------------------
diff --git a/site/docs/1.6.3/api/java/org/apache/spark/SparkContext.html b/site/docs/1.6.3/api/java/org/apache/spark/SparkContext.html
new file mode 100644
index 0000000..786b34c
--- /dev/null
+++ b/site/docs/1.6.3/api/java/org/apache/spark/SparkContext.html
@@ -0,0 +1,2961 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:17 PDT 2016 -->
+<title>SparkContext (Spark 1.6.3 JavaDoc)</title>
+<meta name="date" content="2016-11-02">
+<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
+</head>
+<body>
+<script type="text/javascript"><!--
+    if (location.href.indexOf('is-external=true') == -1) {
+        parent.document.title="SparkContext (Spark 1.6.3 JavaDoc)";
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar_top">
+<!--   -->
+</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="../../../overview-summary.html">Overview</a></li>
+<li><a href="package-summary.html">Package</a></li>
+<li class="navBarCell1Rev">Class</li>
+<li><a href="package-tree.html">Tree</a></li>
+<li><a href="../../../deprecated-list.html">Deprecated</a></li>
+<li><a href="../../../index-all.html">Index</a></li>
+<li><a href="../../../help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li><a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark"><span class="strong">Prev Class</span></a></li>
+<li><a href="../../../org/apache/spark/SparkContext.DoubleAccumulatorParam$.html" title="class in org.apache.spark"><span class="strong">Next Class</span></a></li>
+</ul>
+<ul class="navList">
+<li><a href="../../../index.html?org/apache/spark/SparkContext.html" target="_top">Frames</a></li>
+<li><a href="SparkContext.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<div>
+<ul class="subNavList">
+<li>Summary:&nbsp;</li>
+<li><a href="#nested_class_summary">Nested</a>&nbsp;|&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor_summary">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method_summary">Method</a></li>
+</ul>
+<ul class="subNavList">
+<li>Detail:&nbsp;</li>
+<li>Field&nbsp;|&nbsp;</li>
+<li><a href="#constructor_detail">Constr</a>&nbsp;|&nbsp;</li>
+<li><a href="#method_detail">Method</a></li>
+</ul>
+</div>
+<a name="skip-navbar_top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<!-- ======== START OF CLASS DATA ======== -->
+<div class="header">
+<div class="subTitle">org.apache.spark</div>
+<h2 title="Class SparkContext" class="title">Class SparkContext</h2>
+</div>
+<div class="contentContainer">
+<ul class="inheritance">
+<li>Object</li>
+<li>
+<ul class="inheritance">
+<li>org.apache.spark.SparkContext</li>
+</ul>
+</li>
+</ul>
+<div class="description">
+<ul class="blockList">
+<li class="blockList">
+<dl>
+<dt>All Implemented Interfaces:</dt>
+<dd><a href="../../../org/apache/spark/Logging.html" title="interface in org.apache.spark">Logging</a></dd>
+</dl>
+<hr>
+<br>
+<pre>public class <span class="strong">SparkContext</span>
+extends Object
+implements <a href="../../../org/apache/spark/Logging.html" title="interface in org.apache.spark">Logging</a></pre>
+<div class="block">Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
+ cluster, and can be used to create RDDs, accumulators and broadcast variables on that cluster.
+ <p>
+ Only one SparkContext may be active per JVM.  You must <code>stop()</code> the active SparkContext before
+ creating a new one.  This limitation may eventually be removed; see SPARK-2243 for more details.
+ <p>
+ param:  config a Spark Config object describing the application configuration. Any settings in
+   this config override the default configs as well as system properties.</div>
+</li>
+</ul>
+</div>
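+<!-- Usage sketch (Scala). The app name and local master below are illustrative, chosen so the
+     snippet can run standalone; note the one-active-context-per-JVM rule described above:
+  import org.apache.spark.{SparkConf, SparkContext}
+  val conf = new SparkConf().setAppName("demo").setMaster("local[2]")
+  val sc = new SparkContext(conf)   // settings in conf override defaults and system properties
+  try {
+    println(sc.parallelize(1 to 100).reduce(_ + _))   // 5050
+  } finally {
+    sc.stop()   // required before another SparkContext may be created (SPARK-2243)
+  }
+-->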
+<div class="summary">
+<ul class="blockList">
+<li class="blockList">
+<!-- ======== NESTED CLASS SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="nested_class_summary">
+<!--   -->
+</a>
+<h3>Nested Class Summary</h3>
+<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Nested Class Summary table, listing nested classes, and an explanation">
+<caption><span>Nested Classes</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Class and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.DoubleAccumulatorParam$.html" title="class in org.apache.spark">SparkContext.DoubleAccumulatorParam$</a></strong></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.FloatAccumulatorParam$.html" title="class in org.apache.spark">SparkContext.FloatAccumulatorParam$</a></strong></code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.IntAccumulatorParam$.html" title="class in org.apache.spark">SparkContext.IntAccumulatorParam$</a></strong></code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static class&nbsp;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.LongAccumulatorParam$.html" title="class in org.apache.spark">SparkContext.LongAccumulatorParam$</a></strong></code>&nbsp;</td>
+</tr>
+</table>
+</li>
+</ul>
+<!-- ======== CONSTRUCTOR SUMMARY ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor_summary">
+<!--   -->
+</a>
+<h3>Constructor Summary</h3>
+<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
+<caption><span>Constructors</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colOne" scope="col">Constructor and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colOne"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SparkContext()">SparkContext</a></strong>()</code>
+<div class="block">Create a SparkContext that loads settings from system properties (for instance, when
+ launching with ./bin/spark-submit).</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SparkContext(org.apache.spark.SparkConf)">SparkContext</a></strong>(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SparkContext(org.apache.spark.SparkConf,%20scala.collection.Map)">SparkContext</a></strong>(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config,
+            scala.collection.Map&lt;String,scala.collection.Set&lt;<a href="../../../org/apache/spark/scheduler/SplitInfo.html" title="class in org.apache.spark.scheduler">SplitInfo</a>&gt;&gt;&nbsp;preferredNodeLocationData)</code>
+<div class="block">:: DeveloperApi ::
+ Alternative constructor for setting preferred locations where Spark will create executors.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SparkContext(java.lang.String,%20java.lang.String,%20org.apache.spark.SparkConf)">SparkContext</a></strong>(String&nbsp;master,
+            String&nbsp;appName,
+            <a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</code>
+<div class="block">Alternative constructor that allows setting common Spark properties directly</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SparkContext(java.lang.String,%20java.lang.String,%20java.lang.String,%20scala.collection.Seq,%20scala.collection.Map,%20scala.collection.Map)">SparkContext</a></strong>(String&nbsp;master,
+            String&nbsp;appName,
+            String&nbsp;sparkHome,
+            scala.collection.Seq&lt;String&gt;&nbsp;jars,
+            scala.collection.Map&lt;String,String&gt;&nbsp;environment,
+            scala.collection.Map&lt;String,scala.collection.Set&lt;<a href="../../../org/apache/spark/scheduler/SplitInfo.html" title="class in org.apache.spark.scheduler">SplitInfo</a>&gt;&gt;&nbsp;preferredNodeLocationData)</code>
+<div class="block">Alternative constructor that allows setting common Spark properties directly</div>
+</td>
+</tr>
+</table>
+</li>
+</ul>
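+<!-- Constructor sketch: the (master, appName, conf) convenience form, with illustrative values:
+  import org.apache.spark.{SparkConf, SparkContext}
+  val sc = new SparkContext("local[4]", "ctor-demo", new SparkConf())
+-->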
+<!-- ========== METHOD SUMMARY =========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method_summary">
+<!--   -->
+</a>
+<h3>Method Summary</h3>
+<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
+<caption><span>Methods</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Method and Description</th>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;R,T&gt;&nbsp;<a href="../../../org/apache/spark/Accumulable.html" title="class in org.apache.spark">Accumulable</a>&lt;R,T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#accumulable(R,%20org.apache.spark.AccumulableParam)">accumulable</a></strong>(R&nbsp;initialValue,
+           <a href="../../../org/apache/spark/AccumulableParam.html" title="interface in org.apache.spark">AccumulableParam</a>&lt;R,T&gt;&nbsp;param)</code>
+<div class="block">Create an <a href="../../../org/apache/spark/Accumulable.html" title="class in org.apache.spark"><code>Accumulable</code></a> shared variable, to which tasks can add values
+ with <code>+=</code>.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;R,T&gt;&nbsp;<a href="../../../org/apache/spark/Accumulable.html" title="class in org.apache.spark">Accumulable</a>&lt;R,T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#accumulable(R,%20java.lang.String,%20org.apache.spark.AccumulableParam)">accumulable</a></strong>(R&nbsp;initialValue,
+           String&nbsp;name,
+           <a href="../../../org/apache/spark/AccumulableParam.html" title="interface in org.apache.spark">AccumulableParam</a>&lt;R,T&gt;&nbsp;param)</code>
+<div class="block">Create an <a href="../../../org/apache/spark/Accumulable.html" title="class in org.apache.spark"><code>Accumulable</code></a> shared variable, with a name for display in the
+ Spark UI.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;R,T&gt;&nbsp;<a href="../../../org/apache/spark/Accumulable.html" title="class in org.apache.spark">Accumulable</a>&lt;R,T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#accumulableCollection(R,%20scala.Function1,%20scala.reflect.ClassTag)">accumulableCollection</a></strong>(R&nbsp;initialValue,
+                     scala.Function1&lt;R,scala.collection.generic.Growable&lt;T&gt;&gt;&nbsp;evidence$9,
+                     scala.reflect.ClassTag&lt;R&gt;&nbsp;evidence$10)</code>
+<div class="block">Create an accumulator from a "mutable collection" type.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/Accumulator.html" title="class in org.apache.spark">Accumulator</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#accumulator(T,%20org.apache.spark.AccumulatorParam)">accumulator</a></strong>(T&nbsp;initialValue,
+           <a href="../../../org/apache/spark/AccumulatorParam.html" title="interface in org.apache.spark">AccumulatorParam</a>&lt;T&gt;&nbsp;param)</code>
+<div class="block">Create an <a href="../../../org/apache/spark/Accumulator.html" title="class in org.apache.spark"><code>Accumulator</code></a> variable of a given type, which tasks can "add"
+ values to using the <code>+=</code> method.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/Accumulator.html" title="class in org.apache.spark">Accumulator</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#accumulator(T,%20java.lang.String,%20org.apache.spark.AccumulatorParam)">accumulator</a></strong>(T&nbsp;initialValue,
+           String&nbsp;name,
+           <a href="../../../org/apache/spark/AccumulatorParam.html" title="interface in org.apache.spark">AccumulatorParam</a>&lt;T&gt;&nbsp;param)</code>
+<div class="block">Create an <a href="../../../org/apache/spark/Accumulator.html" title="class in org.apache.spark"><code>Accumulator</code></a> variable of a given type, with a name for display
+ in the Spark UI.</div>
+</td>
+</tr>
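+<!-- Sketch for the accumulator/accumulable family above: tasks add with +=, only the driver
+     reads .value; the counter name shown in the UI is illustrative:
+  val errors = sc.accumulator(0, "errorCount")
+  sc.parallelize(Seq(1, -2, 3)).foreach(x => if (x < 0) errors += 1)
+  println(errors.value)   // 1 once the job has finished
+-->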
+<tr class="rowColor">
+<td class="colFirst"><code>scala.collection.mutable.HashMap&lt;String,Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addedFiles()">addedFiles</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.collection.mutable.HashMap&lt;String,Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addedJars()">addedJars</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addFile(java.lang.String)">addFile</a></strong>(String&nbsp;path)</code>
+<div class="block">Add a file to be downloaded with this Spark job on every node.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addFile(java.lang.String,%20boolean)">addFile</a></strong>(String&nbsp;path,
+       boolean&nbsp;recursive)</code>
+<div class="block">Add a file to be downloaded with this Spark job on every node.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addJar(java.lang.String)">addJar</a></strong>(String&nbsp;path)</code>
+<div class="block">Adds a JAR dependency for all tasks to be executed on this SparkContext in the future.</div>
+</td>
+</tr>
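+<!-- addFile sketch (the path is a placeholder); SparkFiles.get resolves the per-node copy:
+  sc.addFile("/tmp/lookup.txt")
+  sc.parallelize(1 to 2).foreach { _ =>
+    val local = org.apache.spark.SparkFiles.get("lookup.txt")   // local path on each node
+    println(local)   // goes to the executor log
+  }
+-->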
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#addSparkListener(org.apache.spark.scheduler.SparkListener)">addSparkListener</a></strong>(<a href="../../../org/apache/spark/scheduler/SparkListener.html" title="interface in org.apache.spark.scheduler">SparkListener</a>&nbsp;listener)</code>
+<div class="block">:: DeveloperApi ::
+ Register a listener to receive up-calls from events that happen during execution.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>scala.Option&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#applicationAttemptId()">applicationAttemptId</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#applicationId()">applicationId</a></strong>()</code>
+<div class="block">A unique identifier for the Spark application.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#appName()">appName</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;String,<a href="../../../org/apache/spark/input/PortableDataStream.html" title="class in org.apache.spark.input">PortableDataStream</a>&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#binaryFiles(java.lang.String,%20int)">binaryFiles</a></strong>(String&nbsp;path,
+           int&nbsp;minPartitions)</code>
+<div class="block">Get an RDD for a Hadoop-readable dataset as PortableDataStream for each file
+ (useful for binary data)</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;byte[]&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#binaryRecords(java.lang.String,%20int,%20org.apache.hadoop.conf.Configuration)">binaryRecords</a></strong>(String&nbsp;path,
+             int&nbsp;recordLength,
+             org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
+<div class="block">Load data from a flat binary file, assuming the length of each record is constant.</div>
+</td>
+</tr>
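+<!-- binaryRecords sketch for fixed-width records; the path and 16-byte record length are assumptions:
+  val records = sc.binaryRecords("hdfs:///data/fixed.bin", recordLength = 16)   // RDD of byte arrays
+-->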
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#booleanWritableConverter()">booleanWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.BooleanWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#boolToBoolWritable(boolean)">boolToBoolWritable</a></strong>(boolean&nbsp;b)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/broadcast/Broadcast.html" title="class in org.apache.spark.broadcast">Broadcast</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#broadcast(T,%20scala.reflect.ClassTag)">broadcast</a></strong>(T&nbsp;value,
+         scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$11)</code>
+<div class="block">Broadcast a read-only variable to the cluster, returning a
+ <a href="../../../org/apache/spark/broadcast/Broadcast.html" title="class in org.apache.spark.broadcast"><code>Broadcast</code></a> object for reading it in distributed functions.</div>
+</td>
+</tr>
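+<!-- Broadcast sketch: ship a read-only lookup table to executors once; values are illustrative:
+  val table = sc.broadcast(Map("a" -> 1, "b" -> 2))
+  val resolved = sc.parallelize(Seq("a", "b", "c")).map(k => table.value.getOrElse(k, 0))
+  println(resolved.collect().mkString(","))   // 1,2,0
+-->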
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.BytesWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#bytesToBytesWritable(byte[])">bytesToBytesWritable</a></strong>(byte[]&nbsp;aob)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;byte[]&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#bytesWritableConverter()">bytesWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#cancelAllJobs()">cancelAllJobs</a></strong>()</code>
+<div class="block">Cancel all jobs that have been scheduled or are running.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#cancelJobGroup(java.lang.String)">cancelJobGroup</a></strong>(String&nbsp;groupId)</code>
+<div class="block">Cancel active jobs for the specified group.</div>
+</td>
+</tr>
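+<!-- Job-group sketch: cancelJobGroup pairs with setJobGroup (a group id set per thread); the
+     group id and description here are illustrative:
+  sc.setJobGroup("nightly-etl", "nightly load", interruptOnCancel = true)
+  // ... submit jobs from this thread ...
+  sc.cancelJobGroup("nightly-etl")   // typically called from another thread
+-->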
+<tr class="rowColor">
+<td class="colFirst"><code>scala.Option&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#checkpointDir()">checkpointDir</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#clearCallSite()">clearCallSite</a></strong>()</code>
+<div class="block">Clear the thread-local property for overriding the call sites
+ of actions and RDDs.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#clearFiles()">clearFiles</a></strong>()</code>
+<div class="block">Clear the job's list of files added by <code>addFile</code> so that they do not get downloaded to
+ any new nodes.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#clearJars()">clearJars</a></strong>()</code>
+<div class="block">Clear the job's list of JARs added by <code>addJar</code> so that they do not get downloaded to
+ any new nodes.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#clearJobGroup()">clearJobGroup</a></strong>()</code>
+<div class="block">Clear the current thread's job group ID and its description.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>int</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#defaultMinPartitions()">defaultMinPartitions</a></strong>()</code>
+<div class="block">Default min number of partitions for Hadoop RDDs when not given by user
+ Notice that we use math.min so the "defaultMinPartitions" cannot be higher than 2.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>int</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#defaultMinSplits()">defaultMinSplits</a></strong>()</code>
+<div class="block">Default min number of partitions for Hadoop RDDs when not given by user</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>int</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#defaultParallelism()">defaultParallelism</a></strong>()</code>
+<div class="block">Default level of parallelism to use when not given by user (e.g.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static <a href="../../../org/apache/spark/rdd/DoubleRDDFunctions.html" title="class in org.apache.spark.rdd">DoubleRDDFunctions</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#doubleRDDToDoubleRDDFunctions(org.apache.spark.rdd.RDD)">doubleRDDToDoubleRDDFunctions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;Object&gt;&nbsp;rdd)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.DoubleWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#doubleToDoubleWritable(double)">doubleToDoubleWritable</a></strong>(double&nbsp;d)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#doubleWritableConverter()">doubleWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#DRIVER_IDENTIFIER()">DRIVER_IDENTIFIER</a></strong>()</code>
+<div class="block">Executor id for the driver.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<any></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#emptyRDD(scala.reflect.ClassTag)">emptyRDD</a></strong>(scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$8)</code>
+<div class="block">Get an RDD that has no partitions or elements.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.collection.mutable.HashMap&lt;String,String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#executorEnvs()">executorEnvs</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#externalBlockStoreFolderName()">externalBlockStoreFolderName</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.collection.Seq&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#files()">files</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.FloatWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#floatToFloatWritable(float)">floatToFloatWritable</a></strong>(float&nbsp;f)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#floatWritableConverter()">floatWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>scala.collection.Seq&lt;org.apache.spark.scheduler.Schedulable&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getAllPools()">getAllPools</a></strong>()</code>
+<div class="block">:: DeveloperApi ::
+ Return pools for fair scheduler</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.Option&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getCheckpointDir()">getCheckpointDir</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getConf()">getConf</a></strong>()</code>
+<div class="block">Return a copy of this SparkContext's configuration.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.collection.Map&lt;String,scala.Tuple2&lt;Object,Object&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getExecutorMemoryStatus()">getExecutorMemoryStatus</a></strong>()</code>
+<div class="block">Return a map from the slave to the max memory available for caching and the remaining
+ memory available for caching.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/storage/StorageStatus.html" title="class in org.apache.spark.storage">StorageStatus</a>[]</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getExecutorStorageStatus()">getExecutorStorageStatus</a></strong>()</code>
+<div class="block">:: DeveloperApi ::
+ Return information about blocks stored in all of the slaves</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getLocalProperty(java.lang.String)">getLocalProperty</a></strong>(String&nbsp;key)</code>
+<div class="block">Get a local property set in this thread, or null if it is missing.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static <a href="../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getOrCreate()">getOrCreate</a></strong>()</code>
+<div class="block">This function may be used to get or instantiate a SparkContext and register it as a
+ singleton object.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static <a href="../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getOrCreate(org.apache.spark.SparkConf)">getOrCreate</a></strong>(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config)</code>
+<div class="block">This function may be used to get or instantiate a SparkContext and register it as a
+ singleton object.</div>
+</td>
+</tr>
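+<!-- getOrCreate sketch: reuses the active context if one exists, otherwise builds one from conf:
+  val sc = SparkContext.getOrCreate(new SparkConf().setAppName("shared"))
+-->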
+<tr class="rowColor">
+<td class="colFirst"><code>scala.collection.Map&lt;Object,<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;?&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getPersistentRDDs()">getPersistentRDDs</a></strong>()</code>
+<div class="block">Returns an immutable map of RDDs that have marked themselves as persistent via cache() call.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.Option&lt;org.apache.spark.scheduler.Schedulable&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getPoolForName(java.lang.String)">getPoolForName</a></strong>(String&nbsp;pool)</code>
+<div class="block">:: DeveloperApi ::
+ Return the pool associated with the given name, if one exists</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/storage/RDDInfo.html" title="class in org.apache.spark.storage">RDDInfo</a>[]</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getRDDStorageInfo()">getRDDStorageInfo</a></strong>()</code>
+<div class="block">:: DeveloperApi ::
+ Return information about what RDDs are cached, if they are in mem or on disk, how much space
+ they take, etc.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>scala.Enumeration.Value</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#getSchedulingMode()">getSchedulingMode</a></strong>()</code>
+<div class="block">Return current scheduling mode</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>org.apache.hadoop.conf.Configuration</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#hadoopConfiguration()">hadoopConfiguration</a></strong>()</code>
+<div class="block">A default Hadoop Configuration for the Hadoop code (e.g.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#hadoopFile(java.lang.String,%20java.lang.Class,%20java.lang.Class,%20java.lang.Class,%20int)">hadoopFile</a></strong>(String&nbsp;path,
+          Class&lt;? extends org.apache.hadoop.mapred.InputFormat&lt;K,V&gt;&gt;&nbsp;inputFormatClass,
+          Class&lt;K&gt;&nbsp;keyClass,
+          Class&lt;V&gt;&nbsp;valueClass,
+          int&nbsp;minPartitions)</code>
+<div class="block">Get an RDD for a Hadoop file with an arbitrary InputFormat</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;K,V,F extends org.apache.hadoop.mapred.InputFormat&lt;K,V&gt;&gt;&nbsp;<br><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#hadoopFile(java.lang.String,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">hadoopFile</a></strong>(String&nbsp;path,
+          scala.reflect.ClassTag&lt;K&gt;&nbsp;km,
+          scala.reflect.ClassTag&lt;V&gt;&nbsp;vm,
+          scala.reflect.ClassTag&lt;F&gt;&nbsp;fm)</code>
+<div class="block">Smarter version of hadoopFile() that uses class tags to figure out the classes of keys,
+ values and the InputFormat so that users don't need to pass them directly.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V,F extends org.apache.hadoop.mapred.InputFormat&lt;K,V&gt;&gt;&nbsp;<br><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#hadoopFile(java.lang.String,%20int,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">hadoopFile</a></strong>(String&nbsp;path,
+          int&nbsp;minPartitions,
+          scala.reflect.ClassTag&lt;K&gt;&nbsp;km,
+          scala.reflect.ClassTag&lt;V&gt;&nbsp;vm,
+          scala.reflect.ClassTag&lt;F&gt;&nbsp;fm)</code>
+<div class="block">Smarter version of hadoopFile() that uses class tags to figure out the classes of keys,
+ values and the InputFormat so that users don't need to pass them directly.</div>
+</td>
+</tr>
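+<!-- hadoopFile sketch using the class-tag overload above; the input path is a placeholder:
+  import org.apache.hadoop.io.{LongWritable, Text}
+  import org.apache.hadoop.mapred.TextInputFormat
+  val lines = sc.hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/in")
+    .map(_._2.toString)   // copy Text before caching; Hadoop reuses Writable objects
+-->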
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#hadoopRDD(org.apache.hadoop.mapred.JobConf,%20java.lang.Class,%20java.lang.Class,%20java.lang.Class,%20int)">hadoopRDD</a></strong>(org.apache.hadoop.mapred.JobConf&nbsp;conf,
+         Class&lt;? extends org.apache.hadoop.mapred.InputFormat&lt;K,V&gt;&gt;&nbsp;inputFormatClass,
+         Class&lt;K&gt;&nbsp;keyClass,
+         Class&lt;V&gt;&nbsp;valueClass,
+         int&nbsp;minPartitions)</code>
+<div class="block">Get an RDD for a Hadoop-readable dataset from a Hadoop JobConf given its InputFormat and other
+ necessary info (e.g. file name for a filesystem-based dataset, table name for HyperTable).</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#initLocalProperties()">initLocalProperties</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.IntWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#intToIntWritable(int)">intToIntWritable</a></strong>(int&nbsp;i)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#intWritableConverter()">intWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>boolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#isLocal()">isLocal</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>boolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#isStopped()">isStopped</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static scala.Option&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#jarOfClass(java.lang.Class)">jarOfClass</a></strong>(Class&lt;?&gt;&nbsp;cls)</code>
+<div class="block">Find the JAR from which a given class was loaded, to make it easy for users to pass
+ their JARs to SparkContext.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static scala.Option&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#jarOfObject(java.lang.Object)">jarOfObject</a></strong>(Object&nbsp;obj)</code>
+<div class="block">Find the JAR that contains the class of a particular object, to make it easy for users
+ to pass their JARs to SparkContext.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>scala.collection.Seq&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#jars()">jars</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>boolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#killExecutor(java.lang.String)">killExecutor</a></strong>(String&nbsp;executorId)</code>
+<div class="block">:: DeveloperApi ::
+ Request that the cluster manager kill the specified executor.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>boolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#killExecutors(scala.collection.Seq)">killExecutors</a></strong>(scala.collection.Seq&lt;String&gt;&nbsp;executorIds)</code>
+<div class="block">:: DeveloperApi ::
+ Request that the cluster manager kill the specified executors.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#LEGACY_DRIVER_IDENTIFIER()">LEGACY_DRIVER_IDENTIFIER</a></strong>()</code>
+<div class="block">Legacy version of DRIVER_IDENTIFIER, retained for backwards-compatibility.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>org.apache.spark.scheduler.LiveListenerBus</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#listenerBus()">listenerBus</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.LongWritable</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#longToLongWritable(long)">longToLongWritable</a></strong>(long&nbsp;l)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#longWritableConverter()">longWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#makeRDD(scala.collection.Seq,%20int,%20scala.reflect.ClassTag)">makeRDD</a></strong>(scala.collection.Seq&lt;T&gt;&nbsp;seq,
+       int&nbsp;numSlices,
+       scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$2)</code>
+<div class="block">Distribute a local Scala collection to form an RDD.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#makeRDD(scala.collection.Seq,%20scala.reflect.ClassTag)">makeRDD</a></strong>(scala.collection.Seq&lt;scala.Tuple2&lt;T,scala.collection.Seq&lt;String&gt;&gt;&gt;&nbsp;seq,
+       scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$3)</code>
+<div class="block">Distribute a local Scala collection to form an RDD, with one or more
+ location preferences (hostnames of Spark nodes) for each object.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#master()">master</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>org.apache.spark.metrics.MetricsSystem</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#metricsSystem()">metricsSystem</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V,F extends org.apache.hadoop.mapreduce.InputFormat&lt;K,V&gt;&gt;&nbsp;<br><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#newAPIHadoopFile(java.lang.String,%20java.lang.Class,%20java.lang.Class,%20java.lang.Class,%20org.apache.hadoop.conf.Configuration)">newAPIHadoopFile</a></strong>(String&nbsp;path,
+                Class&lt;F&gt;&nbsp;fClass,
+                Class&lt;K&gt;&nbsp;kClass,
+                Class&lt;V&gt;&nbsp;vClass,
+                org.apache.hadoop.conf.Configuration&nbsp;conf)</code>
+<div class="block">Get an RDD for a given Hadoop file with an arbitrary new API InputFormat
+ and extra configuration options to pass to the input format.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;K,V,F extends org.apache.hadoop.mapreduce.InputFormat&lt;K,V&gt;&gt;&nbsp;<br><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#newAPIHadoopFile(java.lang.String,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">newAPIHadoopFile</a></strong>(String&nbsp;path,
+                scala.reflect.ClassTag&lt;K&gt;&nbsp;km,
+                scala.reflect.ClassTag&lt;V&gt;&nbsp;vm,
+                scala.reflect.ClassTag&lt;F&gt;&nbsp;fm)</code>
+<div class="block">Get an RDD for a Hadoop file with an arbitrary new API InputFormat.</div>
+</td>
+</tr>
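+<!-- newAPIHadoopFile sketch with the new-API (mapreduce) TextInputFormat; path is a placeholder:
+  import org.apache.hadoop.io.{LongWritable, Text}
+  import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
+  val rows = sc.newAPIHadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///data/in")
+-->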
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V,F extends org.apache.hadoop.mapreduce.InputFormat&lt;K,V&gt;&gt;&nbsp;<br><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#newAPIHadoopRDD(org.apache.hadoop.conf.Configuration,%20java.lang.Class,%20java.lang.Class,%20java.lang.Class)">newAPIHadoopRDD</a></strong>(org.apache.hadoop.conf.Configuration&nbsp;conf,
+               Class&lt;F&gt;&nbsp;fClass,
+               Class&lt;K&gt;&nbsp;kClass,
+               Class&lt;V&gt;&nbsp;vClass)</code>
+<div class="block">Get an RDD for a given Hadoop file with an arbitrary new API InputFormat
+ and extra configuration options to pass to the input format.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static &lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/DoubleRDDFunctions.html" title="class in org.apache.spark.rdd">DoubleRDDFunctions</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#numericRDDToDoubleRDDFunctions(org.apache.spark.rdd.RDD,%20scala.math.Numeric)">numericRDDToDoubleRDDFunctions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+                              scala.math.Numeric&lt;T&gt;&nbsp;num)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#objectFile(java.lang.String,%20int,%20scala.reflect.ClassTag)">objectFile</a></strong>(String&nbsp;path,
+          int&nbsp;minPartitions,
+          scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$4)</code>
+<div class="block">Load an RDD saved as a SequenceFile containing serialized objects, with NullWritable keys and
+ BytesWritable values that contain a serialized partition.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#parallelize(scala.collection.Seq,%20int,%20scala.reflect.ClassTag)">parallelize</a></strong>(scala.collection.Seq&lt;T&gt;&nbsp;seq,
+           int&nbsp;numSlices,
+           scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$1)</code>
+<div class="block">Distribute a local Scala collection to form an RDD.</div>
+</td>
+</tr>
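+<!-- parallelize/makeRDD sketch; host names in the second call are illustrative placement hints:
+  val nums = sc.parallelize(1 to 10, 4)   // 4 partitions
+  val pinned = sc.makeRDD(Seq((1, Seq("host1")), (2, Seq("host2"))))
+-->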
+<tr class="altColor">
+<td class="colFirst"><code><any></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#persistentRdds()">persistentRdds</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;Object&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#range(long,%20long,%20long,%20int)">range</a></strong>(long&nbsp;start,
+     long&nbsp;end,
+     long&nbsp;step,
+     int&nbsp;numSlices)</code>
+<div class="block">Creates a new RDD[Long] containing elements from <code>start</code> to <code>end</code>(exclusive), increased by
+ <code>step</code> every element.</div>
+</td>
+</tr>
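+<!-- range sketch: end is exclusive, so this yields 0, 2, 4, 6, 8:
+  val evens = sc.range(0L, 10L, step = 2)
+-->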
+<tr class="altColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#RDD_SCOPE_KEY()">RDD_SCOPE_KEY</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#RDD_SCOPE_NO_OVERRIDE_KEY()">RDD_SCOPE_NO_OVERRIDE_KEY</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static &lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/AsyncRDDActions.html" title="class in org.apache.spark.rdd">AsyncRDDActions</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#rddToAsyncRDDActions(org.apache.spark.rdd.RDD,%20scala.reflect.ClassTag)">rddToAsyncRDDActions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+                    scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$22)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static &lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/OrderedRDDFunctions.html" title="class in org.apache.spark.rdd">OrderedRDDFunctions</a>&lt;K,V,scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#rddToOrderedRDDFunctions(org.apache.spark.rdd.RDD,%20scala.math.Ordering,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">rddToOrderedRDDFunctions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                        scala.math.Ordering&lt;K&gt;&nbsp;evidence$27,
+                        scala.reflect.ClassTag&lt;K&gt;&nbsp;evidence$28,
+                        scala.reflect.ClassTag&lt;V&gt;&nbsp;evidence$29)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static &lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/PairRDDFunctions.html" title="class in org.apache.spark.rdd">PairRDDFunctions</a>&lt;K,V&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#rddToPairRDDFunctions(org.apache.spark.rdd.RDD,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.math.Ordering)">rddToPairRDDFunctions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                     scala.reflect.ClassTag&lt;K&gt;&nbsp;kt,
+                     scala.reflect.ClassTag&lt;V&gt;&nbsp;vt,
+                     scala.math.Ordering&lt;K&gt;&nbsp;ord)</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static &lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/SequenceFileRDDFunctions.html" title="class in org.apache.spark.rdd">SequenceFileRDDFunctions</a>&lt;K,V&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#rddToSequenceFileRDDFunctions(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.reflect.ClassTag,%20scala.Function1,%20scala.reflect.ClassTag)">rddToSequenceFileRDDFunctions</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                             scala.Function1&lt;K,org.apache.hadoop.io.Writable&gt;&nbsp;evidence$23,
+                             scala.reflect.ClassTag&lt;K&gt;&nbsp;evidence$24,
+                             scala.Function1&lt;V,org.apache.hadoop.io.Writable&gt;&nbsp;evidence$25,
+                             scala.reflect.ClassTag&lt;V&gt;&nbsp;evidence$26)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>boolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#requestExecutors(int)">requestExecutors</a></strong>(int&nbsp;numAdditionalExecutors)</code>
+<div class="block">:: DeveloperApi ::
+ Request an additional number of executors from the cluster manager.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U,R&gt;&nbsp;<a href="../../../org/apache/spark/partial/PartialResult.html" title="class in org.apache.spark.partial">PartialResult</a>&lt;R&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runApproximateJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20,%20long)">runApproximateJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+                 scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+                 &lt;any&gt;&nbsp;evaluator,
+                 long&nbsp;timeout)</code>
+<div class="block">:: DeveloperApi ::
+ Run a job that can return approximate results.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function1&lt;scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$19)</code>
+<div class="block">Run a job on all partitions in an RDD and return the results in an array.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.Function2,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function1&lt;scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;processPartition,
+      scala.Function2&lt;Object,U,scala.runtime.BoxedUnit&gt;&nbsp;resultHandler,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$21)</code>
+<div class="block">Run a job on all partitions in an RDD and pass the results to a handler function.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.collection.Seq,%20boolean,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function1&lt;scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      boolean&nbsp;allowLocal,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$17)</code>
+<div class="block">Run a job on a given set of partitions of an RDD, but take a function of type
+ <code>Iterator[T] =&gt; U</code> instead of <code>(TaskContext, Iterator[T]) =&gt; U</code>.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.collection.Seq,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function1&lt;scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$14)</code>
+<div class="block">Run a job on a given set of partitions of an RDD, but take a function of type
+ <code>Iterator[T] =&gt; U</code> instead of <code>(TaskContext, Iterator[T]) =&gt; U</code>.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$18)</code>
+<div class="block">Run a job on all partitions in an RDD and return the results in an array.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.Function2,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;processPartition,
+      scala.Function2&lt;Object,U,scala.runtime.BoxedUnit&gt;&nbsp;resultHandler,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$20)</code>
+<div class="block">Run a job on all partitions in an RDD and pass the results to a handler function.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.collection.Seq,%20boolean,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      boolean&nbsp;allowLocal,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$16)</code>
+<div class="block">Run a function on a given set of partitions in an RDD and return the results as an array.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.collection.Seq,%20boolean,%20scala.Function2,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      boolean&nbsp;allowLocal,
+      scala.Function2&lt;Object,U,scala.runtime.BoxedUnit&gt;&nbsp;resultHandler,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$15)</code>
+<div class="block">Run a function on a given set of partitions in an RDD and pass the results to the given
+ handler function.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;Object</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.collection.Seq,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$13)</code>
+<div class="block">Run a function on a given set of partitions in an RDD and return the results as an array.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U&gt;&nbsp;void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#runJob(org.apache.spark.rdd.RDD,%20scala.Function2,%20scala.collection.Seq,%20scala.Function2,%20scala.reflect.ClassTag)">runJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+      scala.Function2&lt;<a href="../../../org/apache/spark/TaskContext.html" title="class in org.apache.spark">TaskContext</a>,scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;func,
+      scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+      scala.Function2&lt;Object,U,scala.runtime.BoxedUnit&gt;&nbsp;resultHandler,
+      scala.reflect.ClassTag&lt;U&gt;&nbsp;evidence$12)</code>
+<div class="block">Run a function on a given set of partitions in an RDD and pass the results to the given
+ handler function.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#sequenceFile(java.lang.String,%20java.lang.Class,%20java.lang.Class)">sequenceFile</a></strong>(String&nbsp;path,
+            Class&lt;K&gt;&nbsp;keyClass,
+            Class&lt;V&gt;&nbsp;valueClass)</code>
+<div class="block">Get an RDD for a Hadoop SequenceFile with given key and value types.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#sequenceFile(java.lang.String,%20java.lang.Class,%20java.lang.Class,%20int)">sequenceFile</a></strong>(String&nbsp;path,
+            Class&lt;K&gt;&nbsp;keyClass,
+            Class&lt;V&gt;&nbsp;valueClass,
+            int&nbsp;minPartitions)</code>
+<div class="block">Get an RDD for a Hadoop SequenceFile with given key and value types.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#sequenceFile(java.lang.String,%20int,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.Function0,%20scala.Function0)">sequenceFile</a></strong>(String&nbsp;path,
+            int&nbsp;minPartitions,
+            scala.reflect.ClassTag&lt;K&gt;&nbsp;km,
+            scala.reflect.ClassTag&lt;V&gt;&nbsp;vm,
+            scala.Function0&lt;org.apache.spark.WritableConverter&lt;K&gt;&gt;&nbsp;kcf,
+            scala.Function0&lt;org.apache.spark.WritableConverter&lt;V&gt;&gt;&nbsp;vcf)</code>
+<div class="block">Version of sequenceFile() for types implicitly convertible to Writables through a
+ WritableConverter.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setCallSite(java.lang.String)">setCallSite</a></strong>(String&nbsp;shortCallSite)</code>
+<div class="block">Set the thread-local property for overriding the call sites
+ of actions and RDDs.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setCheckpointDir(java.lang.String)">setCheckpointDir</a></strong>(String&nbsp;directory)</code>
+<div class="block">Set the directory under which RDDs are going to be checkpointed.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setJobDescription(java.lang.String)">setJobDescription</a></strong>(String&nbsp;value)</code>
+<div class="block">Set a human readable description of the current job.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setJobGroup(java.lang.String,%20java.lang.String,%20boolean)">setJobGroup</a></strong>(String&nbsp;groupId,
+           String&nbsp;description,
+           boolean&nbsp;interruptOnCancel)</code>
+<div class="block">Assigns a group ID to all the jobs started by this thread until the group ID is set to a
+ different value or cleared.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setLocalProperty(java.lang.String,%20java.lang.String)">setLocalProperty</a></strong>(String&nbsp;key,
+                String&nbsp;value)</code>
+<div class="block">Set a local property that affects jobs submitted from this thread, such as the
+ Spark fair scheduler pool.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#setLogLevel(java.lang.String)">setLogLevel</a></strong>(String&nbsp;logLevel)</code>
+<div class="block">Control our logLevel.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SPARK_JOB_DESCRIPTION()">SPARK_JOB_DESCRIPTION</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SPARK_JOB_GROUP_ID()">SPARK_JOB_GROUP_ID</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#SPARK_JOB_INTERRUPT_ON_CANCEL()">SPARK_JOB_INTERRUPT_ON_CANCEL</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#sparkUser()">sparkUser</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>long</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#startTime()">startTime</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/SparkStatusTracker.html" title="class in org.apache.spark">SparkStatusTracker</a></code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#statusTracker()">statusTracker</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>void</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#stop()">stop</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>java.util.concurrent.atomic.AtomicBoolean</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#stopped()">stopped</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>static org.apache.hadoop.io.Text</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#stringToText(java.lang.String)">stringToText</a></strong>(String&nbsp;s)</code>&nbsp;</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static org.apache.spark.WritableConverter&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#stringWritableConverter()">stringWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T,U,R&gt;&nbsp;<a href="../../../org/apache/spark/SimpleFutureAction.html" title="class in org.apache.spark">SimpleFutureAction</a>&lt;R&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#submitJob(org.apache.spark.rdd.RDD,%20scala.Function1,%20scala.collection.Seq,%20scala.Function2,%20scala.Function0)">submitJob</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+         scala.Function1&lt;scala.collection.Iterator&lt;T&gt;,U&gt;&nbsp;processPartition,
+         scala.collection.Seq&lt;Object&gt;&nbsp;partitions,
+         scala.Function2&lt;Object,U,scala.runtime.BoxedUnit&gt;&nbsp;resultHandler,
+         scala.Function0&lt;R&gt;&nbsp;resultFunc)</code>
+<div class="block">Submit a job for execution and return a FutureJob holding the result.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#tachyonFolderName()">tachyonFolderName</a></strong>()</code>&nbsp;</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;String&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#textFile(java.lang.String,%20int)">textFile</a></strong>(String&nbsp;path,
+        int&nbsp;minPartitions)</code>
+<div class="block">Read a text file from HDFS, a local file system (available on all nodes), or any
+ Hadoop-supported file system URI, and return it as an RDD of Strings.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#union(org.apache.spark.rdd.RDD,%20scala.collection.Seq,%20scala.reflect.ClassTag)">union</a></strong>(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;first,
+     scala.collection.Seq&lt;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&gt;&nbsp;rest,
+     scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$7)</code>
+<div class="block">Build the union of a list of RDDs passed as variable-length arguments.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code>&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#union(scala.collection.Seq,%20scala.reflect.ClassTag)">union</a></strong>(scala.collection.Seq&lt;<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&gt;&nbsp;rdds,
+     scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$6)</code>
+<div class="block">Build the union of a list of RDDs.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>String</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#version()">version</a></strong>()</code>
+<div class="block">The version of Spark on which this application is running.</div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><code><a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;String,String&gt;&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#wholeTextFiles(java.lang.String,%20int)">wholeTextFiles</a></strong>(String&nbsp;path,
+              int&nbsp;minPartitions)</code>
+<div class="block">Read a directory of text files from HDFS, a local file system (available on all nodes), or any
+ Hadoop-supported file system URI.</div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><code>static &lt;T extends org.apache.hadoop.io.Writable&gt;&nbsp;<br>org.apache.spark.WritableConverter&lt;T&gt;</code></td>
+<td class="colLast"><code><strong><a href="../../../org/apache/spark/SparkContext.html#writableWritableConverter()">writableWritableConverter</a></strong>()</code>&nbsp;</td>
+</tr>
+</table>
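+<div class="block">A minimal Scala sketch of the <code>runJob</code> variants summarized above, assuming a
+ local master; the RDD contents and all names are illustrative only. The variants differ mainly in
+ whether the function also receives a <code>TaskContext</code> and whether results are collected
+ into an array or pushed to a handler as partitions finish.</div>
+<pre>import org.apache.spark.{SparkConf, SparkContext}
+
+val sc  = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("runJob-example"))
+val rdd = sc.parallelize(1 to 100, 4)
+
+// Iterator[T] => U variant: runs on every partition, returns Array[U].
+val sizes: Array[Int] = sc.runJob(rdd, (it: Iterator[Int]) => it.size)
+
+// Handler variant: each partition's result is passed to resultHandler as it completes.
+sc.runJob(rdd, (it: Iterator[Int]) => it.sum,
+  (partition: Int, partialSum: Int) => println(s"partition $partition -> $partialSum"))
+
+sc.stop()</pre>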
+<ul class="blockList">
+<li class="blockList"><a name="methods_inherited_from_class_Object">
+<!--   -->
+</a>
+<h3>Methods inherited from class&nbsp;Object</h3>
+<code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
+</ul>
+<ul class="blockList">
+<li class="blockList"><a name="methods_inherited_from_class_org.apache.spark.Logging">
+<!--   -->
+</a>
+<h3>Methods inherited from interface&nbsp;org.apache.spark.<a href="../../../org/apache/spark/Logging.html" title="interface in org.apache.spark">Logging</a></h3>
+<code><a href="../../../org/apache/spark/Logging.html#initializeIfNecessary()">initializeIfNecessary</a>, <a href="../../../org/apache/spark/Logging.html#initializeLogging()">initializeLogging</a>, <a href="../../../org/apache/spark/Logging.html#isTraceEnabled()">isTraceEnabled</a>, <a href="../../../org/apache/spark/Logging.html#log_()">log_</a>, <a href="../../../org/apache/spark/Logging.html#log()">log</a>, <a href="../../../org/apache/spark/Logging.html#logDebug(scala.Function0)">logDebug</a>, <a href="../../../org/apache/spark/Logging.html#logDebug(scala.Function0,%20java.lang.Throwable)">logDebug</a>, <a href="../../../org/apache/spark/Logging.html#logError(scala.Function0)">logError</a>, <a href="../../../org/apache/spark/Logging.html#logError(scala.Function0,%20java.lang.Throwable)">logError</a>, <a href="../../../org/apache/spark/Logging.html#logInfo(scala.Function0)">logInfo</a>, <a href="../../../org/apache/spark/Logging.html#logInfo(scala.Function0,%20java.lang.Throwable
 )">logInfo</a>, <a href="../../../org/apache/spark/Logging.html#logName()">logName</a>, <a href="../../../org/apache/spark/Logging.html#logTrace(scala.Function0)">logTrace</a>, <a href="../../../org/apache/spark/Logging.html#logTrace(scala.Function0,%20java.lang.Throwable)">logTrace</a>, <a href="../../../org/apache/spark/Logging.html#logWarning(scala.Function0)">logWarning</a>, <a href="../../../org/apache/spark/Logging.html#logWarning(scala.Function0,%20java.lang.Throwable)">logWarning</a></code></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+<div class="details">
+<ul class="blockList">
+<li class="blockList">
+<!-- ========= CONSTRUCTOR DETAIL ======== -->
+<ul class="blockList">
+<li class="blockList"><a name="constructor_detail">
+<!--   -->
+</a>
+<h3>Constructor Detail</h3>
+<a name="SparkContext(org.apache.spark.SparkConf)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SparkContext</h4>
+<pre>public&nbsp;SparkContext(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config)</pre>
+</li>
+</ul>
+<a name="SparkContext()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SparkContext</h4>
+<pre>public&nbsp;SparkContext()</pre>
+<div class="block">Create a SparkContext that loads settings from system properties (for instance, when
+ launching with ./bin/spark-submit).</div>
+</li>
+</ul>
+<a name="SparkContext(org.apache.spark.SparkConf, scala.collection.Map)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SparkContext</h4>
+<pre>public&nbsp;SparkContext(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config,
+            scala.collection.Map&lt;String,scala.collection.Set&lt;<a href="../../../org/apache/spark/scheduler/SplitInfo.html" title="class in org.apache.spark.scheduler">SplitInfo</a>&gt;&gt;&nbsp;preferredNodeLocationData)</pre>
+<div class="block">:: DeveloperApi ::
+ Alternative constructor for setting preferred locations where Spark will create executors.
+ <p></div>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>config</code> - a <a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark"><code>SparkConf</code></a> object specifying other Spark parameters</dd><dd><code>preferredNodeLocationData</code> - not used. Left for backward compatibility.</dd></dl>
+</li>
+</ul>
+<a name="SparkContext(java.lang.String, java.lang.String, org.apache.spark.SparkConf)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SparkContext</h4>
+<pre>public&nbsp;SparkContext(String&nbsp;master,
+            String&nbsp;appName,
+            <a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;conf)</pre>
+<div class="block">Alternative constructor that allows setting common Spark properties directly
+ <p></div>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>master</code> - Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).</dd><dd><code>appName</code> - A name for your application, to display on the cluster web UI</dd><dd><code>conf</code> - a <a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark"><code>SparkConf</code></a> object specifying other Spark parameters</dd></dl>
+</li>
+</ul>
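+<div class="block">A minimal usage sketch for this constructor, assuming a local master; the
+ application name is arbitrary.</div>
+<pre>import org.apache.spark.{SparkConf, SparkContext}
+
+// Master URL and app name passed directly; remaining settings come from the SparkConf.
+val sc = new SparkContext("local[4]", "ctor-example", new SparkConf())
+println(sc.parallelize(Seq(1, 2, 3)).count())  // 3
+sc.stop()</pre>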
+<a name="SparkContext(java.lang.String, java.lang.String, java.lang.String, scala.collection.Seq, scala.collection.Map, scala.collection.Map)">
+<!--   -->
+</a>
+<ul class="blockListLast">
+<li class="blockList">
+<h4>SparkContext</h4>
+<pre>public&nbsp;SparkContext(String&nbsp;master,
+            String&nbsp;appName,
+            String&nbsp;sparkHome,
+            scala.collection.Seq&lt;String&gt;&nbsp;jars,
+            scala.collection.Map&lt;String,String&gt;&nbsp;environment,
+            scala.collection.Map&lt;String,scala.collection.Set&lt;<a href="../../../org/apache/spark/scheduler/SplitInfo.html" title="class in org.apache.spark.scheduler">SplitInfo</a>&gt;&gt;&nbsp;preferredNodeLocationData)</pre>
+<div class="block">Alternative constructor that allows setting common Spark properties directly
+ <p></div>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>master</code> - Cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).</dd><dd><code>appName</code> - A name for your application, to display on the cluster web UI.</dd><dd><code>sparkHome</code> - Location where Spark is installed on cluster nodes.</dd><dd><code>jars</code> - Collection of JARs to send to the cluster. These can be paths on the local file
+             system or HDFS, HTTP, HTTPS, or FTP URLs.</dd><dd><code>environment</code> - Environment variables to set on worker nodes.</dd><dd><code>preferredNodeLocationData</code> - not used. Left for backward compatibility.</dd></dl>
+</li>
+</ul>
+</li>
+</ul>
+<!-- ============ METHOD DETAIL ========== -->
+<ul class="blockList">
+<li class="blockList"><a name="method_detail">
+<!--   -->
+</a>
+<h3>Method Detail</h3>
+<a name="getOrCreate(org.apache.spark.SparkConf)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getOrCreate</h4>
+<pre>public static&nbsp;<a href="../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;getOrCreate(<a href="../../../org/apache/spark/SparkConf.html" title="class in org.apache.spark">SparkConf</a>&nbsp;config)</pre>
+<div class="block">This function may be used to get or instantiate a SparkContext and register it as a
+ singleton object. Because we can only have one active SparkContext per JVM,
+ this is useful when applications may wish to share a SparkContext.
+ <p>
+ Note: This function cannot be used to create multiple SparkContext instances
+ even if multiple contexts are allowed.</div>
+<dl><dt><span class="strong">Parameters:</span></dt><dd><code>config</code> - (undocumented)</dd>
+<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl>
+</li>
+</ul>
+<a name="getOrCreate()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>getOrCreate</h4>
+<pre>public static&nbsp;<a href="../../../org/apache/spark/SparkContext.html" title="class in org.apache.spark">SparkContext</a>&nbsp;getOrCreate()</pre>
+<div class="block">This function may be used to get or instantiate a SparkContext and register it as a
+ singleton object. Because we can only have one active SparkContext per JVM,
+ this is useful when applications may wish to share a SparkContext.
+ <p>
+ This method allows omitting the SparkConf (useful when just retrieving an existing context).
+ <p>
+ Note: This function cannot be used to create multiple SparkContext instances
+ even if multiple contexts are allowed.</div>
+<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl>
+</li>
+</ul>
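+<div class="block">A minimal sketch of the singleton behavior of the two <code>getOrCreate</code>
+ overloads, assuming a local master.</div>
+<pre>import org.apache.spark.{SparkConf, SparkContext}
+
+val conf = new SparkConf().setMaster("local[2]").setAppName("getOrCreate-example")
+val sc1 = SparkContext.getOrCreate(conf)  // creates the context on the first call
+val sc2 = SparkContext.getOrCreate()      // no conf needed once a context exists
+assert(sc1 eq sc2)                        // both references are the same singleton
+sc1.stop()</pre>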
+<a name="SPARK_JOB_DESCRIPTION()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SPARK_JOB_DESCRIPTION</h4>
+<pre>public static&nbsp;String&nbsp;SPARK_JOB_DESCRIPTION()</pre>
+</li>
+</ul>
+<a name="SPARK_JOB_GROUP_ID()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SPARK_JOB_GROUP_ID</h4>
+<pre>public static&nbsp;String&nbsp;SPARK_JOB_GROUP_ID()</pre>
+</li>
+</ul>
+<a name="SPARK_JOB_INTERRUPT_ON_CANCEL()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>SPARK_JOB_INTERRUPT_ON_CANCEL</h4>
+<pre>public static&nbsp;String&nbsp;SPARK_JOB_INTERRUPT_ON_CANCEL()</pre>
+</li>
+</ul>
+<a name="RDD_SCOPE_KEY()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>RDD_SCOPE_KEY</h4>
+<pre>public static&nbsp;String&nbsp;RDD_SCOPE_KEY()</pre>
+</li>
+</ul>
+<a name="RDD_SCOPE_NO_OVERRIDE_KEY()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>RDD_SCOPE_NO_OVERRIDE_KEY</h4>
+<pre>public static&nbsp;String&nbsp;RDD_SCOPE_NO_OVERRIDE_KEY()</pre>
+</li>
+</ul>
+<a name="DRIVER_IDENTIFIER()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>DRIVER_IDENTIFIER</h4>
+<pre>public static&nbsp;String&nbsp;DRIVER_IDENTIFIER()</pre>
+<div class="block">Executor id for the driver.  In earlier versions of Spark, this was <code><driver></code>, but this was
+ changed to <code>driver</code> because the angle brackets caused escaping issues in URLs and XML (see
+ SPARK-6716 for more details).</div>
+<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl>
+</li>
+</ul>
+<a name="LEGACY_DRIVER_IDENTIFIER()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>LEGACY_DRIVER_IDENTIFIER</h4>
+<pre>public static&nbsp;String&nbsp;LEGACY_DRIVER_IDENTIFIER()</pre>
+<div class="block">Legacy version of DRIVER_IDENTIFIER, retained for backwards-compatibility.</div>
+<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl>
+</li>
+</ul>
+<a name="rddToPairRDDFunctions(org.apache.spark.rdd.RDD, scala.reflect.ClassTag, scala.reflect.ClassTag, scala.math.Ordering)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>rddToPairRDDFunctions</h4>
+<pre>public static&nbsp;&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/PairRDDFunctions.html" title="class in org.apache.spark.rdd">PairRDDFunctions</a>&lt;K,V&gt;&nbsp;rddToPairRDDFunctions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                                                scala.reflect.ClassTag&lt;K&gt;&nbsp;kt,
+                                                scala.reflect.ClassTag&lt;V&gt;&nbsp;vt,
+                                                scala.math.Ordering&lt;K&gt;&nbsp;ord)</pre>
+</li>
+</ul>
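+<div class="block">This implicit conversion is what makes pair-RDD operations such as
+ <code>reduceByKey</code> compile on an RDD of tuples; the neighboring conversions below work the
+ same way for their respective function classes. A minimal sketch, assuming an existing context
+ <code>sc</code>.</div>
+<pre>import org.apache.spark.SparkContext._  // brings the implicits into scope (optional since Spark 1.3)
+
+val pairs  = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
+val counts = pairs.reduceByKey(_ + _)   // RDD[(String, Int)] enriched to PairRDDFunctions
+counts.collect()                        // Array((a,4), (b,2)) in some order</pre>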
+<a name="rddToAsyncRDDActions(org.apache.spark.rdd.RDD, scala.reflect.ClassTag)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>rddToAsyncRDDActions</h4>
+<pre>public static&nbsp;&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/AsyncRDDActions.html" title="class in org.apache.spark.rdd">AsyncRDDActions</a>&lt;T&gt;&nbsp;rddToAsyncRDDActions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+                                          scala.reflect.ClassTag&lt;T&gt;&nbsp;evidence$22)</pre>
+</li>
+</ul>
+<a name="rddToSequenceFileRDDFunctions(org.apache.spark.rdd.RDD, scala.Function1, scala.reflect.ClassTag, scala.Function1, scala.reflect.ClassTag)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>rddToSequenceFileRDDFunctions</h4>
+<pre>public static&nbsp;&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/SequenceFileRDDFunctions.html" title="class in org.apache.spark.rdd">SequenceFileRDDFunctions</a>&lt;K,V&gt;&nbsp;rddToSequenceFileRDDFunctions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                                                                scala.Function1&lt;K,org.apache.hadoop.io.Writable&gt;&nbsp;evidence$23,
+                                                                scala.reflect.ClassTag&lt;K&gt;&nbsp;evidence$24,
+                                                                scala.Function1&lt;V,org.apache.hadoop.io.Writable&gt;&nbsp;evidence$25,
+                                                                scala.reflect.ClassTag&lt;V&gt;&nbsp;evidence$26)</pre>
+</li>
+</ul>
+<a name="rddToOrderedRDDFunctions(org.apache.spark.rdd.RDD, scala.math.Ordering, scala.reflect.ClassTag, scala.reflect.ClassTag)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>rddToOrderedRDDFunctions</h4>
+<pre>public static&nbsp;&lt;K,V&gt;&nbsp;<a href="../../../org/apache/spark/rdd/OrderedRDDFunctions.html" title="class in org.apache.spark.rdd">OrderedRDDFunctions</a>&lt;K,V,scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rddToOrderedRDDFunctions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;scala.Tuple2&lt;K,V&gt;&gt;&nbsp;rdd,
+                                                                        scala.math.Ordering&lt;K&gt;&nbsp;evidence$27,
+                                                                        scala.reflect.ClassTag&lt;K&gt;&nbsp;evidence$28,
+                                                                        scala.reflect.ClassTag&lt;V&gt;&nbsp;evidence$29)</pre>
+</li>
+</ul>
+<a name="doubleRDDToDoubleRDDFunctions(org.apache.spark.rdd.RDD)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>doubleRDDToDoubleRDDFunctions</h4>
+<pre>public static&nbsp;<a href="../../../org/apache/spark/rdd/DoubleRDDFunctions.html" title="class in org.apache.spark.rdd">DoubleRDDFunctions</a>&nbsp;doubleRDDToDoubleRDDFunctions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;Object&gt;&nbsp;rdd)</pre>
+</li>
+</ul>
+<a name="numericRDDToDoubleRDDFunctions(org.apache.spark.rdd.RDD, scala.math.Numeric)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>numericRDDToDoubleRDDFunctions</h4>
+<pre>public static&nbsp;&lt;T&gt;&nbsp;<a href="../../../org/apache/spark/rdd/DoubleRDDFunctions.html" title="class in org.apache.spark.rdd">DoubleRDDFunctions</a>&nbsp;numericRDDToDoubleRDDFunctions(<a href="../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a>&lt;T&gt;&nbsp;rdd,
+                                                    scala.math.Numeric&lt;T&gt;&nbsp;num)</pre>
+</li>
+</ul>
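+<div class="block">The two conversions above enrich RDDs of doubles (or any numeric type) with
+ <code>DoubleRDDFunctions</code>, enabling summary statistics. A minimal sketch, assuming an
+ existing context <code>sc</code>.</div>
+<pre>val nums = sc.parallelize(1 to 10)  // RDD[Int], converted via the Numeric evidence
+println(nums.mean())                // 5.5
+println(nums.stats())               // count, mean, stdev, max, min</pre>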
+<a name="intToIntWritable(int)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>intToIntWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.IntWritable&nbsp;intToIntWritable(int&nbsp;i)</pre>
+</li>
+</ul>
+<a name="longToLongWritable(long)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>longToLongWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.LongWritable&nbsp;longToLongWritable(long&nbsp;l)</pre>
+</li>
+</ul>
+<a name="floatToFloatWritable(float)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>floatToFloatWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.FloatWritable&nbsp;floatToFloatWritable(float&nbsp;f)</pre>
+</li>
+</ul>
+<a name="doubleToDoubleWritable(double)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>doubleToDoubleWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.DoubleWritable&nbsp;doubleToDoubleWritable(double&nbsp;d)</pre>
+</li>
+</ul>
+<a name="boolToBoolWritable(boolean)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>boolToBoolWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.BooleanWritable&nbsp;boolToBoolWritable(boolean&nbsp;b)</pre>
+</li>
+</ul>
+<a name="bytesToBytesWritable(byte[])">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>bytesToBytesWritable</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.BytesWritable&nbsp;bytesToBytesWritable(byte[]&nbsp;aob)</pre>
+</li>
+</ul>
+<a name="stringToText(java.lang.String)">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>stringToText</h4>
+<pre>public static&nbsp;org.apache.hadoop.io.Text&nbsp;stringToText(String&nbsp;s)</pre>
+</li>
+</ul>
+<a name="intWritableConverter()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>intWritableConverter</h4>
+<pre>public static&nbsp;org.apache.spark.WritableConverter&lt;Object&gt;&nbsp;intWritableConverter()</pre>
+</li>
+</ul>
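+<div class="block">The Writable conversions and converters in this group let
+ <code>sequenceFile()</code> and <code>saveAsSequenceFile()</code> work with plain Scala types. A
+ minimal sketch; the HDFS paths are hypothetical and an existing context <code>sc</code> is
+ assumed.</div>
+<pre>// Read a SequenceFile of (Text, IntWritable) as plain (String, Int) pairs.
+val pairs = sc.sequenceFile[String, Int]("hdfs:///tmp/example-input.seq")
+
+// Write it back; stringToText / intToIntWritable supply the Writable wrappers.
+pairs.saveAsSequenceFile("hdfs:///tmp/example-output")</pre>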
+<a name="longWritableConverter()">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<h4>longWritableConverter</h4>

<TRUNCATED>
