You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2016/11/06 07:20:03 UTC

[34/51] [partial] spark-website git commit: Add 1.6.3 doc.

http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/constant-values.html
----------------------------------------------------------------------
diff --git a/site/docs/1.6.3/api/java/constant-values.html b/site/docs/1.6.3/api/java/constant-values.html
new file mode 100644
index 0000000..d193189
--- /dev/null
+++ b/site/docs/1.6.3/api/java/constant-values.html
@@ -0,0 +1,219 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:22 PDT 2016 -->
+<title>Constant Field Values (Spark 1.6.3 JavaDoc)</title>
+<meta name="date" content="2016-11-02">
+<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
+</head>
+<body>
+<script type="text/javascript"><!--
+    if (location.href.indexOf('is-external=true') == -1) {
+        parent.document.title="Constant Field Values (Spark 1.6.3 JavaDoc)";
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar_top">
+<!--   -->
+</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li><a href="deprecated-list.html">Deprecated</a></li>
+<li><a href="index-all.html">Index</a></li>
+<li><a href="help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?constant-values.html" target="_top">Frames</a></li>
+<li><a href="constant-values.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<div class="header">
+<h1 title="Constant Field Values" class="title">Constant Field Values</h1>
+<h2 title="Contents">Contents</h2>
+<ul>
+<li><a href="#org.apache">org.apache.*</a></li>
+</ul>
+</div>
+<div class="constantValuesContainer"><a name="org.apache">
+<!--   -->
+</a>
+<h2 title="org.apache">org.apache.*</h2>
+<ul class="blockList">
+<li class="blockList">
+<table border="0" cellpadding="3" cellspacing="0" summary="Constant Field Values table, listing constant fields, and values">
+<caption><span>org.apache.spark.launcher.<a href="org/apache/spark/launcher/SparkLauncher.html" title="class in org.apache.spark.launcher">SparkLauncher</a></span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th scope="col">Constant Field</th>
+<th class="colLast" scope="col">Value</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.CHILD_CONNECTION_TIMEOUT">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#CHILD_CONNECTION_TIMEOUT">CHILD_CONNECTION_TIMEOUT</a></code></td>
+<td class="colLast"><code>"spark.launcher.childConectionTimeout"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.CHILD_PROCESS_LOGGER_NAME">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#CHILD_PROCESS_LOGGER_NAME">CHILD_PROCESS_LOGGER_NAME</a></code></td>
+<td class="colLast"><code>"spark.launcher.childProcLoggerName"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.DRIVER_EXTRA_CLASSPATH">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#DRIVER_EXTRA_CLASSPATH">DRIVER_EXTRA_CLASSPATH</a></code></td>
+<td class="colLast"><code>"spark.driver.extraClassPath"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#DRIVER_EXTRA_JAVA_OPTIONS">DRIVER_EXTRA_JAVA_OPTIONS</a></code></td>
+<td class="colLast"><code>"spark.driver.extraJavaOptions"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#DRIVER_EXTRA_LIBRARY_PATH">DRIVER_EXTRA_LIBRARY_PATH</a></code></td>
+<td class="colLast"><code>"spark.driver.extraLibraryPath"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.DRIVER_MEMORY">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#DRIVER_MEMORY">DRIVER_MEMORY</a></code></td>
+<td class="colLast"><code>"spark.driver.memory"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.EXECUTOR_CORES">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#EXECUTOR_CORES">EXECUTOR_CORES</a></code></td>
+<td class="colLast"><code>"spark.executor.cores"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.EXECUTOR_EXTRA_CLASSPATH">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#EXECUTOR_EXTRA_CLASSPATH">EXECUTOR_EXTRA_CLASSPATH</a></code></td>
+<td class="colLast"><code>"spark.executor.extraClassPath"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.EXECUTOR_EXTRA_JAVA_OPTIONS">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#EXECUTOR_EXTRA_JAVA_OPTIONS">EXECUTOR_EXTRA_JAVA_OPTIONS</a></code></td>
+<td class="colLast"><code>"spark.executor.extraJavaOptions"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.EXECUTOR_EXTRA_LIBRARY_PATH">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#EXECUTOR_EXTRA_LIBRARY_PATH">EXECUTOR_EXTRA_LIBRARY_PATH</a></code></td>
+<td class="colLast"><code>"spark.executor.extraLibraryPath"</code></td>
+</tr>
+<tr class="altColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.EXECUTOR_MEMORY">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#EXECUTOR_MEMORY">EXECUTOR_MEMORY</a></code></td>
+<td class="colLast"><code>"spark.executor.memory"</code></td>
+</tr>
+<tr class="rowColor">
+<td class="colFirst"><a name="org.apache.spark.launcher.SparkLauncher.SPARK_MASTER">
+<!--   -->
+</a><code>public&nbsp;static&nbsp;final&nbsp;String</code></td>
+<td><code><a href="org/apache/spark/launcher/SparkLauncher.html#SPARK_MASTER">SPARK_MASTER</a></code></td>
+<td class="colLast"><code>"spark.master"</code></td>
+</tr>
+</tbody>
+</table>
+</li>
+</ul>
+</div>
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar_bottom">
+<!--   -->
+</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li><a href="deprecated-list.html">Deprecated</a></li>
+<li><a href="index-all.html">Index</a></li>
+<li><a href="help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?constant-values.html" target="_top">Frames</a></li>
+<li><a href="constant-values.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<script defer="defer" type="text/javascript" src="lib/jquery.js"></script><script defer="defer" type="text/javascript" src="lib/api-javadocs.js"></script></body>
+</html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/deprecated-list.html
----------------------------------------------------------------------
diff --git a/site/docs/1.6.3/api/java/deprecated-list.html b/site/docs/1.6.3/api/java/deprecated-list.html
new file mode 100644
index 0000000..a7b2377
--- /dev/null
+++ b/site/docs/1.6.3/api/java/deprecated-list.html
@@ -0,0 +1,584 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:23 PDT 2016 -->
+<title>Deprecated List (Spark 1.6.3 JavaDoc)</title>
+<meta name="date" content="2016-11-02">
+<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
+</head>
+<body>
+<script type="text/javascript"><!--
+    if (location.href.indexOf('is-external=true') == -1) {
+        parent.document.title="Deprecated List (Spark 1.6.3 JavaDoc)";
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar_top">
+<!--   -->
+</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li class="navBarCell1Rev">Deprecated</li>
+<li><a href="index-all.html">Index</a></li>
+<li><a href="help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?deprecated-list.html" target="_top">Frames</a></li>
+<li><a href="deprecated-list.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<div class="header">
+<h1 title="Deprecated API" class="title">Deprecated API</h1>
+<h2 title="Contents">Contents</h2>
+<ul>
+<li><a href="#method">Deprecated Methods</a></li>
+</ul>
+</div>
+<div class="contentContainer"><a name="method">
+<!--   -->
+</a>
+<ul class="blockList">
+<li class="blockList">
+<table border="0" cellpadding="3" cellspacing="0" summary="Deprecated Methods table, listing deprecated methods, and an explanation">
+<caption><span>Deprecated Methods</span><span class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colOne" scope="col">Method and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/StreamingContext.html#awaitTermination(long)">org.apache.spark.streaming.StreamingContext.awaitTermination(long)</a>
+<div class="block"><i>As of 1.3.0, replaced by <code>awaitTerminationOrTimeout(Long)</code>.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaStreamingContext.html#awaitTermination(long)">org.apache.spark.streaming.api.java.JavaStreamingContext.awaitTermination(long)</a>
+<div class="block"><i>As of 1.3.0, replaced by <code>awaitTerminationOrTimeout(Long)</code>.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function0,%20org.apache.spark.sql.types.DataType)">org.apache.spark.sql.functions.callUDF(Function0<?>, DataType)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function1,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function1<?, ?>, DataType, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function10,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function10<?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf().
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function2,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function2<?, ?, ?>, DataType, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function3,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function3<?, ?, ?, ?>, DataType, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function4,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function4<?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function5,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function5<?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function6,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function6<?, ?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function7,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function7<?, ?, ?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function8,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function8<?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf()
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUDF(scala.Function9,%20org.apache.spark.sql.types.DataType,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column,%20org.apache.spark.sql.Column)">org.apache.spark.sql.functions.callUDF(Function9<?, ?, ?, ?, ?, ?, ?, ?, ?, ?>, DataType, Column, Column, Column, Column, Column, Column, Column, Column, Column)</a>
+<div class="block"><i>As of 1.5.0, since it's redundant with udf().
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#callUdf(java.lang.String,%20scala.collection.Seq)">org.apache.spark.sql.functions.callUdf(String, Seq<Column>)</a>
+<div class="block"><i>As of 1.5.0, since it was not coherent to have two functions callUdf and callUDF.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/api/java/StorageLevels.html#create(boolean,%20boolean,%20boolean,%20int)">org.apache.spark.api.java.StorageLevels.create(boolean, boolean, boolean, int)</a></td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#createJDBCTable(java.lang.String,%20java.lang.String,%20boolean)">org.apache.spark.sql.DataFrame.createJDBCTable(String, String, boolean)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().jdbc()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#cumeDist()">org.apache.spark.sql.functions.cumeDist()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>cume_dist</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/api/java/JavaSparkContext.html#defaultMinSplits()">org.apache.spark.api.java.JavaSparkContext.defaultMinSplits()</a>
+<div class="block"><i>As of Spark 1.0.0, defaultMinSplits is deprecated, use
+            <a href="org/apache/spark/api/java/JavaSparkContext.html#defaultMinPartitions()"><code>JavaSparkContext.defaultMinPartitions()</code></a> instead</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#denseRank()">org.apache.spark.sql.functions.denseRank()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>dense_rank</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaDStreamLike.html#foreach(org.apache.spark.api.java.function.Function)">org.apache.spark.streaming.api.java.JavaDStreamLike.foreach(Function<R, Void>)</a>
+<div class="block"><i>As of release 0.9.0, replaced by foreachRDD</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/dstream/DStream.html#foreach(scala.Function1)">org.apache.spark.streaming.dstream.DStream.foreach(Function1<RDD<T>, BoxedUnit>)</a>
+<div class="block"><i>As of 0.9.0, replaced by <code>foreachRDD</code>.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/dstream/DStream.html#foreach(scala.Function2)">org.apache.spark.streaming.dstream.DStream.foreach(Function2<RDD<T>, Time, BoxedUnit>)</a>
+<div class="block"><i>As of 0.9.0, replaced by <code>foreachRDD</code>.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaDStreamLike.html#foreach(org.apache.spark.api.java.function.Function2)">org.apache.spark.streaming.api.java.JavaDStreamLike.foreach(Function2<R, Time, Void>)</a>
+<div class="block"><i>As of release 0.9.0, replaced by foreachRDD</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaDStreamLike.html#foreachRDD(org.apache.spark.api.java.function.Function)">org.apache.spark.streaming.api.java.JavaDStreamLike.foreachRDD(Function<R, Void>)</a>
+<div class="block"><i>As of release 1.6.0, replaced by foreachRDD(JVoidFunction)</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaDStreamLike.html#foreachRDD(org.apache.spark.api.java.function.Function2)">org.apache.spark.streaming.api.java.JavaDStreamLike.foreachRDD(Function2<R, Time, Void>)</a>
+<div class="block"><i>As of release 1.6.0, replaced by foreachRDD(JVoidFunction2)</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/types/DataType.html#fromCaseClassString(java.lang.String)">org.apache.spark.sql.types.DataType.fromCaseClassString(String)</a>
+<div class="block"><i>As of 1.2.0, replaced by <code>DataType.fromJson()</code></i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaStreamingContext.html#getOrCreate(java.lang.String,%20org.apache.hadoop.conf.Configuration,%20org.apache.spark.streaming.api.java.JavaStreamingContextFactory)">org.apache.spark.streaming.api.java.JavaStreamingContext.getOrCreate(String, Configuration, JavaStreamingContextFactory)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>getOrCreate</code> without JavaStreamingContextFactory.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaStreamingContext.html#getOrCreate(java.lang.String,%20org.apache.hadoop.conf.Configuration,%20org.apache.spark.streaming.api.java.JavaStreamingContextFactory,%20boolean)">org.apache.spark.streaming.api.java.JavaStreamingContext.getOrCreate(String, Configuration, JavaStreamingContextFactory, boolean)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>getOrCreate</code> without JavaStreamingContextFactory.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaStreamingContext.html#getOrCreate(java.lang.String,%20org.apache.spark.streaming.api.java.JavaStreamingContextFactory)">org.apache.spark.streaming.api.java.JavaStreamingContext.getOrCreate(String, JavaStreamingContextFactory)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>getOrCreate</code> without JavaStreamingContextFactory.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/Column.html#in(java.lang.Object...)">org.apache.spark.sql.Column.in(Object...)</a>
+<div class="block"><i>As of 1.5.0. Use isin. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/Column.html#in(scala.collection.Seq)">org.apache.spark.sql.Column.in(Seq<Object>)</a>
+<div class="block"><i>As of 1.5.0. Use isin. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#inputFileName()">org.apache.spark.sql.functions.inputFileName()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>input_file_name</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#insertInto(java.lang.String)">org.apache.spark.sql.DataFrame.insertInto(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().mode(SaveMode.Append).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#insertInto(java.lang.String,%20boolean)">org.apache.spark.sql.DataFrame.insertInto(String, boolean)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().mode(SaveMode.Append|SaveMode.Overwrite).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#insertIntoJDBC(java.lang.String,%20java.lang.String,%20boolean)">org.apache.spark.sql.DataFrame.insertIntoJDBC(String, String, boolean)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().jdbc()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#isNaN(org.apache.spark.sql.Column)">org.apache.spark.sql.functions.isNaN(Column)</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>isnan</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jdbc(java.lang.String,%20java.lang.String)">org.apache.spark.sql.SQLContext.jdbc(String, String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().jdbc()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jdbc(java.lang.String,%20java.lang.String,%20java.lang.String[])">org.apache.spark.sql.SQLContext.jdbc(String, String, String[])</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().jdbc()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jdbc(java.lang.String,%20java.lang.String,%20java.lang.String,%20long,%20long,%20int)">org.apache.spark.sql.SQLContext.jdbc(String, String, String, long, long, int)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().jdbc()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonFile(java.lang.String)">org.apache.spark.sql.SQLContext.jsonFile(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonFile(java.lang.String,%20double)">org.apache.spark.sql.SQLContext.jsonFile(String, double)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonFile(java.lang.String,%20org.apache.spark.sql.types.StructType)">org.apache.spark.sql.SQLContext.jsonFile(String, StructType)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.api.java.JavaRDD)">org.apache.spark.sql.SQLContext.jsonRDD(JavaRDD<String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.api.java.JavaRDD,%20double)">org.apache.spark.sql.SQLContext.jsonRDD(JavaRDD<String>, double)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.api.java.JavaRDD,%20org.apache.spark.sql.types.StructType)">org.apache.spark.sql.SQLContext.jsonRDD(JavaRDD<String>, StructType)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.rdd.RDD)">org.apache.spark.sql.SQLContext.jsonRDD(RDD<String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.rdd.RDD,%20double)">org.apache.spark.sql.SQLContext.jsonRDD(RDD<String>, double)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#jsonRDD(org.apache.spark.rdd.RDD,%20org.apache.spark.sql.types.StructType)">org.apache.spark.sql.SQLContext.jsonRDD(RDD<String>, StructType)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().json()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String)">org.apache.spark.sql.SQLContext.load(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().load(path)</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String,%20java.util.Map)">org.apache.spark.sql.SQLContext.load(String, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().format(source).options(options).load()</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String,%20scala.collection.immutable.Map)">org.apache.spark.sql.SQLContext.load(String, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().format(source).options(options).load()</code>.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String,%20java.lang.String)">org.apache.spark.sql.SQLContext.load(String, String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().format(source).load(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String,%20org.apache.spark.sql.types.StructType,%20java.util.Map)">org.apache.spark.sql.SQLContext.load(String, StructType, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>read().format(source).schema(schema).options(options).load()</code>.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#load(java.lang.String,%20org.apache.spark.sql.types.StructType,%20scala.collection.immutable.Map)">org.apache.spark.sql.SQLContext.load(String, StructType, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>read().format(source).schema(schema).options(options).load()</code>.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/mllib/util/MLUtils.html#loadLabeledData(org.apache.spark.SparkContext,%20java.lang.String)">org.apache.spark.mllib.util.MLUtils.loadLabeledData(SparkContext, String)</a>
+<div class="block"><i>Should use <a href="org/apache/spark/rdd/RDD.html#saveAsTextFile(java.lang.String)"><code>RDD.saveAsTextFile(java.lang.String)</code></a> for saving and
+            <a href="org/apache/spark/mllib/util/MLUtils.html#loadLabeledPoints(org.apache.spark.SparkContext,%20java.lang.String,%20int)"><code>MLUtils.loadLabeledPoints(org.apache.spark.SparkContext, java.lang.String, int)</code></a> for loading.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/StreamingContext.html#networkStream(org.apache.spark.streaming.receiver.Receiver,%20scala.reflect.ClassTag)">org.apache.spark.streaming.StreamingContext.networkStream(Receiver<T>, ClassTag<T>)</a>
+<div class="block"><i>As of 1.0.0", replaced by <code>receiverStream</code>.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/SQLContext.html#parquetFile(java.lang.String...)">org.apache.spark.sql.SQLContext.parquetFile(String...)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>read().parquet()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#percentRank()">org.apache.spark.sql.functions.percentRank()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>percent_rank</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaDStreamLike.html#reduceByWindow(scala.Function2,%20org.apache.spark.streaming.Duration,%20org.apache.spark.streaming.Duration)">org.apache.spark.streaming.api.java.JavaDStreamLike.reduceByWindow(Function2<T, T, T>, Duration, Duration)</a>
+<div class="block"><i>As this API is not Java compatible.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#rowNumber()">org.apache.spark.sql.functions.rowNumber()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>row_number</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String)">org.apache.spark.sql.DataFrame.save(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().save(path)</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String,%20org.apache.spark.sql.SaveMode)">org.apache.spark.sql.DataFrame.save(String, SaveMode)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().mode(mode).save(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String,%20org.apache.spark.sql.SaveMode,%20java.util.Map)">org.apache.spark.sql.DataFrame.save(String, SaveMode, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().format(source).mode(mode).options(options).save(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String,%20org.apache.spark.sql.SaveMode,%20scala.collection.immutable.Map)">org.apache.spark.sql.DataFrame.save(String, SaveMode, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().format(source).mode(mode).options(options).save(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String,%20java.lang.String)">org.apache.spark.sql.DataFrame.save(String, String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().format(source).save(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#save(java.lang.String,%20java.lang.String,%20org.apache.spark.sql.SaveMode)">org.apache.spark.sql.DataFrame.save(String, String, SaveMode)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().format(source).mode(mode).save(path)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsParquetFile(java.lang.String)">org.apache.spark.sql.DataFrame.saveAsParquetFile(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().parquet()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String)">org.apache.spark.sql.DataFrame.saveAsTable(String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String,%20org.apache.spark.sql.SaveMode)">org.apache.spark.sql.DataFrame.saveAsTable(String, SaveMode)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().mode(mode).saveAsTable(tableName)</code>.
+              This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String,%20java.lang.String)">org.apache.spark.sql.DataFrame.saveAsTable(String, String)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().format(source).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String,%20java.lang.String,%20org.apache.spark.sql.SaveMode)">org.apache.spark.sql.DataFrame.saveAsTable(String, String, SaveMode)</a>
+<div class="block"><i>As of 1.4.0, replaced by <code>write().mode(mode).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String,%20java.lang.String,%20org.apache.spark.sql.SaveMode,%20java.util.Map)">org.apache.spark.sql.DataFrame.saveAsTable(String, String, SaveMode, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().format(source).mode(mode).options(options).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#saveAsTable(java.lang.String,%20java.lang.String,%20org.apache.spark.sql.SaveMode,%20scala.collection.immutable.Map)">org.apache.spark.sql.DataFrame.saveAsTable(String, String, SaveMode, Map<String, String>)</a>
+<div class="block"><i>As of 1.4.0, replaced by
+            <code>write().format(source).mode(mode).options(options).saveAsTable(tableName)</code>.
+             This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/mllib/util/MLUtils.html#saveLabeledData(org.apache.spark.rdd.RDD,%20java.lang.String)">org.apache.spark.mllib.util.MLUtils.saveLabeledData(RDD<LabeledPoint>, String)</a>
+<div class="block"><i>Should use <a href="org/apache/spark/rdd/RDD.html#saveAsTextFile(java.lang.String)"><code>RDD.saveAsTextFile(java.lang.String)</code></a> for saving and
+            <a href="org/apache/spark/mllib/util/MLUtils.html#loadLabeledPoints(org.apache.spark.SparkContext,%20java.lang.String,%20int)"><code>MLUtils.loadLabeledPoints(org.apache.spark.SparkContext, java.lang.String, int)</code></a> for loading.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/streaming/api/java/JavaStreamingContext.html#sc()">org.apache.spark.streaming.api.java.JavaStreamingContext.sc()</a>
+<div class="block"><i>As of 0.9.0, replaced by <code>sparkContext</code></i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/mllib/optimization/LBFGS.html#setMaxNumIterations(int)">org.apache.spark.mllib.optimization.LBFGS.setMaxNumIterations(int)</a>
+<div class="block"><i>use <a href="org/apache/spark/mllib/optimization/LBFGS.html#setNumIterations(int)"><code>LBFGS.setNumIterations(int)</code></a> instead</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/ml/evaluation/BinaryClassificationEvaluator.html#setScoreCol(java.lang.String)">org.apache.spark.ml.evaluation.BinaryClassificationEvaluator.setScoreCol(String)</a>
+<div class="block"><i>use <code>setRawPredictionCol()</code> instead</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/sql/functions.html#sparkPartitionId()">org.apache.spark.sql.functions.sparkPartitionId()</a>
+<div class="block"><i>As of 1.6.0, replaced by <code>spark_partition_id</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/api/java/JavaRDDLike.html#toArray()">org.apache.spark.api.java.JavaRDDLike.toArray()</a>
+<div class="block"><i>As of Spark 1.0.0, toArray() is deprecated, use <a href="org/apache/spark/api/java/JavaRDDLike.html#collect()"><code>JavaRDDLike.collect()</code></a> instead</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/streaming/StreamingContext.html#toPairDStreamFunctions(org.apache.spark.streaming.dstream.DStream,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.math.Ordering)">org.apache.spark.streaming.StreamingContext.toPairDStreamFunctions(DStream<Tuple2<K, V>>, ClassTag<K>, ClassTag<V>, Ordering<K>)</a>
+<div class="block"><i>As of 1.3.0, replaced by implicit functions in the DStream companion object.
+             This is kept here only for backward compatibility.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/sql/DataFrame.html#toSchemaRDD()">org.apache.spark.sql.DataFrame.toSchemaRDD()</a>
+<div class="block"><i>As of 1.3.0, replaced by <code>toDF()</code>. This will be removed in Spark 2.0.</i></div>
+</td>
+</tr>
+<tr class="rowColor">
+<td class="colOne"><a href="org/apache/spark/mllib/rdd/RDDFunctions.html#treeAggregate(U,%20scala.Function2,%20scala.Function2,%20int,%20scala.reflect.ClassTag)">org.apache.spark.mllib.rdd.RDDFunctions.treeAggregate(U, Function2<U, T, U>, Function2<U, U, U>, int, ClassTag<U>)</a>
+<div class="block"><i>Use <a href="org/apache/spark/rdd/RDD.html#treeAggregate(U,%20scala.Function2,%20scala.Function2,%20int,%20scala.reflect.ClassTag)"><code>RDD.treeAggregate(U, scala.Function2&lt;U, T, U&gt;, scala.Function2&lt;U, U, U&gt;, int, scala.reflect.ClassTag&lt;U&gt;)</code></a> instead.</i></div>
+</td>
+</tr>
+<tr class="altColor">
+<td class="colOne"><a href="org/apache/spark/mllib/rdd/RDDFunctions.html#treeReduce(scala.Function2,%20int)">org.apache.spark.mllib.rdd.RDDFunctions.treeReduce(Function2<T, T, T>, int)</a>
+<div class="block"><i>Use <a href="org/apache/spark/rdd/RDD.html#treeReduce(scala.Function2,%20int)"><code>RDD.treeReduce(scala.Function2&lt;T, T, T&gt;, int)</code></a> instead.</i></div>
+</td>
+</tr>
+</tbody>
+</table>
+</li>
+</ul>
+</div>
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar_bottom">
+<!--   -->
+</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li class="navBarCell1Rev">Deprecated</li>
+<li><a href="index-all.html">Index</a></li>
+<li><a href="help-doc.html">Help</a></li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?deprecated-list.html" target="_top">Frames</a></li>
+<li><a href="deprecated-list.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<script defer="defer" type="text/javascript" src="lib/jquery.js"></script><script defer="defer" type="text/javascript" src="lib/api-javadocs.js"></script></body>
+</html>

http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/help-doc.html
----------------------------------------------------------------------
diff --git a/site/docs/1.6.3/api/java/help-doc.html b/site/docs/1.6.3/api/java/help-doc.html
new file mode 100644
index 0000000..b177af6
--- /dev/null
+++ b/site/docs/1.6.3/api/java/help-doc.html
@@ -0,0 +1,214 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<!-- NewPage -->
+<html lang="en">
+<head>
+<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:23 PDT 2016 -->
+<title>API Help (Spark 1.6.3 JavaDoc)</title>
+<meta name="date" content="2016-11-02">
+<link rel="stylesheet" type="text/css" href="stylesheet.css" title="Style">
+</head>
+<body>
+<script type="text/javascript"><!--
+    if (location.href.indexOf('is-external=true') == -1) {
+        parent.document.title="API Help (Spark 1.6.3 JavaDoc)";
+    }
+//-->
+</script>
+<noscript>
+<div>JavaScript is disabled on your browser.</div>
+</noscript>
+<!-- ========= START OF TOP NAVBAR ======= -->
+<div class="topNav"><a name="navbar_top">
+<!--   -->
+</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li><a href="deprecated-list.html">Deprecated</a></li>
+<li><a href="index-all.html">Index</a></li>
+<li class="navBarCell1Rev">Help</li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?help-doc.html" target="_top">Frames</a></li>
+<li><a href="help-doc.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_top">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_top");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_top">
+<!--   -->
+</a></div>
+<!-- ========= END OF TOP NAVBAR ========= -->
+<div class="header">
+<h1 class="title">How This API Document Is Organized</h1>
+<div class="subTitle">This API (Application Programming Interface) document has pages corresponding to the items in the navigation bar, described as follows.</div>
+</div>
+<div class="contentContainer">
+<ul class="blockList">
+<li class="blockList">
+<h2>Overview</h2>
+<p>The <a href="overview-summary.html">Overview</a> page is the front page of this API document and provides a list of all packages with a summary for each.  This page can also contain an overall description of the set of packages.</p>
+</li>
+<li class="blockList">
+<h2>Package</h2>
+<p>Each package has a page that contains a list of its classes and interfaces, with a summary for each. This page can contain six categories:</p>
+<ul>
+<li>Interfaces (italic)</li>
+<li>Classes</li>
+<li>Enums</li>
+<li>Exceptions</li>
+<li>Errors</li>
+<li>Annotation Types</li>
+</ul>
+</li>
+<li class="blockList">
+<h2>Class/Interface</h2>
+<p>Each class, interface, nested class and nested interface has its own separate page. Each of these pages has three sections consisting of a class/interface description, summary tables, and detailed member descriptions:</p>
+<ul>
+<li>Class inheritance diagram</li>
+<li>Direct Subclasses</li>
+<li>All Known Subinterfaces</li>
+<li>All Known Implementing Classes</li>
+<li>Class/interface declaration</li>
+<li>Class/interface description</li>
+</ul>
+<ul>
+<li>Nested Class Summary</li>
+<li>Field Summary</li>
+<li>Constructor Summary</li>
+<li>Method Summary</li>
+</ul>
+<ul>
+<li>Field Detail</li>
+<li>Constructor Detail</li>
+<li>Method Detail</li>
+</ul>
+<p>Each summary entry contains the first sentence from the detailed description for that item. The summary entries are alphabetical, while the detailed descriptions are in the order they appear in the source code. This preserves the logical groupings established by the programmer.</p>
+</li>
+<li class="blockList">
+<h2>Annotation Type</h2>
+<p>Each annotation type has its own separate page with the following sections:</p>
+<ul>
+<li>Annotation Type declaration</li>
+<li>Annotation Type description</li>
+<li>Required Element Summary</li>
+<li>Optional Element Summary</li>
+<li>Element Detail</li>
+</ul>
+</li>
+<li class="blockList">
+<h2>Enum</h2>
+<p>Each enum has its own separate page with the following sections:</p>
+<ul>
+<li>Enum declaration</li>
+<li>Enum description</li>
+<li>Enum Constant Summary</li>
+<li>Enum Constant Detail</li>
+</ul>
+</li>
+<li class="blockList">
+<h2>Tree (Class Hierarchy)</h2>
+<p>There is a <a href="overview-tree.html">Class Hierarchy</a> page for all packages, plus a hierarchy for each package. Each hierarchy page contains a list of classes and a list of interfaces. The classes are organized by inheritance structure starting with <code>java.lang.Object</code>. The interfaces do not inherit from <code>java.lang.Object</code>.</p>
+<ul>
+<li>When viewing the Overview page, clicking on "Tree" displays the hierarchy for all packages.</li>
+<li>When viewing a particular package, class or interface page, clicking "Tree" displays the hierarchy for only that package.</li>
+</ul>
+</li>
+<li class="blockList">
+<h2>Deprecated API</h2>
+<p>The <a href="deprecated-list.html">Deprecated API</a> page lists all of the API that have been deprecated. A deprecated API is not recommended for use, generally due to improvements, and a replacement API is usually given. Deprecated APIs may be removed in future implementations.</p>
+</li>
+<li class="blockList">
+<h2>Index</h2>
+<p>The <a href="index-all.html">Index</a> contains an alphabetic list of all classes, interfaces, constructors, methods, and fields.</p>
+</li>
+<li class="blockList">
+<h2>Prev/Next</h2>
+<p>These links take you to the next or previous class, interface, package, or related page.</p>
+</li>
+<li class="blockList">
+<h2>Frames/No Frames</h2>
+<p>These links show and hide the HTML frames.  All pages are available with or without frames.</p>
+</li>
+<li class="blockList">
+<h2>All Classes</h2>
+<p>The <a href="allclasses-noframe.html">All Classes</a> link shows all classes and interfaces except non-static nested types.</p>
+</li>
+<li class="blockList">
+<h2>Serialized Form</h2>
+<p>Each serializable or externalizable class has a description of its serialization fields and methods. This information is of interest to re-implementors, not to developers using the API. While there is no link in the navigation bar, you can get to this information by going to any serialized class and clicking "Serialized Form" in the "See also" section of the class description.</p>
+</li>
+<li class="blockList">
+<h2>Constant Field Values</h2>
+<p>The <a href="constant-values.html">Constant Field Values</a> page lists the static final fields and their values.</p>
+</li>
+</ul>
+<em>This help file applies to API documentation generated using the standard doclet.</em></div>
+<!-- ======= START OF BOTTOM NAVBAR ====== -->
+<div class="bottomNav"><a name="navbar_bottom">
+<!--   -->
+</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
+<!--   -->
+</a>
+<ul class="navList" title="Navigation">
+<li><a href="overview-summary.html">Overview</a></li>
+<li>Package</li>
+<li>Class</li>
+<li><a href="overview-tree.html">Tree</a></li>
+<li><a href="deprecated-list.html">Deprecated</a></li>
+<li><a href="index-all.html">Index</a></li>
+<li class="navBarCell1Rev">Help</li>
+</ul>
+</div>
+<div class="subNav">
+<ul class="navList">
+<li>Prev</li>
+<li>Next</li>
+</ul>
+<ul class="navList">
+<li><a href="index.html?help-doc.html" target="_top">Frames</a></li>
+<li><a href="help-doc.html" target="_top">No Frames</a></li>
+</ul>
+<ul class="navList" id="allclasses_navbar_bottom">
+<li><a href="allclasses-noframe.html">All Classes</a></li>
+</ul>
+<div>
+<script type="text/javascript"><!--
+  allClassesLink = document.getElementById("allclasses_navbar_bottom");
+  if(window==top) {
+    allClassesLink.style.display = "block";
+  }
+  else {
+    allClassesLink.style.display = "none";
+  }
+  //-->
+</script>
+</div>
+<a name="skip-navbar_bottom">
+<!--   -->
+</a></div>
+<!-- ======== END OF BOTTOM NAVBAR ======= -->
+<script defer="defer" type="text/javascript" src="lib/jquery.js"></script><script defer="defer" type="text/javascript" src="lib/api-javadocs.js"></script></body>
+</html>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org