Posted to commits@spark.apache.org by pw...@apache.org on 2014/05/13 05:08:38 UTC

git commit: SPARK-1815. SparkContext should not be marked DeveloperApi

Repository: spark
Updated Branches:
  refs/heads/master 2ffd1eafd -> 2792bd016


SPARK-1815. SparkContext should not be marked DeveloperApi

Author: Sandy Ryza <sa...@cloudera.com>

Closes #753 from sryza/sandy-spark-1815 and squashes the following commits:

957a8ac [Sandy Ryza] SPARK-1815. SparkContext should not be marked DeveloperApi


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2792bd01
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2792bd01
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2792bd01

Branch: refs/heads/master
Commit: 2792bd016af2a67848e6f403c4e1e05e9f3e3c2a
Parents: 2ffd1ea
Author: Sandy Ryza <sa...@cloudera.com>
Authored: Mon May 12 20:08:30 2014 -0700
Committer: Patrick Wendell <pw...@gmail.com>
Committed: Mon May 12 20:08:30 2014 -0700

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 --
 1 file changed, 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2792bd01/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 35beff0..c43b4fd 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -50,7 +50,6 @@ import org.apache.spark.ui.SparkUI
 import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedWeakValueHashMap, Utils}
 
 /**
- * :: DeveloperApi ::
  * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
  * cluster, and can be used to create RDDs, accumulators and broadcast variables on that cluster.
  *
@@ -58,7 +57,6 @@ import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerTy
  *   this config overrides the default configs as well as system properties.
  */
 
-@DeveloperApi
 class SparkContext(config: SparkConf) extends Logging {
 
   // This is used only by YARN for now, but should be relevant to other cluster types (Mesos,
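For readers unfamiliar with the class touched by this commit, here is a minimal sketch of how SparkContext is typically used as the public entry point the scaladoc above describes. This is illustrative only and not part of the commit; the app name, local master setting, and object name are placeholders, and the calls shown (SparkConf, parallelize, broadcast, reduce) are standard Spark 1.x API.

import org.apache.spark.{SparkConf, SparkContext}

object SparkContextSketch {
  def main(args: Array[String]): Unit = {
    // Settings on SparkConf override the defaults as well as system properties,
    // as noted in the scaladoc of the SparkContext(config: SparkConf) constructor.
    val conf = new SparkConf()
      .setAppName("SparkContextSketch")   // placeholder app name
      .setMaster("local[*]")              // placeholder: run locally with all cores

    // SparkContext represents the connection to the cluster and is the factory
    // for RDDs, accumulators and broadcast variables.
    val sc = new SparkContext(conf)

    val rdd = sc.parallelize(1 to 100)      // create an RDD from a local collection
    val factor = sc.broadcast(2)            // ship a read-only value to executors
    val total = rdd.map(_ * factor.value).reduce(_ + _)
    println(s"total = $total")

    sc.stop()
  }
}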