Posted to commits@spark.apache.org by ma...@apache.org on 2014/04/20 00:06:16 UTC

git commit: Use scala deprecation instead of java.

Repository: spark
Updated Branches:
  refs/heads/master 28238c81d -> 5d0f58b2e


Use scala deprecation instead of java.

This gets rid of a warning when compiling core, since we were depending on a deprecated interface from a function that scalac did not itself treat as deprecated.  I also tested with javac, and this does the right thing when compiling Java code.
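
For context, a minimal sketch (hypothetical names, not the Spark source) of the scalac behavior involved: scalac suppresses deprecation warnings for calls made inside a member that is itself marked with Scala's @deprecated, but, at least in scalac of this era, it does not extend that suppression to members carrying only Java's @Deprecated. Scala's @deprecated is still emitted to the class file as the standard Deprecated attribute, which is presumably why javac continues to flag Java callers after this change.

    // Hypothetical names (Legacy, Wrapper); not the Spark source.
    object Legacy {
      @deprecated("use newApi", "1.0.0")
      def oldApi(): Int = 1

      def newApi(): Int = 1
    }

    object Wrapper {
      // Java's annotation: scalac does not treat this member as deprecated
      // for warning-suppression purposes, so the call below is flagged.
      @Deprecated
      def viaJavaAnnotation(): Int = Legacy.oldApi()

      // Scala's annotation: calls to deprecated APIs inside a member that is
      // itself @deprecated are not flagged, so this compiles cleanly.
      @deprecated("use Legacy.newApi", "1.0.0")
      def viaScalaAnnotation(): Int = Legacy.oldApi()
    }

Compiling this sketch with scalac -deprecation should warn only about the call inside viaJavaAnnotation.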

Author: Michael Armbrust <mi...@databricks.com>

Closes #452 from marmbrus/scalaDeprecation and squashes the following commits:

f628b4d [Michael Armbrust] Use scala deprecation instead of java.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5d0f58b2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5d0f58b2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5d0f58b2

Branch: refs/heads/master
Commit: 5d0f58b2eb8e48a95c4ab34bc89f7251d093f301
Parents: 28238c8
Author: Michael Armbrust <mi...@databricks.com>
Authored: Sat Apr 19 15:06:04 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sat Apr 19 15:06:04 2014 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/api/java/JavaSparkContext.scala    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5d0f58b2/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index cf30523..bda9272 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -114,7 +114,7 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
    * @deprecated As of Spark 1.0.0, defaultMinSplits is deprecated, use
    *            {@link #defaultMinPartitions()} instead
    */
-  @Deprecated
+  @deprecated("use defaultMinPartitions", "1.0.0")
   def defaultMinSplits: java.lang.Integer = sc.defaultMinSplits
 
   /** Default min number of partitions for Hadoop RDDs when not given by user */