You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2016/03/03 10:53:10 UTC
spark git commit: [HOT-FIX] Recover some deprecations for 2.10 compatibility.
Repository: spark
Updated Branches:
refs/heads/master 7b25dc7b7 -> 02b7677e9
[HOT-FIX] Recover some deprecations for 2.10 compatibility.
## What changes were proposed in this pull request?
#11479 [SPARK-13627] broke 2.10 compatibility: [2.10-Build](https://amplab.cs.berkeley.edu/jenkins/view/Spark%20QA%20Compile/job/spark-master-compile-maven-scala-2.10/292/console)
At this moment, we need to support both 2.10 and 2.11.
This PR recovers some deprecated methods which were replaced by [SPARK-13627].
## How was this patch tested?
Jenkins build: Both 2.10, 2.11.
Author: Dongjoon Hyun <do...@apache.org>
Closes #11488 from dongjoon-hyun/hotfix_compatibility_with_2.10.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/02b7677e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/02b7677e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/02b7677e
Branch: refs/heads/master
Commit: 02b7677e9584f5ccd68869abdb0bf980dc847ce1
Parents: 7b25dc7
Author: Dongjoon Hyun <do...@apache.org>
Authored: Thu Mar 3 09:53:02 2016 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu Mar 3 09:53:02 2016 +0000
----------------------------------------------------------------------
.../scala/org/apache/spark/examples/mllib/AbstractParams.scala | 2 +-
.../scala/org/apache/spark/sql/catalyst/ScalaReflection.scala | 6 +++---
.../spark/sql/catalyst/expressions/codegen/package.scala | 2 +-
.../main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala | 4 ++--
4 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/02b7677e/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
index 8985c85..ae60577 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/AbstractParams.scala
@@ -38,7 +38,7 @@ abstract class AbstractParams[T: TypeTag] {
*/
override def toString: String = {
val tpe = tag.tpe
- val allAccessors = tpe.decls.collect {
+ val allAccessors = tpe.declarations.collect {
case m: MethodSymbol if m.isCaseAccessor => m
}
val mirror = runtimeMirror(getClass.getClassLoader)
http://git-wip-us.apache.org/repos/asf/spark/blob/02b7677e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 4f1911c..02cb2d9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -774,9 +774,9 @@ trait ScalaReflection {
}
protected def constructParams(tpe: Type): Seq[Symbol] = {
- val constructorSymbol = tpe.member(termNames.CONSTRUCTOR)
+ val constructorSymbol = tpe.member(nme.CONSTRUCTOR)
val params = if (constructorSymbol.isMethod) {
- constructorSymbol.asMethod.paramLists
+ constructorSymbol.asMethod.paramss
} else {
// Find the primary constructor, and use its parameter ordering.
val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find(
@@ -784,7 +784,7 @@ trait ScalaReflection {
if (primaryConstructorSymbol.isEmpty) {
sys.error("Internal SQL error: Product object did not have a primary constructor.")
} else {
- primaryConstructorSymbol.get.asMethod.paramLists
+ primaryConstructorSymbol.get.asMethod.paramss
}
}
params.flatten
http://git-wip-us.apache.org/repos/asf/spark/blob/02b7677e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
index 382c718..41128fe 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/package.scala
@@ -51,7 +51,7 @@ package object codegen {
val classLoader =
generatedClass
.getClassLoader
- .asInstanceOf[scala.reflect.internal.util.AbstractFileClassLoader]
+ .asInstanceOf[scala.tools.nsc.interpreter.AbstractFileClassLoader]
val generatedBytes = classLoader.classBytes(generatedClass.getName)
val packageDir = new java.io.File(dumpDirectory, generatedClass.getPackage.getName)
http://git-wip-us.apache.org/repos/asf/spark/blob/02b7677e/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
index 3eed6ae..a947fac 100644
--- a/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/GenerateMIMAIgnore.scala
@@ -44,10 +44,10 @@ object GenerateMIMAIgnore {
private def isDeveloperApi(sym: unv.Symbol) =
- sym.annotations.exists(_.tree.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])
+ sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.DeveloperApi])
private def isExperimental(sym: unv.Symbol) =
- sym.annotations.exists(_.tree.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental])
+ sym.annotations.exists(_.tpe =:= unv.typeOf[org.apache.spark.annotation.Experimental])
private def isPackagePrivate(sym: unv.Symbol) =
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org