Posted to commits@spark.apache.org by sr...@apache.org on 2023/03/18 02:46:43 UTC

[spark] branch master updated: [SPARK-42803][CORE][SQL][ML] Use getParameterCount function instead of getParameterTypes.length

This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 11c9838283e [SPARK-42803][CORE][SQL][ML] Use getParameterCount function instead of getParameterTypes.length
11c9838283e is described below

commit 11c9838283e98d5ebe6ce13b85e26217494feef2
Author: narek_karapetian <na...@yandex.ru>
AuthorDate: Fri Mar 17 21:46:25 2023 -0500

    [SPARK-42803][CORE][SQL][ML] Use getParameterCount function instead of getParameterTypes.length
    
    ### What changes were proposed in this pull request?
    
    Since JDK 1.8, the reflection API has provided an additional method, `getParameterCount`. It is better to use it than `getParameterTypes.length`, because `getParameterTypes` makes a copy of the parameter types array on every invocation:
    ```java
        public Class<?>[] getParameterTypes() {
            return parameterTypes.clone();
        }
    ```
    `getParameterCount` returns the number of parameters directly:
    ```java
        public int getParameterCount() { return parameterTypes.length; }
    ```
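    
    For illustration only (not part of this patch), here is a minimal standalone sketch of the zero-argument check pattern that the change updates; the `ParameterCountDemo` and `Target` names are hypothetical:
    ```java
    import java.lang.reflect.Method;
    import java.util.Arrays;
    
    public class ParameterCountDemo {
        // Hypothetical target class with a mix of zero-arg and one-arg methods.
        static class Target {
            public String name() { return "target"; }
            public void rename(String newName) { }
        }
    
        public static void main(String[] args) {
            Method[] methods = Target.class.getDeclaredMethods();
    
            // Old style: getParameterTypes() clones the parameter array just to read its length.
            long zeroArgOld = Arrays.stream(methods)
                .filter(m -> m.getParameterTypes().length == 0)
                .count();
    
            // New style: getParameterCount() reads the arity without any copy.
            long zeroArgNew = Arrays.stream(methods)
                .filter(m -> m.getParameterCount() == 0)
                .count();
    
            System.out.println("zero-arg methods (old check): " + zeroArgOld);
            System.out.println("zero-arg methods (new check): " + zeroArgNew);
        }
    }
    ```
    Both filters select the same methods; the second simply avoids cloning the parameter types array for every candidate.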
    
    ### Why are the changes needed?
    To avoid redundant array creation.
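    
    As a quick illustration (not from the PR), the following standalone check shows that every `getParameterTypes()` call returns a fresh copy, which is exactly the allocation being avoided; the class name `ParameterTypesCopyCheck` is made up for this sketch:
    ```java
    import java.lang.reflect.Method;
    
    public class ParameterTypesCopyCheck {
        public static void main(String[] args) throws NoSuchMethodException {
            Method valueOf = String.class.getMethod("valueOf", int.class);
    
            // Two calls return distinct array instances: getParameterTypes() clones on each invocation.
            System.out.println(valueOf.getParameterTypes() != valueOf.getParameterTypes()); // true
    
            // getParameterCount() exposes the arity without allocating a new array.
            System.out.println(valueOf.getParameterCount()); // 1
        }
    }
    ```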
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    By existing unit tests
    
    Closes #40422 from NarekDW/reflection-get-parameter-count.
    
    Authored-by: narek_karapetian <na...@yandex.ru>
    Signed-off-by: Sean Owen <sr...@gmail.com>
---
 .../src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java     | 2 +-
 mllib/common/src/main/scala/org/apache/spark/ml/param/params.scala  | 2 +-
 .../main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala   | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java
index a15d07cf599..bf7c256fc94 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java
@@ -56,7 +56,7 @@ public class KVTypeInfo {
       KVIndex idx = m.getAnnotation(KVIndex.class);
       if (idx != null) {
         checkIndex(idx, indices);
-        Preconditions.checkArgument(m.getParameterTypes().length == 0,
+        Preconditions.checkArgument(m.getParameterCount() == 0,
           "Annotated method %s::%s should not have any parameters.", type.getName(), m.getName());
         m.setAccessible(true);
         indices.put(idx.value(), idx);
diff --git a/mllib/common/src/main/scala/org/apache/spark/ml/param/params.scala b/mllib/common/src/main/scala/org/apache/spark/ml/param/params.scala
index 52840e04eae..b818be30583 100644
--- a/mllib/common/src/main/scala/org/apache/spark/ml/param/params.scala
+++ b/mllib/common/src/main/scala/org/apache/spark/ml/param/params.scala
@@ -652,7 +652,7 @@ trait Params extends Identifiable with Serializable {
     methods.filter { m =>
         Modifier.isPublic(m.getModifiers) &&
           classOf[Param[_]].isAssignableFrom(m.getReturnType) &&
-          m.getParameterTypes.isEmpty
+          m.getParameterCount == 0
       }.sortBy(_.getName)
       .map(m => m.invoke(this).asInstanceOf[Param[_]])
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index b90fc585a09..7468d895cff 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -786,7 +786,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product with Tre
     }
 
     // Skip no-arg constructors that are just there for kryo.
-    val ctors = allCtors.filter(allowEmptyArgs || _.getParameterTypes.size != 0)
+    val ctors = allCtors.filter(allowEmptyArgs || _.getParameterCount != 0)
     if (ctors.isEmpty) {
       throw QueryExecutionErrors.constructorNotFoundError(nodeName)
     }
@@ -796,7 +796,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product with Tre
       newArgs ++ otherCopyArgs
     }
     val defaultCtor = ctors.find { ctor =>
-      if (ctor.getParameterTypes.length != allArgs.length) {
+      if (ctor.getParameterCount != allArgs.length) {
         false
       } else if (allArgs.contains(null)) {
         // if there is a `null`, we can't figure out the class, therefore we should just fallback
@@ -806,7 +806,7 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product with Tre
         val argsArray: Array[Class[_]] = allArgs.map(_.getClass)
         ClassUtils.isAssignable(argsArray, ctor.getParameterTypes, true /* autoboxing */)
       }
-    }.getOrElse(ctors.maxBy(_.getParameterTypes.length)) // fall back to older heuristic
+    }.getOrElse(ctors.maxBy(_.getParameterCount)) // fall back to older heuristic
 
     try {
       CurrentOrigin.withOrigin(origin) {

