Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2018/06/13 00:44:19 UTC

[GitHub] lanking520 closed pull request #11123: [MXNET-319][DO NOT MERGE][Deprecated] Javadoc fix

lanking520 closed pull request #11123: [MXNET-319][DO NOT MERGE][Deprecated] Javadoc fix
URL: https://github.com/apache/incubator-mxnet/pull/11123

This pull request comes from a forked repository. Because GitHub no
longer shows the original diff once the pull request is closed, it is
reproduced below for the sake of provenance:

diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml
index 361bfab5d61..724ffbb369d 100644
--- a/scala-package/core/pom.xml
+++ b/scala-package/core/pom.xml
@@ -65,7 +65,24 @@
         <groupId>org.scalastyle</groupId>
         <artifactId>scalastyle-maven-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>net.alchim31.maven</groupId>
+        <artifactId>scala-maven-plugin</artifactId>
+        <version>3.3.2</version>
+        <configuration>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <id>attach-javadocs</id>
+            <goals>
+              <goal>doc-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
+
   </build>
   <dependencies>
     <dependency>
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/EvalMetric.scala b/scala-package/core/src/main/scala/org/apache/mxnet/EvalMetric.scala
index de2881a221c..4556d4f7bfa 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/EvalMetric.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/EvalMetric.scala
@@ -108,9 +108,9 @@ class Accuracy extends EvalMetric("accuracy") {
 
     for ((pred, label) <- preds zip labels) {
       val predLabel = if (pred.shape == label.shape) {
-        NDArray.argmax(Map("axis" -> 1, "keepdims" -> true))(pred)
+        NDArray.api.argmax(pred, Some(1), Some(true))
       } else {
-        NDArray.argmax_channel(pred)
+        NDArray.api.argmax_channel(pred)
       }
       require(label.shape == predLabel.shape,
         s"label ${label.shape} and prediction ${predLabel.shape}" +
@@ -172,7 +172,7 @@ class F1 extends EvalMetric("f1") {
       "labels and predictions should have the same length.")
 
     for ((pred, label) <- preds zip labels) {
-      val predLabel = NDArray.argmax_channel(pred)
+      val predLabel = NDArray.api.argmax_channel(pred)
       require(label.shape == predLabel.shape,
         s"label ${label.shape} and prediction ${predLabel.shape}" +
         s"should have the same length.")
@@ -232,7 +232,7 @@ class Perplexity(ignoreLabel: Option[Int] = None, axis: Int = -1) extends EvalMe
       require(label.size == pred.size / pred.shape.toArray.reverse.head,
         s"shape mismatch: ${label.shape} vs. ${pred.shape}")
       val l = label.asInContext(pred.context).asType(DType.Int32).reshape(Shape(label.size))
-      val p = NDArray.pick(Map("axis" -> this.axis))(pred, label)
+      val p = NDArray.api.pick(pred, label, Some(this.axis))
       probs += p.head
     }
 
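For context, the change above swaps the untyped, Map-driven operator calls for the generated typed methods under NDArray.api. A minimal sketch of the two calling styles side by side (sample shapes are illustrative, and it assumes a build that includes the generated NDArray.api methods):

    import org.apache.mxnet.{Context, NDArray, Shape}

    val pred = NDArray.ones(Shape(2, 3), Context.cpu())
    // old style: operator resolved by name at runtime, arguments passed through an untyped Map
    val oldStyle = NDArray.argmax(Map("axis" -> 1, "keepdims" -> true))(pred).head
    // new style: generated method with typed parameters; optional arguments are wrapped in Option
    val newStyle = NDArray.api.argmax(pred, Some(1), Some(true)).head
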
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Monitor.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Monitor.scala
index 8e53d652fde..25ae374aa39 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Monitor.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Monitor.scala
@@ -38,7 +38,7 @@ class Monitor(
 
   if (statFunc == null) {
     statFunc = (x: NDArray) => {
-      NDArray.norm(x) / math.sqrt(x.size.toDouble).toFloat
+      NDArray.api.norm(x) / math.sqrt(x.size.toDouble).toFloat
     }
   }
 
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
index 469107aa58c..49f4d35136f 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
@@ -65,12 +65,12 @@ object NDArray {
     val ndArgs = ArrayBuffer.empty[NDArray]
     val posArgs = ArrayBuffer.empty[String]
     args.foreach {
-      case arr: NDArray =>
-        ndArgs.append(arr)
-      case arrFunRet: NDArrayFuncReturn =>
-        arrFunRet.arr.foreach(ndArgs.append(_))
-      case arg =>
-        posArgs.append(arg.toString)
+        case arr: NDArray =>
+          ndArgs.append(arr)
+        case arrFunRet: NDArrayFuncReturn =>
+          arrFunRet.arr.foreach(ndArgs.append(_))
+        case arg =>
+          posArgs.append(arg.toString)
     }
 
     require(posArgs.length <= function.arguments.length,
@@ -81,6 +81,7 @@ object NDArray {
         ++ function.arguments.slice(0, posArgs.length).zip(posArgs) - "out"
       ).map { case (k, v) => k -> v.toString }
 
+
     val (oriOutputs, outputVars) =
       if (kwargs != null && kwargs.contains("out")) {
         val output = kwargs("out")
@@ -537,6 +538,10 @@ object NDArray {
     new NDArray(handleRef.value)
   }
 
+  private def _crop_assign(kwargs: Map[String, Any] = null)(args: Any*) : NDArrayFuncReturn = {
+    genericNDArrayFunctionInvoke("_crop_assign", args, kwargs)
+  }
+
   // TODO: imdecode
 }
 
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/annotation/Experimental.scala b/scala-package/core/src/main/scala/org/apache/mxnet/annotation/Experimental.scala
new file mode 100644
index 00000000000..33d1d330979
--- /dev/null
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/annotation/Experimental.scala
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.mxnet.annotation
+
+import java.lang.annotation.{ElementType, Retention, Target, _}
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(Array(ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER,
+  ElementType.CONSTRUCTOR, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE))
+class Experimental {}
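
The new annotation is retained at runtime and is what the generated typed APIs further down in this diff attach to every operator method. A minimal illustrative usage (the class and method here are hypothetical, not part of the PR):

    import org.apache.mxnet.annotation.Experimental

    class ExampleOps {
      @Experimental
      def normalize(x: Float): Float = x / 255f
    }
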
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
index c13ebcd6260..03734693eee 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
@@ -40,18 +40,8 @@ private object DataParallelExecutorGroup {
           val end = shape.toArray
           begin(axis) = sliceIdxStart
           end(axis) = sliceIdxStop
-          if (dSrc.context == dDst.context) {
-            NDArray.crop(Map(
-              "begin" -> new Shape(begin),
-              "end" -> new Shape(end),
-              "out" -> dDst))(dSrc)
-          } else {
-            // on different device, crop and then do cross device copy
-            val dDstCopy: NDArray = NDArray.crop(Map(
-              "begin" -> new Shape(begin),
-              "end" -> new Shape(end)))(dSrc)
-            dDstCopy.copyTo(dDst)
-          }
+          NDArray.api.crop(data = dSrc,
+            begin = new Shape(begin), end = new Shape(end)).copyTo(dDst)
         } else {
           dSrc.copyTo(dDst)
         }
@@ -569,8 +559,8 @@ class DataParallelExecutorGroup private[module](
         if (outGrads != null) {
           (outGrads zip outputLayouts).map { case (grad, axis) =>
             if (axis >= 0) {
-              val ogMySlice: NDArray = NDArray.slice_axis(
-                Map("axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(grad)
+              val ogMySlice: NDArray = NDArray.api.slice_axis(data = grad,
+                axis = axis, begin = islice._1, end = islice._2)
               ogMySlice.asInContext(contexts(i))
             } else {
               grad.copyTo(contexts(i))
@@ -595,8 +585,8 @@ class DataParallelExecutorGroup private[module](
           if (axis == 0) {
             label.slice(islice)
           } else if (axis > 0) {
-            val labelMySlice: NDArray = NDArray.slice_axis(Map(
-              "axis" -> axis, "begin" -> islice._1, "end" -> islice._2))(label)
+            val labelMySlice: NDArray = NDArray.api.slice_axis(data = label,
+              axis = axis, begin = islice._1, end = islice._2)
               .asInContext(label.context)
             labelMySlice
           } else {
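
The helper rewritten at the top of this file now always materializes the cropped slice and copies it into the destination. Since the generator changes later in this diff also add an optional out parameter to every generated method, the old same-context fast path could still be expressed with the typed API, roughly as follows (a sketch under that assumption, not code from the PR):

    if (dSrc.context == dDst.context) {
      // write the cropped region directly into the destination array
      NDArray.api.crop(data = dSrc, begin = new Shape(begin),
        end = new Shape(end), out = Some(dDst))
    } else {
      // cross-device: crop into a temporary, then copy to the destination
      NDArray.api.crop(data = dSrc, begin = new Shape(begin),
        end = new Shape(end)).copyTo(dDst)
    }
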
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaDelta.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaDelta.scala
index 3afe509b947..24c2d9f71eb 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaDelta.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaDelta.scala
@@ -49,7 +49,7 @@ class AdaDelta(rho: Float = 0.05f, rescaleGradient: Float = 1.0f,
 
     if (clipGradient != 0f) {
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
@@ -59,8 +59,8 @@ class AdaDelta(rho: Float = 0.05f, rescaleGradient: Float = 1.0f,
       resdGrad * resdGrad).disposeDepsExcept(accG, resdGrad)
     accG.set(newAccG)
     val currentDelta = (
-      NDArray.sqrt(accDelta + this.epsilon) /
-      NDArray.sqrt(accG + this.epsilon) * resdGrad).disposeDepsExcept(accDelta, accG, resdGrad)
+      NDArray.api.sqrt(accDelta + this.epsilon) /
+      NDArray.api.sqrt(accG + this.epsilon) * resdGrad).disposeDepsExcept(accDelta, accG, resdGrad)
     val newAccDelta = (this.rho * accDelta +
       (1.0f - this.rho) * currentDelta * currentDelta).disposeDepsExcept(accDelta, currentDelta)
     accDelta.set(newAccDelta)
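
The same switch from NDArray.clip / NDArray.sqrt to their NDArray.api counterparts repeats in the optimizer files below (AdaGrad, Adam, DCASGD, NAG, RMSProp, SGD, SGLD). In isolation, the clipping step looks roughly like this (values are illustrative; .head extracts the single result NDArray from the returned NDArrayFuncReturn):

    import org.apache.mxnet.{NDArray, Shape}

    val grad = NDArray.ones(Shape(4)) * 3f
    val clipGradient = 2f
    // clamp every gradient element to [-clipGradient, clipGradient]
    val clipped: NDArray = NDArray.api.clip(grad, -clipGradient, clipGradient).head
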
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaGrad.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaGrad.scala
index ed3c5139229..ed450f4e26e 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaGrad.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/AdaGrad.scala
@@ -51,7 +51,7 @@ class AdaGrad(val learningRate: Float = 0.05f, rescaleGradient: Float = 1.0f,
     history += gradSquared
     gradSquared.dispose()
 
-    val newWeight = (-lr * (resdGrad / NDArray.sqrt(history + this.epsilon) + this.wd * weight))
+    val newWeight = (-lr * (resdGrad / NDArray.api.sqrt(history + this.epsilon) + this.wd * weight))
       .disposeDepsExcept(resdGrad, history, weight)
     weight += newWeight
     newWeight.dispose()
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/Adam.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/Adam.scala
index 24f3323073f..4c531882c5c 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/Adam.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/Adam.scala
@@ -87,7 +87,7 @@ class Adam(val learningRate: Float = 0.002f, beta1: Float = 0.9f, beta2: Float =
     var resdGrad = grad * rescaleGrad
     if (clipGradient != 0f) {
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
@@ -96,7 +96,7 @@ class Adam(val learningRate: Float = 0.002f, beta1: Float = 0.9f, beta2: Float =
     val varianceT = (beta2 * variance + (1.0f - beta2) * resdGrad * resdGrad)
       .disposeDepsExcept(variance, resdGrad)
 
-    val step = (learningRate * meanT / (NDArray.sqrt(varianceT) + epsilon))
+    val step = (learningRate * meanT / (NDArray.api.sqrt(varianceT) + epsilon))
       .disposeDepsExcept(meanT, varianceT)
 
     val wd = this.getWd(index, this.wd)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/DCASGD.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/DCASGD.scala
index 6b5053b74a0..b84d8ef2ee0 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/DCASGD.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/DCASGD.scala
@@ -58,7 +58,7 @@ class DCASGD(val learningRate: Float = 0.01f, momentum: Float = 0.0f,
     if (clipGradient != 0f) {
       // to get rid of memory leak
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/NAG.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/NAG.scala
index 47fe62d17f4..7fbb5936a5d 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/NAG.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/NAG.scala
@@ -64,7 +64,7 @@ class NAG(val learningRate: Float = 0.01f, momentum: Float = 0.0f,
     if (clipGradient != 0f) {
       // to get rid of memory leak
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/RMSProp.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/RMSProp.scala
index 49fca6a1242..141b47ee3ba 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/RMSProp.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/RMSProp.scala
@@ -53,7 +53,7 @@ class RMSProp(val learningRate: Float = 0.002f, rescaleGradient: Float = 1.0f,
     var resdGrad = grad * this.rescaleGrad
     if (clipGradient != 0f) {
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
@@ -68,7 +68,7 @@ class RMSProp(val learningRate: Float = 0.002f, rescaleGradient: Float = 1.0f,
     gUpdated.dispose()
 
     val deltaUpdated =
-      (this.gamma2 * delta - lr * (resdGrad / NDArray.sqrt(n - g * g + 1e-4f) + wd * weight))
+      (this.gamma2 * delta - lr * (resdGrad / NDArray.api.sqrt(n - g * g + 1e-4f) + wd * weight))
       .disposeDepsExcept(delta, resdGrad, n, g, weight)
     delta.set(deltaUpdated)
     deltaUpdated.dispose()
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala
index c1b72591952..edcd861d05e 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala
@@ -56,7 +56,7 @@ class SGD(val learningRate: Float = 0.01f, momentum: Float = 0.0f,
     if (clipGradient != 0f) {
       // to get rid of memory leak
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGLD.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGLD.scala
index 0765716c4b7..80b24ad04bc 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGLD.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGLD.scala
@@ -62,7 +62,7 @@ class SGLD(val learningRate: Float = 0.01f, rescaleGradient: Float = 1.0f,
     if (clipGradient != 0f) {
       // to get rid of memory leak
       val oldResdGrad = resdGrad
-      resdGrad = NDArray.clip(resdGrad, -clipGradient, clipGradient)
+      resdGrad = NDArray.api.clip(resdGrad, -clipGradient, clipGradient)
       oldResdGrad.dispose()
     }
 
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
index 90fe2604e8b..8e4f3693f2f 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
@@ -52,8 +52,9 @@ private[mxnet] object APIDocGenerator{
     val apacheLicence = "/*\n* Licensed to the Apache Software Foundation (ASF) under one or more\n* contributor license agreements.  See the NOTICE file distributed with\n* this work for additional information regarding copyright ownership.\n* The ASF licenses this file to You under the Apache License, Version 2.0\n* (the \"License\"); you may not use this file except in compliance with\n* the License.  You may obtain a copy of the License at\n*\n*    http://www.apache.org/licenses/LICENSE-2.0\n*\n* Unless required by applicable law or agreed to in writing, software\n* distributed under the License is distributed on an \"AS IS\" BASIS,\n* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n* See the License for the specific language governing permissions and\n* limitations under the License.\n*/\n"
     val scalaStyle = "// scalastyle:off"
     val packageDef = "package org.apache.mxnet"
+    val imports = "import org.apache.mxnet.annotation.Experimental"
     val absClassDef = s"abstract class $packageName"
-    val finalStr = s"$apacheLicence\n$scalaStyle\n$packageDef\n$absClassDef {\n${absFuncs.mkString("\n")}\n}"
+    val finalStr = s"$apacheLicence\n$scalaStyle\n$packageDef\n$imports\n$absClassDef {\n${absFuncs.mkString("\n")}\n}"
     import java.io._
     val pw = new PrintWriter(new File(FILE_PATH + s"$packageName.scala"))
     pw.write(finalStr)
@@ -61,6 +62,7 @@ private[mxnet] object APIDocGenerator{
   }
 
   // Generate ScalaDoc type
+
   def generateAPIDocFromBackend(func : absClassFunction) : String = {
     val desc = func.desc.split("\n").map({ currStr =>
       s"  * $currStr"
@@ -97,9 +99,12 @@ private[mxnet] object APIDocGenerator{
       argDef += "name : String = null"
       argDef += "attr : Map[String, String] = null"
     } else {
+      argDef += "out : Option[NDArray] = None"
       returnType = "org.apache.mxnet.NDArrayFuncReturn"
+      argDef += "out : Option[NDArray] = None"
     }
-    s"def ${func.name} (${argDef.mkString(", ")}) : ${returnType}"
+    val experimentalTag = "@Experimental"
+    s"$experimentalTag\ndef ${func.name} (${argDef.mkString(", ")}) : $returnType"
   }
 
 
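Pieced together, the abstract signature that the generator now emits for an NDArray operator looks roughly like the following, using argmax as an example (the parameter names and types come from the backend at build time, so they are shown here as an assumption):

    @Experimental
    def argmax (data : org.apache.mxnet.NDArray, axis : Option[Int] = None,
        keepdims : Option[Boolean] = None,
        out : Option[NDArray] = None) : org.apache.mxnet.NDArrayFuncReturn
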
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
index ce5b532bc8b..c128265b7b5 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
@@ -21,7 +21,7 @@ import org.apache.mxnet.init.Base._
 import org.apache.mxnet.utils.{CToScalaUtils, OperatorBuildUtils}
 
 import scala.annotation.StaticAnnotation
-import scala.collection.mutable.ListBuffer
+import scala.collection.mutable.{ArrayBuffer, ListBuffer}
 import scala.language.experimental.macros
 import scala.reflect.macros.blackbox
 
@@ -57,14 +57,13 @@ private[mxnet] object NDArrayMacro {
 
     val newNDArrayFunctions = {
       if (isContrib) ndarrayFunctions.filter(_.name.startsWith("_contrib_"))
-      else ndarrayFunctions.filter(!_.name.startsWith("_contrib_"))
+      else ndarrayFunctions.filterNot(_.name.startsWith("_"))
     }
 
      val functionDefs = newNDArrayFunctions flatMap { NDArrayfunction =>
         val funcName = NDArrayfunction.name
         val termName = TermName(funcName)
-        if (!NDArrayfunction.name.startsWith("_") || NDArrayfunction.name.startsWith("_contrib_")) {
-          Seq(
+       Seq(
             // scalastyle:off
             // (yizhi) We are investigating a way to make these functions type-safe
             // and waiting to see the new approach is stable enough.
@@ -75,16 +74,7 @@ private[mxnet] object NDArrayMacro {
             q"def $termName(args: Any*) = {genericNDArrayFunctionInvoke($funcName, args, null)}".asInstanceOf[DefDef]
             // scalastyle:on
           )
-        } else {
-          // Default private
-          Seq(
-            // scalastyle:off
-            q"private def $termName(kwargs: Map[String, Any] = null)(args: Any*) = {genericNDArrayFunctionInvoke($funcName, args, kwargs)}".asInstanceOf[DefDef],
-            q"private def $termName(args: Any*) = {genericNDArrayFunctionInvoke($funcName, args, null)}".asInstanceOf[DefDef]
-            // scalastyle:on
-          )
         }
-      }
 
     structGeneration(c)(functionDefs, annottees : _*)
   }
@@ -109,6 +99,7 @@ private[mxnet] object NDArrayMacro {
       // Construct Implementation field
       var impl = ListBuffer[String]()
       impl += "val map = scala.collection.mutable.Map[String, Any]()"
+      impl += "val args = scala.collection.mutable.ArrayBuffer.empty[NDArray]"
       ndarrayfunction.listOfArgs.foreach({ ndarrayarg =>
         // var is a special word used to define variable in Scala,
         // need to changed to something else in order to make it work
@@ -123,14 +114,31 @@ private[mxnet] object NDArrayMacro {
         else {
           argDef += s"${currArgName} : ${ndarrayarg.argType}"
         }
-        var base = "map(\"" + ndarrayarg.argName + "\") = " + currArgName
+        // NDArray arg implementation
+        val returnType = "org.apache.mxnet.NDArray"
+        var base = ""
+        // TODO: Currently we do not add place holder for NDArray
+        // Example: an NDArray operator like the following format
+        // nd.foo(arg1: NDArray(required), arg2: NDArray(Optional), arg3: NDArray(Optional)
+        // If we place nd.foo(arg1, arg3 = arg3), do we need to add place holder for arg2?
+        // What it should be?
+        if (ndarrayarg.argType.equals(returnType)) {
+          base = s"args += $currArgName"
+        } else if (ndarrayarg.argType.equals(s"Array[$returnType]")){
+          base = s"args ++= $currArgName"
+        } else {
+          base = "map(\"" + ndarrayarg.argName + "\") = " + currArgName
+        }
         if (ndarrayarg.isOptional) {
           base = "if (!" + currArgName + ".isEmpty)" + base + ".get"
         }
         impl += base
       })
+      // add default out parameter
+      argDef += "out : Option[NDArray] = None"
+      impl += "if (!out.isEmpty) map(\"out\") = out.get"
       // scalastyle:off
-      impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", null, map.toMap)"
+      impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)"
       // scalastyle:on
       // Combine and build the function string
       val returnType = "org.apache.mxnet.NDArrayFuncReturn"
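
Assembled from the strings built above, the macro now expands each typed NDArray method into roughly the following shape, shown here for a clip-like operator (operator and argument names are illustrative):

    def clip(data : org.apache.mxnet.NDArray, a_min : Float, a_max : Float,
        out : Option[NDArray] = None) : org.apache.mxnet.NDArrayFuncReturn = {
      val map = scala.collection.mutable.Map[String, Any]()
      val args = scala.collection.mutable.ArrayBuffer.empty[NDArray]
      // NDArray arguments become positional args, everything else goes into the kwargs map
      args += data
      map("a_min") = a_min
      map("a_max") = a_max
      if (!out.isEmpty) map("out") = out.get
      org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke("clip", args.toSeq, map.toMap)
    }
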
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
index bacbdb2e307..b314a6f1bad 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
@@ -104,6 +104,7 @@ private[mxnet] object SymbolImplMacros {
       // Construct Implementation field
       var impl = ListBuffer[String]()
       impl += "val map = scala.collection.mutable.Map[String, Any]()"
+      impl += "var args = Seq[org.apache.mxnet.Symbol]()"
       symbolfunction.listOfArgs.foreach({ symbolarg =>
         // var is a special word used to define variable in Scala,
         // need to changed to something else in order to make it work
@@ -118,17 +119,28 @@ private[mxnet] object SymbolImplMacros {
         else {
           argDef += s"${currArgName} : ${symbolarg.argType}"
         }
-        var base = "map(\"" + symbolarg.argName + "\") = " + currArgName
-        if (symbolarg.isOptional) {
-          base = "if (!" + currArgName + ".isEmpty)" + base + ".get"
+        // Symbol arg implementation
+        val returnType = "org.apache.mxnet.Symbol"
+        var base = ""
+        if (symbolarg.argType.equals(s"Array[$returnType]")) {
+          base = s"args = $currArgName.toSeq"
+          if (symbolarg.isOptional) {
+            base = s"if (!$currArgName.isEmpty) args = $currArgName.get.toSeq"
+          }
+        } else {
+          base = "map(\"" + symbolarg.argName + "\") = " + currArgName
+          if (symbolarg.isOptional) {
+            base = "if (!" + currArgName + ".isEmpty)" + base + ".get"
+          }
         }
+
         impl += base
       })
       argDef += "name : String = null"
       argDef += "attr : Map[String, String] = null"
       // scalastyle:off
       // TODO: Seq() here allows user to place Symbols rather than normal arguments to run, need to fix if old API deprecated
-      impl += "org.apache.mxnet.Symbol.createSymbolGeneral(\"" + symbolfunction.name + "\", name, attr, Seq(), map.toMap)"
+      impl += "org.apache.mxnet.Symbol.createSymbolGeneral(\"" + symbolfunction.name + "\", name, attr, args, map.toMap)"
       // scalastyle:on
       // Combine and build the function string
       val returnType = "org.apache.mxnet.Symbol"

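For the Symbol side, Array[Symbol] arguments are now forwarded as the positional args sequence of createSymbolGeneral instead of being packed into the kwargs map. Expanded, a generated method for a Concat-like operator would look roughly like this (operator and argument names are illustrative):

    def Concat(data : Array[org.apache.mxnet.Symbol], num_args : Int,
        dim : Option[Int] = None, name : String = null,
        attr : Map[String, String] = null) : org.apache.mxnet.Symbol = {
      val map = scala.collection.mutable.Map[String, Any]()
      var args = Seq[org.apache.mxnet.Symbol]()
      // Symbol arrays become positional args; scalars stay in the kwargs map
      args = data.toSeq
      map("num_args") = num_args
      if (!dim.isEmpty) map("dim") = dim.get
      org.apache.mxnet.Symbol.createSymbolGeneral("Concat", name, attr, args, map.toMap)
    }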

 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services