You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2018/10/09 18:41:51 UTC

[GitHub] lanking520 closed pull request #12624: [MXNET-913] Java API -- NDArray Macros remove Option field

lanking520 closed pull request #12624: [MXNET-913] Java API -- NDArray Macros remove Option field
URL: https://github.com/apache/incubator-mxnet/pull/12624
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (GitHub hides the original diff from a fork once the pull request is merged):

diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/cnntextclassification/CNNTextClassification.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/cnntextclassification/CNNTextClassification.scala
index 7745043b23d..1c09783391a 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/cnntextclassification/CNNTextClassification.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/cnntextclassification/CNNTextClassification.scala
@@ -43,11 +43,11 @@ object CNNTextClassification {
     val inputX = Symbol.Variable("data")
     val inputY = Symbol.Variable("softmax_label")
     val polledOutputs = filterList.map { filterSize =>
-      val conv = Symbol.api.Convolution(data = Some(inputX),
+      val conv = Symbol.api.Convolution(data = inputX,
         kernel = new Shape(filterSize, numEmbed), num_filter = numFilter)
-      val relu = Symbol.api.Activation(data = Some(conv), act_type = "relu")
-      val pool = Symbol.api.Pooling(data = Some(relu), pool_type = Some("max"),
-        kernel = Some(new Shape(sentenceSize - filterSize + 1, 1)), stride = Some(new Shape(1, 1)))
+      val relu = Symbol.api.Activation(data = conv, act_type = "relu")
+      val pool = Symbol.api.Pooling(data = relu, pool_type = "max",
+        kernel = new Shape(sentenceSize - filterSize + 1, 1), stride = new Shape(1, 1))
       relu.dispose()
       conv.dispose()
       pool
@@ -56,17 +56,17 @@ object CNNTextClassification {
     val totalFilters = numFilter * filterList.length
     // val concat = Symbol.Concat()(polledOutputs: _*)(Map("dim" -> 1))
     val concat = Symbol.api.concat(data = polledOutputs,
-      num_args = polledOutputs.length, dim = Some(1))
-    val hPool = Symbol.api.reshape(data = Some(concat),
-      target_shape = Some(new Shape(batchSize, totalFilters)))
+      num_args = polledOutputs.length, dim = 1)
+    val hPool = Symbol.api.reshape(data = concat,
+      target_shape = new Shape(batchSize, totalFilters))
 
     val hDrop = {
-      if (dropout > 0f) Symbol.api.Dropout(data = Some(hPool), p = Some(dropout))
+      if (dropout > 0f) Symbol.api.Dropout(data = hPool, p = dropout)
       else hPool
     }
 
-    val fc = Symbol.api.FullyConnected(data = Some(hDrop), num_hidden = numLabel)
-    val sm = Symbol.api.SoftmaxOutput(data = Some(fc), label = Some(inputY))
+    val fc = Symbol.api.FullyConnected(data = hDrop, num_hidden = numLabel)
+    val sm = Symbol.api.SoftmaxOutput(data = fc, label = inputY)
     fc.dispose()
     hDrop.dispose()
     hPool.dispose()
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOp.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOp.scala
index df79f5b6376..d2c0efc51e2 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOp.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOp.scala
@@ -96,11 +96,11 @@ object ExampleCustomOp {
   def test(dataPath : String, ctx : Context) : Float = {
     val data = Symbol.Variable("data")
     val label = Symbol.Variable("label")
-    val fc1 = Symbol.api.FullyConnected(data = Some(data), num_hidden = 128, name = "fc1")
-    val act1 = Symbol.api.Activation (data = Some(fc1), "relu", name = "relu")
-    val fc2 = Symbol.api.FullyConnected(Some(act1), None, None, 64, name = "fc2")
-    val act2 = Symbol.api.Activation(data = Some(fc2), "relu", name = "relu2")
-    val fc3 = Symbol.api.FullyConnected(Some(act2), None, None, 10, name = "fc3")
+    val fc1 = Symbol.api.FullyConnected(data = data, num_hidden = 128, name = "fc1")
+    val act1 = Symbol.api.Activation (data = fc1, "relu", name = "relu")
+    val fc2 = Symbol.api.FullyConnected(act1, null, null, 64, name = "fc2")
+    val act2 = Symbol.api.Activation(data = fc2, "relu", name = "relu2")
+    val fc3 = Symbol.api.FullyConnected(act2, null, null, 10, name = "fc3")
     val kwargs = mutable.Map[String, Any]("label" -> label, "data" -> fc3)
     val mlp = Symbol.api.Custom(op_type = "softmax", name = "softmax", kwargs = kwargs)
 
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOpWithRtc.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOpWithRtc.scala
index c3ac347353d..75bcc8d3ebb 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOpWithRtc.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/customop/ExampleCustomOpWithRtc.scala
@@ -117,11 +117,11 @@ object ExampleCustomOpWithRtc {
   def test(dataPath : String, ctx : Context) : Float = {
     val data = Symbol.Variable("data")
     val label = Symbol.Variable("label")
-    val fc1 = Symbol.api.FullyConnected(data = Some(data), num_hidden = 128, name = "fc1")
-    val act1 = Symbol.api.Activation (data = Some(fc1), "relu", name = "relu")
-    val fc2 = Symbol.api.FullyConnected(Some(act1), None, None, 64, name = "fc2")
-    val act2 = Symbol.api.Activation(data = Some(fc2), "relu", name = "relu2")
-    val fc3 = Symbol.api.FullyConnected(Some(act2), None, None, 10, name = "fc3")
+    val fc1 = Symbol.api.FullyConnected(data = data, num_hidden = 128, name = "fc1")
+    val act1 = Symbol.api.Activation (data = fc1, "relu", name = "relu")
+    val fc2 = Symbol.api.FullyConnected(act1, null, null, 64, name = "fc2")
+    val act2 = Symbol.api.Activation(data = fc2, "relu", name = "relu2")
+    val fc3 = Symbol.api.FullyConnected(act2, null, null, 10, name = "fc3")
     val kwargs = mutable.Map[String, Any]("label" -> label, "data" -> fc3,
       "forwardBlockDim" -> new Shape(1, 1, 1))
     val mlp = Symbol.api.Custom(op_type = "softmax", name = "softmax", kwargs = kwargs)
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/gan/GanMnist.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/gan/GanMnist.scala
index 475d91faa0d..cf8d09f621c 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/gan/GanMnist.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/gan/GanMnist.scala
@@ -32,25 +32,25 @@ object GanMnist {
   def deconv2D(data: Symbol, iShape: Shape, oShape: Shape,
                kShape: (Int, Int), name: String, stride: (Int, Int) = (2, 2)): Symbol = {
     val targetShape = Shape(oShape(oShape.length - 2), oShape(oShape.length - 1))
-    val net = Symbol.api.Deconvolution(data = Some(data), kernel = Shape(kShape._1, kShape._2),
-      stride = Some(Shape(stride._1, stride._2)), target_shape = Some(targetShape),
-      num_filter = oShape(0), no_bias = Some(true), name = name)
+    val net = Symbol.api.Deconvolution(data = data, kernel = Shape(kShape._1, kShape._2),
+      stride = Shape(stride._1, stride._2), target_shape = targetShape,
+      num_filter = oShape(0), no_bias = true, name = name)
     net
   }
 
   def deconv2DBnRelu(data: Symbol, prefix: String, iShape: Shape,
                      oShape: Shape, kShape: (Int, Int), eps: Float = 1e-5f + 1e-12f): Symbol = {
     var net = deconv2D(data, iShape, oShape, kShape, name = s"${prefix}_deconv")
-    net = Symbol.api.BatchNorm(name = s"${prefix}_bn", data = Some(net),
-      fix_gamma = Some(true), eps = Some(eps))
-    net = Symbol.api.Activation(data = Some(net), act_type = "relu", name = s"${prefix}_act")
+    net = Symbol.api.BatchNorm(name = s"${prefix}_bn", data = net,
+      fix_gamma = true, eps = eps.toDouble)
+    net = Symbol.api.Activation(data = net, act_type = "relu", name = s"${prefix}_act")
     net
   }
 
   def deconv2DAct(data: Symbol, prefix: String, actType: String,
                   iShape: Shape, oShape: Shape, kShape: (Int, Int)): Symbol = {
     var net = deconv2D(data, iShape, oShape, kShape, name = s"${prefix}_deconv")
-    net = Symbol.api.Activation(data = Some(net), act_type = "relu", name = s"${prefix}_act")
+    net = Symbol.api.Activation(data = net, act_type = "relu", name = s"${prefix}_act")
     net
   }
 
@@ -58,11 +58,11 @@ object GanMnist {
                    eps: Float = 1e-5f + 1e-12f): (Symbol, Symbol) = {
 
     val code = Symbol.Variable("rand")
-    var net = Symbol.api.FullyConnected(data = Some(code), num_hidden = 4 * 4 * ngf * 4,
-      no_bias = Some(true), name = " g1")
-    net = Symbol.api.Activation(data = Some(net), act_type = "relu", name = "gact1")
+    var net = Symbol.api.FullyConnected(data = code, num_hidden = 4 * 4 * ngf * 4,
+      no_bias = true, name = " g1")
+    net = Symbol.api.Activation(data = net, act_type = "relu", name = "gact1")
     // 4 x 4
-    net = Symbol.api.Reshape(data = Some(net), shape = Some(Shape(-1, ngf * 4, 4, 4)))
+    net = Symbol.api.Reshape(data = net, shape = Shape(-1, ngf * 4, 4, 4))
     // 8 x 8
     net = deconv2DBnRelu(net, prefix = "g2",
       iShape = Shape(ngf * 4, 4, 4), oShape = Shape(ngf * 2, 8, 8), kShape = (3, 3))
@@ -75,22 +75,22 @@ object GanMnist {
 
     val data = Symbol.Variable("data")
     // 28 x 28
-    val conv1 = Symbol.api.Convolution(data = Some(data), kernel = Shape(5, 5),
+    val conv1 = Symbol.api.Convolution(data = data, kernel = Shape(5, 5),
       num_filter = 20, name = "conv1")
-    val tanh1 = Symbol.api.Activation(data = Some(conv1), act_type = "tanh")
-    val pool1 = Symbol.api.Pooling(data = Some(tanh1), pool_type = Some("max"),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
+    val tanh1 = Symbol.api.Activation(data = conv1, act_type = "tanh")
+    val pool1 = Symbol.api.Pooling(data = tanh1, pool_type = "max",
+      kernel = Shape(2, 2), stride = Shape(2, 2))
     // second conv
-    val conv2 = Symbol.api.Convolution(data = Some(pool1), kernel = Shape(5, 5),
+    val conv2 = Symbol.api.Convolution(data = pool1, kernel = Shape(5, 5),
       num_filter = 50, name = "conv2")
-    val tanh2 = Symbol.api.Activation(data = Some(conv2), act_type = "tanh")
-    val pool2 = Symbol.api.Pooling(data = Some(tanh2), pool_type = Some("max"),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
-    var d5 = Symbol.api.Flatten(data = Some(pool2))
-    d5 = Symbol.api.FullyConnected(data = Some(d5), num_hidden = 500, name = "fc1")
-    d5 = Symbol.api.Activation(data = Some(d5), act_type = "tanh")
-    d5 = Symbol.api.FullyConnected(data = Some(d5), num_hidden = 1, name = "fc_dloss")
-    val dloss = Symbol.api.LogisticRegressionOutput(data = Some(d5), name = "dloss")
+    val tanh2 = Symbol.api.Activation(data = conv2, act_type = "tanh")
+    val pool2 = Symbol.api.Pooling(data = tanh2, pool_type = "max",
+      kernel = Shape(2, 2), stride = Shape(2, 2))
+    var d5 = Symbol.api.Flatten(data = pool2)
+    d5 = Symbol.api.FullyConnected(data = d5, num_hidden = 500, name = "fc1")
+    d5 = Symbol.api.Activation(data = d5, act_type = "tanh")
+    d5 = Symbol.api.FullyConnected(data = d5, num_hidden = 1, name = "fc_dloss")
+    val dloss = Symbol.api.LogisticRegressionOutput(data = d5, name = "dloss")
 
     (gout, dloss)
   }
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainMnist.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainMnist.scala
index 2f024fd039b..d20a5d9546d 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainMnist.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainMnist.scala
@@ -32,35 +32,35 @@ object TrainMnist {
     val data = Symbol.Variable("data")
 
     // val fc1 = Symbol.FullyConnected(name = "relu")()(Map("data" -> data, "act_type" -> "relu"))
-    val fc1 = Symbol.api.FullyConnected(data = Some(data), num_hidden = 128, name = "fc1")
-    val act1 = Symbol.api.Activation (data = Some(fc1), "relu", name = "relu")
-    val fc2 = Symbol.api.FullyConnected(Some(act1), None, None, 64, name = "fc2")
-    val act2 = Symbol.api.Activation(data = Some(fc2), "relu", name = "relu2")
-    val fc3 = Symbol.api.FullyConnected(Some(act2), None, None, 10, name = "fc3")
-    val mlp = Symbol.api.SoftmaxOutput(name = "softmax", data = Some(fc3))
+    val fc1 = Symbol.api.FullyConnected(data = data, num_hidden = 128, name = "fc1")
+    val act1 = Symbol.api.Activation (data = fc1, "relu", name = "relu")
+    val fc2 = Symbol.api.FullyConnected(act1, null, null, 64, name = "fc2")
+    val act2 = Symbol.api.Activation(data = fc2, "relu", name = "relu2")
+    val fc3 = Symbol.api.FullyConnected(act2, null, null, 10, name = "fc3")
+    val mlp = Symbol.api.SoftmaxOutput(name = "softmax", data = fc3)
     mlp
   }
 
   def getLenet: Symbol = {
     val data = Symbol.Variable("data")
     // first conv
-    val conv1 = Symbol.api.Convolution(data = Some(data), kernel = Shape(5, 5), num_filter = 20)
-    val tanh1 = Symbol.api.tanh(data = Some(conv1))
-    val pool1 = Symbol.api.Pooling(data = Some(tanh1), pool_type = Some("max"),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
+    val conv1 = Symbol.api.Convolution(data = data, kernel = Shape(5, 5), num_filter = 20)
+    val tanh1 = Symbol.api.tanh(data = conv1)
+    val pool1 = Symbol.api.Pooling(data = tanh1, pool_type = "max",
+      kernel = Shape(2, 2), stride = Shape(2, 2))
     // second conv
-    val conv2 = Symbol.api.Convolution(data = Some(pool1), kernel = Shape(5, 5), num_filter = 50)
-    val tanh2 = Symbol.api.tanh(data = Some(conv2))
-    val pool2 = Symbol.api.Pooling(data = Some(tanh2), pool_type = Some("max"),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
+    val conv2 = Symbol.api.Convolution(data = pool1, kernel = Shape(5, 5), num_filter = 50)
+    val tanh2 = Symbol.api.tanh(data = conv2)
+    val pool2 = Symbol.api.Pooling(data = tanh2, pool_type = "max",
+      kernel = Shape(2, 2), stride = Shape(2, 2))
     // first fullc
-    val flatten = Symbol.api.Flatten(data = Some(pool2))
-    val fc1 = Symbol.api.FullyConnected(data = Some(flatten), num_hidden = 500)
-    val tanh3 = Symbol.api.tanh(data = Some(fc1))
+    val flatten = Symbol.api.Flatten(data = pool2)
+    val fc1 = Symbol.api.FullyConnected(data = flatten, num_hidden = 500)
+    val tanh3 = Symbol.api.tanh(data = fc1)
     // second fullc
-    val fc2 = Symbol.api.FullyConnected(data = Some(tanh3), num_hidden = 10)
+    val fc2 = Symbol.api.FullyConnected(data = tanh3, num_hidden = 10)
     // loss
-    val lenet = Symbol.api.SoftmaxOutput(name = "softmax", data = Some(fc2))
+    val lenet = Symbol.api.SoftmaxOutput(name = "softmax", data = fc2)
     lenet
   }
 
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/multitask/ExampleMultiTask.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/multitask/ExampleMultiTask.scala
index bfde55831e2..7fbc23eafdf 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/multitask/ExampleMultiTask.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/multitask/ExampleMultiTask.scala
@@ -41,13 +41,13 @@ object ExampleMultiTask {
 
   def buildNetwork(): Symbol = {
     val data = Symbol.Variable("data")
-    val fc1 = Symbol.api.FullyConnected(data = Some(data), num_hidden = 128)
-    val act1 = Symbol.api.Activation(data = Some(fc1), act_type = "relu")
-    val fc2 = Symbol.api.FullyConnected(data = Some(act1), num_hidden = 64)
-    val act2 = Symbol.api.Activation(data = Some(fc2), act_type = "relu")
-    val fc3 = Symbol.api.FullyConnected(data = Some(act2), num_hidden = 10)
-    val sm1 = Symbol.api.SoftmaxOutput(data = Some(fc3))
-    val sm2 = Symbol.api.SoftmaxOutput(data = Some(fc3))
+    val fc1 = Symbol.api.FullyConnected(data = data, num_hidden = 128)
+    val act1 = Symbol.api.Activation(data = fc1, act_type = "relu")
+    val fc2 = Symbol.api.FullyConnected(data = act1, num_hidden = 64)
+    val act2 = Symbol.api.Activation(data = fc2, act_type = "relu")
+    val fc3 = Symbol.api.FullyConnected(data = act2, num_hidden = 10)
+    val sm1 = Symbol.api.SoftmaxOutput(data = fc3)
+    val sm2 = Symbol.api.SoftmaxOutput(data = fc3)
 
     val softmax = Symbol.Group(sm1, sm2)
 
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/ModelVgg19.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/ModelVgg19.scala
index ca4c242ab1c..cb805ef9601 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/ModelVgg19.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/ModelVgg19.scala
@@ -29,11 +29,11 @@ object ModelVgg19 {
   def ConvRelu(data : Symbol, convName : String, reluName : String,
                numFilter : Int, kernel : (Int, Int) = (3, 3),
                stride : (Int, Int) = (1, 1)) : Symbol = {
-    val conv = Symbol.api.Convolution(data = Some(data), num_filter = numFilter,
-      pad = Some(Shape(1, 1)), kernel = Shape(kernel._1, kernel._2),
-      stride = Some(Shape(stride._1, stride._2)), no_bias = Some(false),
-      workspace = Some(1024), name = convName)
-    val relu = Symbol.api.relu(data = Some(conv), name = reluName)
+    val conv = Symbol.api.Convolution(data = data, num_filter = numFilter,
+      pad = Shape(1, 1), kernel = Shape(kernel._1, kernel._2),
+      stride = Shape(stride._1, stride._2), no_bias = false,
+      workspace = 1024.toLong, name = convName)
+    val relu = Symbol.api.relu(data = conv, name = reluName)
     conv.dispose()
     relu
   }
@@ -48,30 +48,30 @@ object ModelVgg19 {
 
     val relu1_1 = ConvRelu(data, s"${prefix}conv1_1", s"${prefix}relu1_1", 64)
     val relu1_2 = ConvRelu(relu1_1, s"${prefix}conv1_2", s"${prefix}relu1_2", 64)
-    val pool1 = Symbol.api.Pooling(data = Some(relu1_2), pad = Some(Shape(0, 0)),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)), pool_type = Some("avg"),
+    val pool1 = Symbol.api.Pooling(data = relu1_2, pad = Shape(0, 0),
+      kernel = Shape(2, 2), stride = Shape(2, 2), pool_type = "avg",
       name = s"${prefix}pool1")
 
     val relu2_1 = ConvRelu(pool1, s"${prefix}conv2_1", s"${prefix}relu2_1", 128)
     val relu2_2 = ConvRelu(relu2_1, s"${prefix}conv2_2", s"${prefix}relu2_2", 128)
-    val pool2 = Symbol.api.Pooling(data = Some(relu2_2), pad = Some(Shape(0, 0)),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)), pool_type = Some("avg"),
+    val pool2 = Symbol.api.Pooling(data = relu2_2, pad = Shape(0, 0),
+      kernel = Shape(2, 2), stride = Shape(2, 2), pool_type = "avg",
       name = s"${prefix}pool2")
 
     val relu3_1 = ConvRelu(pool2, s"${prefix}conv3_1", s"${prefix}relu3_1", 256)
     val relu3_2 = ConvRelu(relu3_1, s"${prefix}conv3_2", s"${prefix}relu3_2", 256)
     val relu3_3 = ConvRelu(relu3_2, s"${prefix}conv3_3", s"${prefix}relu3_3", 256)
     val relu3_4 = ConvRelu(relu3_3, s"${prefix}conv3_4", s"${prefix}relu3_4", 256)
-    val pool3 = Symbol.api.Pooling(data = Some(relu3_4), pad = Some(Shape(0, 0)),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)), pool_type = Some("avg"),
+    val pool3 = Symbol.api.Pooling(data = relu3_4, pad = Shape(0, 0),
+      kernel = Shape(2, 2), stride = Shape(2, 2), pool_type = "avg",
       name = s"${prefix}pool3")
 
     val relu4_1 = ConvRelu(pool3, s"${prefix}conv4_1", s"${prefix}relu4_1", 512)
     val relu4_2 = ConvRelu(relu4_1, s"${prefix}conv4_2", s"${prefix}relu4_2", 512)
     val relu4_3 = ConvRelu(relu4_2, s"${prefix}conv4_3", s"${prefix}relu4_3", 512)
     val relu4_4 = ConvRelu(relu4_3, s"${prefix}conv4_4", s"${prefix}relu4_4", 512)
-    val pool4 = Symbol.api.Pooling(data = Some(relu4_4), pad = Some(Shape(0, 0)),
-      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)), pool_type = Some("avg"),
+    val pool4 = Symbol.api.Pooling(data = relu4_4, pad = Shape(0, 0),
+      kernel = Shape(2, 2), stride = Shape(2, 2), pool_type = "avg",
       name = s"${prefix}pool4")
 
     val relu5_1 = ConvRelu(pool4, s"${prefix}conv5_1", s"${prefix}relu5_1", 512)
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala
index 1767cabcbae..e7c78bd80b3 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala
@@ -109,10 +109,10 @@ object NeuralStyle {
     var gradScale = List[Int]()
     for (i <- 0 until style.listOutputs().length) {
       val shape = outputShape(i)
-      val x = Symbol.api.Reshape(data = Some(style.get(i)),
-        target_shape = Some(Shape(shape(1), shape(2) * shape(3))))
-      val gram = Symbol.api.FullyConnected(data = Some(x), weight = Some(x),
-        no_bias = Some(true), num_hidden = shape(1))
+      val x = Symbol.api.Reshape(data = style.get(i),
+        target_shape = Shape(shape(1), shape(2) * shape(3)))
+      val gram = Symbol.api.FullyConnected(data = x, weight = x,
+        no_bias = true, num_hidden = shape(1))
       x.dispose()
       gramList = gramList :+ gram
       gradScale = gradScale :+ (shape(1) * shape(2) * shape(3) * shape(1))
@@ -124,16 +124,16 @@ object NeuralStyle {
     var gramLoss = ListBuffer[Symbol]()
     for (i <- 0 until gram.listOutputs().length) {
       val gvar = Symbol.Variable(s"target_gram_$i")
-      Symbol.api.square(data = Some(gvar - gram.get(i)))
+      Symbol.api.square(data = gvar - gram.get(i))
       gramLoss += Symbol.api.sum(
-        Some(Symbol.api.square(data = Some(gvar - gram.get(i))))
+        Symbol.api.square(data = gvar - gram.get(i))
       )
       gvar.dispose()
     }
     gram.dispose()
     val cvar = Symbol.Variable("target_content")
     val contentLoss = Symbol.api.sum(
-      Some(Symbol.api.square(Some(cvar - content)))
+      Symbol.api.square(cvar - content)
     )
     (Symbol.Group(gramLoss: _*), contentLoss)
   }
@@ -145,11 +145,11 @@ object NeuralStyle {
     val nChannel = img.shape(1)
     val sImg = Symbol.Variable("img")
     val sKernel = Symbol.Variable("kernel")
-    val channels = Symbol.api.SliceChannel(data = Some(sImg), num_outputs = nChannel)
+    val channels = Symbol.api.SliceChannel(data = sImg, num_outputs = nChannel)
     val result = (0 until nChannel).map { i =>
-      Symbol.api.Convolution(data = Some(channels.get(i)), weight = Some(sKernel),
-        num_filter = 1, kernel = Shape(3, 3), pad = Some(Shape(1, 1)), no_bias = Some(true),
-        stride = Some(Shape(1, 1)))
+      Symbol.api.Convolution(data = channels.get(i), weight = sKernel,
+        num_filter = 1, kernel = Shape(3, 3), pad = Shape(1, 1), no_bias = true,
+        stride = Shape(1, 1))
     }.toArray
     val out = Symbol.api.Concat(result, result.length) * tvWeight
     val kernel = {
@@ -247,7 +247,7 @@ object NeuralStyle {
             optimizer.update(0, img,
               modelExecutor.dataGrad + executor.outputs(0),
               optimState)
-          }
+        }
           case None =>
             optimizer.update(0, img, modelExecutor.dataGrad, optimState)
         }
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/Basic.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/Basic.scala
index 56303253f33..f78dd2dab76 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/Basic.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/Basic.scala
@@ -70,10 +70,10 @@ object Basic {
     var gradScale = List[Int]()
     for (i <- 0 until style.listOutputs().length) {
       val shape = outputShape(i)
-      val x = Symbol.api.Reshape(data = Some(style.get(i)),
-        shape = Some(Shape(shape(1), shape(2) * shape(3))))
-      val gram = Symbol.api.FullyConnected(data = Some(x), weight = Some(x),
-        no_bias = Some(true), num_hidden = shape(1))
+      val x = Symbol.api.Reshape(data = style.get(i),
+        shape = Shape(shape(1), shape(2) * shape(3)))
+      val gram = Symbol.api.FullyConnected(data = x, weight = x,
+        no_bias = true, num_hidden = shape(1))
       gramList = gramList :+ gram
       gradScale = gradScale :+ (shape(1) * shape(2) * shape(3) * shape(1))
     }
@@ -84,12 +84,12 @@ object Basic {
     var gramLoss = List[Symbol]()
     for (i <- 0 until gram.listOutputs().length) {
       val gvar = Symbol.Variable(s"target_gram_$i")
-      gramLoss = gramLoss :+ Symbol.api.sum(Some(
-        Symbol.api.square(Some(gvar - gram.get(i)))
-      ))
+      gramLoss = gramLoss :+ Symbol.api.sum(
+        Symbol.api.square(gvar - gram.get(i))
+      )
     }
     val cvar = Symbol.Variable("target_content")
-    val contentLoss = Symbol.api.sum(Some(Symbol.api.square(Some(cvar - content))))
+    val contentLoss = Symbol.api.sum(Symbol.api.square(cvar - content))
     (Symbol.Group(gramLoss: _*), contentLoss)
   }
 
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala
index 8246f44bae2..ab811b92ac5 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala
@@ -39,11 +39,11 @@ object BoostTrain {
     val nChannel = img.shape(1)
     val sImg = Symbol.Variable("img")
     val sKernel = Symbol.Variable("kernel")
-    val channels = Symbol.api.SliceChannel(data = Some(sImg), num_outputs = nChannel)
+    val channels = Symbol.api.SliceChannel(data = sImg, num_outputs = nChannel)
     val toConcat = (0 until nChannel).map( i =>
-      Symbol.api.Convolution(data = Some(channels.get(i)), weight = Some(sKernel),
-        num_filter = 1, kernel = Shape(3, 3), pad = Some(Shape(1, 1)),
-        no_bias = Some(true), stride = Some(Shape(1, 1)))
+      Symbol.api.Convolution(data = channels.get(i), weight = sKernel,
+        num_filter = 1, kernel = Shape(3, 3), pad = Shape(1, 1),
+        no_bias = true, stride = Shape(1, 1))
     ).toArray
     val out = Symbol.api.Concat(data = toConcat, num_args = toConcat.length) * tvWeight
     val kernel = {
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV3.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV3.scala
index d7ab59e2840..f0ab1cca454 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV3.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV3.scala
@@ -23,11 +23,11 @@ import org.apache.mxnet.{Context, Shape, Symbol, Xavier}
 object GenV3 {
   def Conv(data: Symbol, numFilter: Int, kernel: (Int, Int) = (5, 5),
            pad: (Int, Int) = (2, 2), stride: (Int, Int) = (2, 2)): Symbol = {
-    val sym1 = Symbol.api.Convolution(data = Some(data), num_filter = numFilter,
-      kernel = Shape(kernel._1, kernel._2), stride = Some(Shape(stride._1, stride._2)),
-      pad = Some(Shape(pad._1, pad._2)), no_bias = Some(false))
-    val sym2 = Symbol.api.BatchNorm(data = Some(sym1), fix_gamma = Some(false))
-    val sym3 = Symbol.api.LeakyReLU(data = Some(sym2), act_type = Some("leaky"))
+    val sym1 = Symbol.api.Convolution(data = data, num_filter = numFilter,
+      kernel = Shape(kernel._1, kernel._2), stride = Shape(stride._1, stride._2),
+      pad = Shape(pad._1, pad._2), no_bias = false)
+    val sym2 = Symbol.api.BatchNorm(data = sym1, fix_gamma = false)
+    val sym3 = Symbol.api.LeakyReLU(data = sym2, act_type = "leaky")
     sym2.dispose()
     sym1.dispose()
     sym3
@@ -36,14 +36,14 @@ object GenV3 {
   def Deconv(data: Symbol, numFilter: Int, imHw: (Int, Int),
              kernel: (Int, Int) = (7, 7), pad: (Int, Int) = (2, 2), stride: (Int, Int) = (2, 2),
              crop: Boolean = true, out: Boolean = false): Symbol = {
-    var sym = Symbol.api.Deconvolution(data = Some(data), num_filter = numFilter,
-      kernel = Shape(kernel._1, kernel._2), stride = Some(Shape(stride._1, stride._2)),
-      pad = Some(Shape(pad._1, pad._2)), no_bias = Some(true))
-    if (crop) sym = Symbol.api.Crop(data = Array(sym), offset = Some(Shape(1, 1)),
-      h_w = Some(Shape(imHw._1, imHw._2)), num_args = 1)
-    sym = Symbol.api.BatchNorm(data = Some(sym), fix_gamma = Some(false))
-    if (out == false) Symbol.api.LeakyReLU(data = Some(sym), act_type = Some("leaky"))
-    else Symbol.api.Activation(data = Some(sym), act_type = "tanh")
+    var sym = Symbol.api.Deconvolution(data = data, num_filter = numFilter,
+      kernel = Shape(kernel._1, kernel._2), stride = Shape(stride._1, stride._2),
+      pad = Shape(pad._1, pad._2), no_bias = true)
+    if (crop) sym = Symbol.api.Crop(data = Array(sym), offset = Shape(1, 1),
+      h_w = Shape(imHw._1, imHw._2), num_args = 1)
+    sym = Symbol.api.BatchNorm(data = sym, fix_gamma = false)
+    if (out == false) Symbol.api.LeakyReLU(data = sym, act_type = "leaky")
+    else Symbol.api.Activation(data = sym, act_type = "tanh")
   }
 
   def getGenerator(prefix: String, imHw: (Int, Int)): Symbol = {
@@ -60,7 +60,7 @@ object GenV3 {
     val conv5_1 = Conv(deconv2, 96, kernel = (3, 3), pad = (1, 1), stride = (1, 1))
     val deconv3 = Deconv(conv5_1, 3, imHw, kernel = (8, 8), pad = (3, 3), out = true, crop = false)
     val rawOut = (deconv3 * 128) + 128
-    val norm = Symbol.api.SliceChannel(data = Some(rawOut), num_outputs = 3)
+    val norm = Symbol.api.SliceChannel(data = rawOut, num_outputs = 3)
     val rCh = norm.get(0) - 123.68f
     val gCh = norm.get(1) - 116.779f
     val bCh = norm.get(2) - 103.939f
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV4.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV4.scala
index 82fc9b6ce10..11bc5dad453 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV4.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/GenV4.scala
@@ -24,11 +24,11 @@ object GenV4 {
 
   def Conv(data: Symbol, numFilter: Int, workspace : Long, kernel: (Int, Int) = (5, 5),
            pad: (Int, Int) = (2, 2)): Symbol = {
-    val sym1 = Symbol.api.Convolution(data = Some(data), num_filter = numFilter,
-      kernel = Shape(kernel._1, kernel._2), workspace = Some(workspace),
-      pad = Some(Shape(pad._1, pad._2)), no_bias = Some(false))
-    val sym2 = Symbol.api.BatchNorm(data = Some(sym1), fix_gamma = Some(false))
-    val sym3 = Symbol.api.LeakyReLU(data = Some(sym2), act_type = Some("leaky"))
+    val sym1 = Symbol.api.Convolution(data = data, num_filter = numFilter,
+      kernel = Shape(kernel._1, kernel._2), workspace = workspace,
+      pad = Shape(pad._1, pad._2), no_bias = false)
+    val sym2 = Symbol.api.BatchNorm(data = sym1, fix_gamma = false)
+    val sym3 = Symbol.api.LeakyReLU(data = sym2, act_type = "leaky")
     sym2.dispose()
     sym1.dispose()
     sym3
@@ -43,12 +43,12 @@ object GenV4 {
     var conv4_1 = Conv(conv3_1, 32, 4096)
     var conv5_1 = Conv(conv4_1, 48, 4096)
     var conv6_1 = Conv(conv5_1, 32, 4096)
-    var out = Symbol.api.Convolution(data = Some(conv6_1), num_filter = 3, kernel = Shape(3, 3),
-      pad = Some(Shape(1, 1)), no_bias = Some(true), workspace = Some(4096))
-    out = Symbol.api.BatchNorm(data = Some(out), fix_gamma = Some(false))
-    out = Symbol.api.Activation(data = Some(out), act_type = "tanh")
+    var out = Symbol.api.Convolution(data = conv6_1, num_filter = 3, kernel = Shape(3, 3),
+      pad = Shape(1, 1), no_bias = true, workspace = 4096.toLong)
+    out = Symbol.api.BatchNorm(data = out, fix_gamma = false)
+    out = Symbol.api.Activation(data = out, act_type = "tanh")
     val rawOut = (out * 128) + 128
-    val norm = Symbol.api.SliceChannel(data = Some(rawOut), num_outputs = 3)
+    val norm = Symbol.api.SliceChannel(data = rawOut, num_outputs = 3)
     val rCh = norm.get(0) - 123.68f
     val gCh = norm.get(1) - 116.779f
     val bCh = norm.get(2) - 103.939f
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerMatMul.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerMatMul.scala
index 95c5d77c877..db7df27afbe 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerMatMul.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerMatMul.scala
@@ -47,7 +47,7 @@ object ProfilerMatMul {
 
       val A = Symbol.Variable("A")
       val B = Symbol.Variable("B")
-      val C = Symbol.api.dot(Some(A), Some(B))
+      val C = Symbol.api.dot(A, B)
 
       val executor = C.simpleBind(ctx, "write",
           Map("A" -> Shape(4096, 4096), "B" -> Shape(4096, 4096)))
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerNDArray.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerNDArray.scala
index 3e803400725..1fbf2ddd8e7 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerNDArray.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/profiler/ProfilerNDArray.scala
@@ -47,7 +47,7 @@ object ProfilerNDArray {
         val randomRet = (0 until shape.product)
           .map(r => scala.util.Random.nextFloat() - 0.5f).toArray
         dat.set(randomRet)
-        val ndArrayRet = NDArray.api.broadcast_to(dat, Some(targetShape))
+        val ndArrayRet = NDArray.api.broadcast_to(dat, targetShape)
         require(ndArrayRet.shape == targetShape)
         val err = {
           // implementation of broadcast
diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/rnn/Lstm.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/rnn/Lstm.scala
index 872ef7871fb..aa4169d8d00 100644
--- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/rnn/Lstm.scala
+++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/rnn/Lstm.scala
@@ -32,22 +32,22 @@ object Lstm {
   def lstm(numHidden: Int, inData: Symbol, prevState: LSTMState,
            param: LSTMParam, seqIdx: Int, layerIdx: Int, dropout: Float = 0f): LSTMState = {
     val inDataa = {
-      if (dropout > 0f) Symbol.api.Dropout(data = Some(inData), p = Some(dropout))
+      if (dropout > 0f) Symbol.api.Dropout(data = inData, p = dropout)
       else inData
     }
-    val i2h = Symbol.api.FullyConnected(data = Some(inDataa), weight = Some(param.i2hWeight),
-      bias = Some(param.i2hBias), num_hidden = numHidden * 4, name = s"t${seqIdx}_l${layerIdx}_i2h")
-    val h2h = Symbol.api.FullyConnected(data = Some(prevState.h), weight = Some(param.h2hWeight),
-      bias = Some(param.h2hBias), num_hidden = numHidden * 4, name = s"t${seqIdx}_l${layerIdx}_h2h")
+    val i2h = Symbol.api.FullyConnected(data = inDataa, weight = param.i2hWeight,
+      bias = param.i2hBias, num_hidden = numHidden * 4, name = s"t${seqIdx}_l${layerIdx}_i2h")
+    val h2h = Symbol.api.FullyConnected(data = prevState.h, weight = param.h2hWeight,
+      bias = param.h2hBias, num_hidden = numHidden * 4, name = s"t${seqIdx}_l${layerIdx}_h2h")
     val gates = i2h + h2h
-    val sliceGates = Symbol.api.SliceChannel(data = Some(gates), num_outputs = 4,
+    val sliceGates = Symbol.api.SliceChannel(data = gates, num_outputs = 4,
       name = s"t${seqIdx}_l${layerIdx}_slice")
-    val ingate = Symbol.api.Activation(data = Some(sliceGates.get(0)), act_type = "sigmoid")
-    val inTransform = Symbol.api.Activation(data = Some(sliceGates.get(1)), act_type = "tanh")
-    val forgetGate = Symbol.api.Activation(data = Some(sliceGates.get(2)), act_type = "sigmoid")
-    val outGate = Symbol.api.Activation(data = Some(sliceGates.get(3)), act_type = "sigmoid")
+    val ingate = Symbol.api.Activation(data = sliceGates.get(0), act_type = "sigmoid")
+    val inTransform = Symbol.api.Activation(data = sliceGates.get(1), act_type = "tanh")
+    val forgetGate = Symbol.api.Activation(data = sliceGates.get(2), act_type = "sigmoid")
+    val outGate = Symbol.api.Activation(data = sliceGates.get(3), act_type = "sigmoid")
     val nextC = (forgetGate * prevState.c) + (ingate * inTransform)
-    val nextH = outGate * Symbol.api.Activation(data = Some(nextC), "tanh")
+    val nextH = outGate * Symbol.api.Activation(data = nextC, "tanh")
     LSTMState(c = nextC, h = nextH)
   }
 
@@ -79,10 +79,10 @@ object Lstm {
     // embeding layer
     val data = Symbol.Variable("data")
     var label = Symbol.Variable("softmax_label")
-    val embed = Symbol.api.Embedding(data = Some(data), input_dim = inputSize,
-      weight = Some(embedWeight), output_dim = numEmbed, name = "embed")
-    val wordvec = Symbol.api.SliceChannel(data = Some(embed),
-      num_outputs = seqLen, squeeze_axis = Some(true))
+    val embed = Symbol.api.Embedding(data = data, input_dim = inputSize,
+      weight = embedWeight, output_dim = numEmbed, name = "embed")
+    val wordvec = Symbol.api.SliceChannel(data = embed,
+      num_outputs = seqLen, squeeze_axis = true)
 
     val hiddenAll = ArrayBuffer[Symbol]()
     var dpRatio = 0f
@@ -100,16 +100,16 @@ object Lstm {
         lastStates(i) = nextState
       }
       // decoder
-      if (dropout > 0f) hidden = Symbol.api.Dropout(data = Some(hidden), p = Some(dropout))
+      if (dropout > 0f) hidden = Symbol.api.Dropout(data = hidden, p = dropout)
       hiddenAll.append(hidden)
     }
     val hiddenConcat = Symbol.api.Concat(data = hiddenAll.toArray, num_args = hiddenAll.length,
-      dim = Some(0))
-    val pred = Symbol.api.FullyConnected(data = Some(hiddenConcat), num_hidden = numLabel,
-      weight = Some(clsWeight), bias = Some(clsBias))
-    label = Symbol.api.transpose(data = Some(label))
-    label = Symbol.api.Reshape(data = Some(label), target_shape = Some(Shape(0)))
-    val sm = Symbol.api.SoftmaxOutput(data = Some(pred), label = Some(label), name = "softmax")
+      dim = 0)
+    val pred = Symbol.api.FullyConnected(data = hiddenConcat, num_hidden = numLabel,
+      weight = clsWeight, bias = clsBias)
+    label = Symbol.api.transpose(data = label)
+    label = Symbol.api.Reshape(data = label, target_shape = Shape(0))
+    val sm = Symbol.api.SoftmaxOutput(data = pred, label = label, name = "softmax")
     sm
   }
 
@@ -134,8 +134,8 @@ object Lstm {
 
     val data = Symbol.Variable("data")
 
-    var hidden = Symbol.api.Embedding(data = Some(data), input_dim = inputSize,
-      weight = Some(embedWeight), output_dim = numEmbed, name = "embed")
+    var hidden = Symbol.api.Embedding(data = data, input_dim = inputSize,
+      weight = embedWeight, output_dim = numEmbed, name = "embed")
 
     var dpRatio = 0f
     // stack LSTM
@@ -149,10 +149,10 @@ object Lstm {
       lastStates(i) = nextState
     }
     // decoder
-    if (dropout > 0f) hidden = Symbol.api.Dropout(data = Some(hidden), p = Some(dropout))
-    val fc = Symbol.api.FullyConnected(data = Some(hidden),
-      num_hidden = numLabel, weight = Some(clsWeight), bias = Some(clsBias))
-    val sm = Symbol.api.SoftmaxOutput(data = Some(fc), name = "softmax")
+    if (dropout > 0f) hidden = Symbol.api.Dropout(data = hidden, p = dropout)
+    val fc = Symbol.api.FullyConnected(data = hidden,
+      num_hidden = numLabel, weight = clsWeight, bias = clsBias)
+    val sm = Symbol.api.SoftmaxOutput(data = fc, name = "softmax")
     var output = Array(sm)
     for (state <- lastStates) {
       output = output :+ state.c
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
index b4efa659443..a72dbefecb3 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala
@@ -139,7 +139,7 @@ private[mxnet] object APIDocGenerator{
         case _ => absClassArg.argName
       }
       if (absClassArg.isOptional) {
-        argDef += s"$currArgName : Option[${absClassArg.argType}] = None"
+        argDef += s"$currArgName : ${absClassArg.argType} = null"
       }
       else {
         argDef += s"$currArgName : ${absClassArg.argType}"
@@ -150,7 +150,7 @@ private[mxnet] object APIDocGenerator{
       argDef += "name : String = null"
       argDef += "attr : Map[String, String] = null"
     } else {
-      argDef += "out : Option[NDArray] = None"
+      argDef += "out : NDArray = null"
       returnType = "org.apache.mxnet.NDArrayFuncReturn"
     }
     val experimentalTag = "@Experimental"
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
index 2d3a1c7ec5a..b7736af70b6 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala
@@ -111,7 +111,7 @@ private[mxnet] object NDArrayMacro {
           case default => ndarrayarg.argName
         }
         if (ndarrayarg.isOptional) {
-          argDef += s"${currArgName} : Option[${ndarrayarg.argType}] = None"
+          argDef += s"${currArgName} : ${ndarrayarg.argType} = null"
         }
         else {
           argDef += s"${currArgName} : ${ndarrayarg.argType}"
@@ -133,13 +133,13 @@ private[mxnet] object NDArrayMacro {
             "map(\"" + ndarrayarg.argName + "\") = " + currArgName
           }
         impl.append(
-          if (ndarrayarg.isOptional) s"if (!$currArgName.isEmpty) $base.get"
+          if (ndarrayarg.isOptional) s"if ($currArgName != null) $base"
           else base
         )
       })
       // add default out parameter
-      argDef += "out : Option[NDArray] = None"
-      impl += "if (!out.isEmpty) map(\"out\") = out.get"
+      argDef += "out : NDArray = null"
+      impl += "if (out != null) map(\"out\") = out"
       // scalastyle:off
       impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)"
       // scalastyle:on
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
index 42aa11781d8..1f779b29767 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala
@@ -117,7 +117,7 @@ private[mxnet] object SymbolImplMacros {
           case default => symbolarg.argName
         }
         if (symbolarg.isOptional) {
-          argDef += s"${currArgName} : Option[${symbolarg.argType}] = None"
+          argDef += s"${currArgName} : ${symbolarg.argType} = null"
         }
         else {
           argDef += s"${currArgName} : ${symbolarg.argType}"
@@ -126,12 +126,12 @@ private[mxnet] object SymbolImplMacros {
         val returnType = "org.apache.mxnet.Symbol"
         val base =
         if (symbolarg.argType.equals(s"Array[$returnType]")) {
-          if (symbolarg.isOptional) s"if (!$currArgName.isEmpty) args = $currArgName.get.toSeq"
+          if (symbolarg.isOptional) s"if ($currArgName != null) args = $currArgName.toSeq"
           else s"args = $currArgName.toSeq"
         } else {
           if (symbolarg.isOptional) {
             // scalastyle:off
-            s"if (!$currArgName.isEmpty) map(" + "\"" + symbolarg.argName + "\"" + s") = $currArgName.get"
+            s"if ($currArgName != null) map(" + "\"" + symbolarg.argName + "\"" + s") = $currArgName"
             // scalastyle:on
           }
           else "map(\"" + symbolarg.argName + "\"" + s") = $currArgName"
diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala
index b07e6f97eee..f5bc56dc202 100644
--- a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala
+++ b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala
@@ -28,12 +28,12 @@ private[mxnet] object CToScalaUtils {
       case "Symbol" | "NDArray" | "NDArray-or-Symbol" => returnType
       case "Symbol[]" | "NDArray[]" | "NDArray-or-Symbol[]" | "SymbolorSymbol[]"
       => s"Array[$returnType]"
-      case "float" | "real_t" | "floatorNone" => "org.apache.mxnet.Base.MXFloat"
-      case "int" | "intorNone" | "int(non-negative)" => "Int"
-      case "long" | "long(non-negative)" => "Long"
-      case "double" | "doubleorNone" => "Double"
+      case "float" | "real_t" | "floatorNone" => "java.lang.Float"
+      case "int" | "intorNone" | "int(non-negative)" => "java.lang.Integer"
+      case "long" | "long(non-negative)" => "java.lang.Long"
+      case "double" | "doubleorNone" => "java.lang.Double"
       case "string" => "String"
-      case "boolean" | "booleanorNone" => "Boolean"
+      case "boolean" | "booleanorNone" => "java.lang.Boolean"
       case "tupleof<float>" | "tupleof<double>" | "tupleof<>" | "ptr" | "" => "Any"
       case default => throw new IllegalArgumentException(
         s"Invalid type for args: $default\nString argType: $argType\nargName: $argName")
diff --git a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala
index c3a7c58c1af..4404b0885d5 100644
--- a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala
+++ b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala
@@ -36,7 +36,7 @@ class MacrosSuite extends FunSuite with BeforeAndAfterAll {
     )
     val output = List(
       ("org.apache.mxnet.Symbol", true),
-      ("Int", false),
+      ("java.lang.Integer", false),
       ("org.apache.mxnet.Shape", true),
       ("String", true),
       ("Any", false)


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services