You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by ns...@apache.org on 2018/11/14 23:01:19 UTC
[incubator-mxnet] branch master updated: Fix scaladoc build errors
(#13189)
This is an automated email from the ASF dual-hosted git repository.
nswamy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new 8cb73ef Fix scaladoc build errors (#13189)
8cb73ef is described below
commit 8cb73efb521f3653bd262022c5840afb854b98e2
Author: Zach Kimberg <za...@kimberg.com>
AuthorDate: Wed Nov 14 15:01:05 2018 -0800
Fix scaladoc build errors (#13189)
* Fix scaladoc errors from missing classpath
Remove duplicate scalastyle plugin
* Fix scaladoc warnings
Also enable and fix all feature and deprecation warnings
---
docs/mxdoc.py | 9 +++++++--
scala-package/core/pom.xml | 4 ----
scala-package/core/src/main/scala/org/apache/mxnet/Context.scala | 2 ++
.../core/src/main/scala/org/apache/mxnet/Executor.scala | 5 -----
scala-package/core/src/main/scala/org/apache/mxnet/IO.scala | 7 ++++---
scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala | 2 +-
scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala | 1 +
.../core/src/main/scala/org/apache/mxnet/Optimizer.scala | 2 +-
.../core/src/main/scala/org/apache/mxnet/ResourceScope.scala | 6 +++---
scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala | 1 +
.../core/src/main/scala/org/apache/mxnet/Visualization.scala | 1 +
.../core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala | 4 ++--
.../core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala | 4 ++--
.../src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala | 4 ++--
.../core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala | 4 ++--
.../core/src/main/scala/org/apache/mxnet/javaapi/Context.scala | 1 +
.../core/src/main/scala/org/apache/mxnet/javaapi/IO.scala | 2 ++
.../core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala | 1 +
.../core/src/main/scala/org/apache/mxnet/module/BaseModule.scala | 6 +++---
.../src/main/scala/org/apache/mxnet/module/BucketingModule.scala | 4 ++--
.../org/apache/mxnet/module/DataParallelExecutorGroup.scala | 4 ++--
.../core/src/main/scala/org/apache/mxnet/module/Module.scala | 4 ++--
.../main/scala/org/apache/mxnet/module/SequentialModule.scala | 4 ++--
23 files changed, 44 insertions(+), 38 deletions(-)
diff --git a/docs/mxdoc.py b/docs/mxdoc.py
index 8570cae..8b26c89 100644
--- a/docs/mxdoc.py
+++ b/docs/mxdoc.py
@@ -110,8 +110,13 @@ def build_scala(app):
def build_scala_docs(app):
"""build scala doc and then move the outdir"""
scala_path = app.builder.srcdir + '/../scala-package'
- # scaldoc fails on some apis, so exit 0 to pass the check
- _run_cmd('cd ' + scala_path + '; scaladoc `find . -type f -name "*.scala" | egrep \"\/core|\/infer\" | egrep -v \"Suite|javaapi\"`; exit 0')
+ scala_doc_sources = 'find . -type f -name "*.scala" | egrep \"\.\/core|\.\/infer\" | egrep -v \"Suite\"'
+ scala_doc_classpath = ':'.join([
+ '`find native -name "*.jar" | grep "target/lib/" | tr "\\n" ":" `',
+ '`find macros -name "*-SNAPSHOT.jar" | tr "\\n" ":" `'
+ ])
+ _run_cmd('cd {}; scaladoc `{}` -classpath {} -feature -deprecation'
+ .format(scala_path, scala_doc_sources, scala_doc_classpath))
dest_path = app.builder.outdir + '/api/scala/docs'
_run_cmd('rm -rf ' + dest_path)
_run_cmd('mkdir -p ' + dest_path)
diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml
index e93169f..56ff4db 100644
--- a/scala-package/core/pom.xml
+++ b/scala-package/core/pom.xml
@@ -93,10 +93,6 @@
<groupId>org.scalastyle</groupId>
<artifactId>scalastyle-maven-plugin</artifactId>
</plugin>
- <plugin>
- <groupId>org.scalastyle</groupId>
- <artifactId>scalastyle-maven-plugin</artifactId>
- </plugin>
</plugins>
</build>
<dependencies>
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
index beeb430..ab44f43 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
@@ -17,6 +17,8 @@
package org.apache.mxnet
+import scala.language.implicitConversions
+
object Context {
val devtype2str = Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")
val devstr2type = Map("cpu" -> 1, "gpu" -> 2, "cpu_pinned" -> 3)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
index 19fb6fe..b342a96 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
@@ -224,7 +224,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
/**
* Get dictionary representation of argument arrays.
* @return The dictionary that maps name of arguments to NDArrays.
- * @throws IllegalArgumentException if there are duplicated names in the arguments.
*/
def argDict: Map[String, NDArray] = {
if (_argDict == null) {
@@ -236,7 +235,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
/**
* Get dictionary representation of gradient arrays.
* @return The dictionary that maps name of arguments to gradient arrays.
- * @throws IllegalArgumentException if there are duplicated names in the grads.
*/
def gradDict: Map[String, NDArray] = {
if (_gradDict == null) {
@@ -248,7 +246,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
/**
* Get dictionary representation of auxiliary states arrays.
* @return The dictionary that maps name of auxiliary states to NDArrays.
- * @throws IllegalArgumentException if there are duplicated names in the auxiliary states.
*/
def auxDict: Map[String, NDArray] = {
if (_auxDict == null) {
@@ -265,8 +262,6 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle,
* Whether allow extra parameters that are not needed by symbol
* If this is True, no error will be thrown when arg_params or aux_params
* contain extra parameters that is not needed by the executor.
- * @throws IllegalArgumentException
- * If there is additional parameters in the dict but allow_extra_params=False
*/
def copyParamsFrom(argParams: Map[String, NDArray],
auxParams: Map[String, NDArray],
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala b/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
index e835142..b580ad1 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
import scala.annotation.varargs
import scala.collection.immutable.ListMap
import scala.collection.mutable.ListBuffer
+import scala.language.implicitConversions
/**
* IO iterators for loading training & validation data
*/
@@ -340,11 +341,11 @@ abstract class DataIter extends Iterator[DataBatch] {
def getIndex(): IndexedSeq[Long]
// The name and shape of data provided by this iterator
- @deprecated
+ @deprecated("Use provideDataDesc instead", "1.3.0")
def provideData: ListMap[String, Shape]
// The name and shape of label provided by this iterator
- @deprecated
+ @deprecated("Use provideLabelDesc instead", "1.3.0")
def provideLabel: ListMap[String, Shape]
// Provide type:DataDesc of the data
@@ -404,7 +405,7 @@ object DataDesc {
}
}
- @deprecated
+ @deprecated("Please use DataDesc methods instead", "1.3.0")
implicit def ListMap2Descs(shapes: ListMap[String, Shape]): IndexedSeq[DataDesc] = {
if (shapes != null) {
shapes.map { case (k, s) => new DataDesc(k, s) }.toIndexedSeq
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
index 45189a1..b2d4349 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
@@ -286,7 +286,7 @@ class KVStore(private[mxnet] val handle: KVStoreHandle) extends NativeResource {
case cachedStates: MXKVStoreCachedStates =>
val bis = new BufferedInputStream (new FileInputStream (fname) )
try {
- val bArray = Stream.continually (bis.read).takeWhile (- 1 !=).map (_.toByte).toArray
+ val bArray = Stream.continually (bis.read).takeWhile (_ != -1).map (_.toByte).toArray
cachedStates.deserializeState(bArray)
} finally {
bis.close ()
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
index f9f2dbe..3a0c3c1 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
import scala.ref.WeakReference
/**
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
index c3f8aae..1fb634c 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
@@ -144,7 +144,7 @@ abstract class Optimizer extends Serializable {
def deserializeState(bytes: Array[Byte]): AnyRef
// Set individual learning rate scale for parameters
- @deprecated("Use setLrMult instead.")
+ @deprecated("Use setLrMult instead.", "0.10.0")
def setLrScale(lrScale: Map[Int, Float]): Unit = {
val argsLrScale: Map[Either[Int, String], Float] = lrScale.map { case (k, v) => Left(k) -> v }
setLrMult(argsLrScale)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
index 30fe147..bb363c0 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
@@ -27,7 +27,7 @@ import scala.util.Try
import scala.util.control.{ControlThrowable, NonFatal}
/**
- * This class manages automatically releasing of [[NativeResource]]s
+ * This class manages automatically releasing of `org.apache.mxnet.NativeResource`s
*/
class ResourceScope extends AutoCloseable {
@@ -43,8 +43,8 @@ class ResourceScope extends AutoCloseable {
ResourceScope.addToThreadLocal(this)
/**
- * Releases all the [[NativeResource]] by calling
- * the associated [[NativeResource.close()]] method
+ * Releases all the `org.apache.mxnet.NativeResource` by calling
+ * the associated `org.apache.mxnet.NativeResource.close()` method
*/
override def close(): Unit = {
ResourceScope.removeFromThreadLocal(this)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
index 4472a84..01349a6 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
@@ -22,6 +22,7 @@ import org.apache.mxnet.DType.DType
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
/**
* Symbolic configuration API of mxnet. <br />
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
index 2a7b7a8..b990137 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
@@ -21,6 +21,7 @@ import scala.util.parsing.json._
import java.io.File
import java.io.PrintWriter
import scala.collection.mutable.ArrayBuffer
+import scala.language.postfixOps
object Visualization {
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
index 9980177..a84bd10 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
@@ -158,11 +158,11 @@ private[mxnet] class MXDataIter(private[mxnet] val handle: DataIterHandle,
}
// The name and shape of data provided by this iterator
- @deprecated
+ @deprecated("Please use provideDataDesc instead", "1.3.0")
override def provideData: ListMap[String, Shape] = _provideData
// The name and shape of label provided by this iterator
- @deprecated
+ @deprecated("Please use provideLabelDesc instead", "1.3.0")
override def provideLabel: ListMap[String, Shape] = _provideLabel
// Provide type:DataDesc of the data
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala b/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
index e6be0ad..0032a54 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
@@ -237,11 +237,11 @@ class NDArrayIter(data: IndexedSeq[(DataDesc, NDArray)],
// The name and shape of data provided by this iterator
- @deprecated
+ @deprecated("Please use provideDataDesc instead", "1.3.0")
override def provideData: ListMap[String, Shape] = _provideData
// The name and shape of label provided by this iterator
- @deprecated
+ @deprecated("Please use provideLabelDesc instead", "1.3.0")
override def provideLabel: ListMap[String, Shape] = _provideLabel
// Provide type:DataDesc of the data
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala b/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
index e59e370..d277351 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
@@ -178,11 +178,11 @@ class PrefetchingIter(
override def getPad(): Int = this.currentBatch.pad
// The name and shape of label provided by this iterator
- @deprecated
+ @deprecated("Please use provideDataDesc instead", "1.3.0")
override def provideLabel: ListMap[String, Shape] = this._provideLabel
// The name and shape of data provided by this iterator
- @deprecated
+ @deprecated("Please use provideLabelDesc instead", "1.3.0")
override def provideData: ListMap[String, Shape] = this._provideData
// Provide type:DataDesc of the data
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala b/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
index e840af9..9bc042a 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
@@ -134,13 +134,13 @@ class ResizeIter(
}
// The name and shape of data provided by this iterator
- @deprecated
+ @deprecated("Please use provideDataDesc instead", "1.3.0")
override def provideData: ListMap[String, Shape] = {
dataIter.provideData
}
// The name and shape of label provided by this iterator
- @deprecated
+ @deprecated("Please use provideLabelDesc instead", "1.3.0")
override def provideLabel: ListMap[String, Shape] = {
dataIter.provideLabel
}
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
index 5f0caed..acae8bf 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
@@ -17,6 +17,7 @@
package org.apache.mxnet.javaapi
import collection.JavaConverters._
+import scala.language.implicitConversions
class Context(val context: org.apache.mxnet.Context) {
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
index 47b1c36..888a5d8 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
@@ -17,6 +17,8 @@
package org.apache.mxnet.javaapi
+import scala.language.implicitConversions
+
class DataDesc(val dataDesc: org.apache.mxnet.DataDesc) {
def this(name: String, shape: Shape, dType: DType.DType, layout: String) =
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
index 594e3a6..5c4464f 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
@@ -18,6 +18,7 @@
package org.apache.mxnet.javaapi
import collection.JavaConverters._
+import scala.language.implicitConversions
/**
* Shape of [[NDArray]] or other data
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
index 30e57c5..b73f4ad 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
@@ -210,7 +210,7 @@ abstract class BaseModule {
* @param reset Default is `True`, indicating whether we should reset the data iter before start
* doing prediction.
* @return The return value will be a nested list like
- * `[[out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, ...]]`
+ * `[ [out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, ...] ]`
* This mode is useful because in some cases (e.g. bucketing),
* the module does not necessarily produce the same number of outputs.
*/
@@ -501,7 +501,7 @@ abstract class BaseModule {
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
- * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+ * The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]]
@@ -519,7 +519,7 @@ abstract class BaseModule {
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
- * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+ * The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]]
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
index 2262f5c..1ac798e 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
@@ -339,7 +339,7 @@ class BucketingModule(symGen: AnyRef => (Symbol, IndexedSeq[String], IndexedSeq[
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
- * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+ * The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
override def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -363,7 +363,7 @@ class BucketingModule(symGen: AnyRef => (Symbol, IndexedSeq[String], IndexedSeq[
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
- * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+ * The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
override def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
index 5c567fe..df66ea7 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
@@ -517,7 +517,7 @@ class DataParallelExecutorGroup private[module](
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
- * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+ * The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -539,7 +539,7 @@ class DataParallelExecutorGroup private[module](
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
- * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+ * The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
index fec1ba0..97df3dc 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
@@ -486,7 +486,7 @@ class Module(symbolVar: Symbol,
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
- * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+ * The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -510,7 +510,7 @@ class Module(symbolVar: Symbol,
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
- * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+ * The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala b/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
index e75550a..2e506c0 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
@@ -346,7 +346,7 @@ class SequentialModule extends BaseModule {
* Get outputs of the previous forward computation.
* @return In the case when data-parallelism is used,
* the outputs will be collected from multiple devices.
- * The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, out2_dev2]]`,
+ * The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, out2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -370,7 +370,7 @@ class SequentialModule extends BaseModule {
* Get the gradients to the inputs, computed in the previous backward computation.
* @return In the case when data-parallelism is used,
* the grads will be collected from multiple devices.
- * The results will look like `[[grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2]]`,
+ * The results will look like `[ [grad1_dev1, grad1_dev2], [grad2_dev1, grad2_dev2] ]`,
* those `NDArray` might live on different devices.
*/
def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {